4 changes: 4 additions & 0 deletions tests/metrics/audio/test_fad.py
@@ -11,6 +11,10 @@
 import numpy as np
 import torch
 from torcheval.metrics import FrechetAudioDistance
+import pytest
+
+pytest.importorskip("torchaudio.prototype.pipelines")
+
 
 
 # pyre-fixme[24]: Generic type `np.ndarray` expects 2 type parameters.
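For reference, pytest.importorskip runs at collection time: if the named module cannot be imported, every test in the file is skipped rather than reported as an error. A minimal standalone sketch of the same pattern (hypothetical test file, not part of this PR):

import pytest

# Skip the whole module unless the optional dependency is importable;
# importorskip returns the imported module on success.
pipelines = pytest.importorskip("torchaudio.prototype.pipelines")


def test_pipelines_available() -> None:
    # Only runs when torchaudio's prototype pipelines can be imported.
    assert pipelines is not None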
13 changes: 9 additions & 4 deletions tests/metrics/classification/test_accuracy.py
@@ -118,10 +118,15 @@ def test_accuracy_class_average(self) -> None:
         target_flattened = target.flatten()
         accuracy_per_class = np.empty(num_classes)
         for i in range(num_classes):
-            accuracy_per_class[i] = accuracy_score(
-                target_flattened[target_flattened == i].numpy(),
-                input_flattened[target_flattened == i].numpy(),
-            )
+            mask = target_flattened == i
+            if mask.sum().item() == 0:
+                accuracy_per_class[i] = np.nan
+            else:
+                accuracy_per_class[i] = accuracy_score(
+                    target_flattened[mask].numpy(),
+                    input_flattened[mask].numpy(),
+                )
+
 
         self.run_class_implementation_tests(
             metric=MulticlassAccuracy(num_classes=num_classes, average="macro"),
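The guard matters because a class can be entirely absent from target: indexing with an all-False mask yields empty arrays, for which accuracy is undefined, so the reference records NaN instead of calling accuracy_score on empty inputs. A small self-contained sketch with hypothetical toy data (not from this PR):

import numpy as np
import torch
from sklearn.metrics import accuracy_score

# Hypothetical toy data: class 1 never appears in the targets.
input_flattened = torch.tensor([0, 1, 2, 2])
target_flattened = torch.tensor([0, 0, 2, 2])
num_classes = 3

accuracy_per_class = np.empty(num_classes)
for i in range(num_classes):
    mask = target_flattened == i
    if mask.sum().item() == 0:
        # Class i is absent from the targets: its accuracy is undefined.
        accuracy_per_class[i] = np.nan
    else:
        accuracy_per_class[i] = accuracy_score(
            target_flattened[mask].numpy(),
            input_flattened[mask].numpy(),
        )

# accuracy_per_class is now [0.5, nan, 1.0]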
18 changes: 11 additions & 7 deletions tests/metrics/classification/test_auroc.py
@@ -37,15 +37,16 @@ def _test_auroc_class_with_input(
         weight_tensors = weight.reshape(-1, 1) if weight is not None else None
 
         if compute_result is None:
-            compute_result = (
-                torch.tensor(
-                    roc_auc_score(
-                        target_tensors, input_tensors, sample_weight=weight_tensors
-                    )
-                )
+            score = (
+                roc_auc_score(target_tensors, input_tensors, sample_weight=weight_tensors)
                 if weight_tensors is not None
-                else torch.tensor(roc_auc_score(target_tensors, input_tensors))
+                else roc_auc_score(target_tensors, input_tensors)
             )
+            compute_result = torch.tensor(score, dtype=torch.float64)
+
+
+
+
         if weight is not None:
             self.run_class_implementation_tests(
                 metric=BinaryAUROC(num_tasks=num_tasks, use_fbgemm=use_fbgemm),
@@ -147,7 +148,10 @@ def test_auroc_class_update_input_shape_different(self) -> None:
                 torch.cat(update_target, dim=0),
                 torch.cat(update_input, dim=0),
                 sample_weight=torch.cat(update_weight, dim=0),
+
             ),
+            dtype=torch.float64,
+
         )
 
         self.run_class_implementation_tests(
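The explicit dtype is significant because torch.tensor infers the default dtype (float32, unless changed via torch.set_default_dtype) for a plain Python float, presumably so the reference tensor matches the float64 result the metric produces. A quick illustration of the inference rule, assuming the default dtype is unchanged:

import torch

score = 0.75  # e.g. a scalar metric value held as a plain Python float

assert torch.tensor(score).dtype == torch.float32                       # default dtype
assert torch.tensor(score, dtype=torch.float64).dtype == torch.float64  # explicit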
13 changes: 9 additions & 4 deletions tests/metrics/functional/classification/test_accuracy.py
@@ -117,10 +117,15 @@ def test_accuracy_class_average(self) -> None:
         target_flattened = target.flatten()
         accuracy_per_class = np.empty(num_classes)
         for i in range(num_classes):
-            accuracy_per_class[i] = accuracy_score(
-                target_flattened[target_flattened == i].numpy(),
-                input_flattened[target_flattened == i].numpy(),
-            )
+            mask = target_flattened == i
+            if mask.sum().item() == 0:
+                accuracy_per_class[i] = np.nan
+            else:
+                accuracy_per_class[i] = accuracy_score(
+                    target_flattened[mask].numpy(),
+                    input_flattened[mask].numpy(),
+                )
+
 
         torch.testing.assert_close(
             multiclass_accuracy(
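With NaN now possible in the reference values, any comparison against the metric's output must treat NaN as equal to NaN, since NaN != NaN under ordinary floating-point semantics. A small sketch with hypothetical values (not from this PR):

import torch

expected = torch.tensor([0.5, float("nan"), 1.0])
actual = torch.tensor([0.5, float("nan"), 1.0])

assert not torch.allclose(actual, expected)              # NaN != NaN by default
assert torch.allclose(actual, expected, equal_nan=True)  # opt in to NaN == NaN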