
Commit 4e44d5b

NicolasHug authored and facebook-github-bot committed
[fbsync] update naming feature -> datapoint in prototype test suite (#7117)
Reviewed By: YosuaMichael
Differential Revision: D42706911
fbshipit-source-id: c4c6cabe39e1b4015fbd8e6613c13ca5b86b84b7
1 parent 140a480 commit 4e44d5b

5 files changed: +69 -67 lines
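
For orientation, this commit is a pure rename: the tensor subclasses previously referred to as "features" in the prototype test suite are now called "datapoints", matching the `torchvision.prototype.datapoints` namespace the tests already import. A minimal sketch of the objects being renamed, assuming a torchvision checkout that still ships the prototype API from this commit (later releases expose these classes elsewhere):

# Sketch only -- not part of the diff. Assumes the prototype API as of this commit.
import torch
from torchvision.prototype import datapoints

# A "datapoint" is a torch.Tensor subclass that carries metadata used for dispatch.
image = datapoints.Image(torch.rand(3, 16, 16))
mask = datapoints.Mask(torch.zeros(16, 16, dtype=torch.uint8))

# The renamed tests key behaviour off the datapoint type rather than a "feature" type.
for datapoint in (image, mask):
    print(type(datapoint).__name__)  # "Image", "Mask"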

test/prototype_transforms_dispatcher_infos.py

Lines changed: 15 additions & 15 deletions
@@ -44,19 +44,19 @@ def __init__(
         self.pil_kernel_info = pil_kernel_info

         kernel_infos = {}
-        for feature_type, kernel in self.kernels.items():
+        for datapoint_type, kernel in self.kernels.items():
             kernel_info = self._KERNEL_INFO_MAP.get(kernel)
             if not kernel_info:
                 raise pytest.UsageError(
-                    f"Can't register {kernel.__name__} for type {feature_type} since there is no `KernelInfo` for it. "
+                    f"Can't register {kernel.__name__} for type {datapoint_type} since there is no `KernelInfo` for it. "
                     f"Please add a `KernelInfo` for it in `prototype_transforms_kernel_infos.py`."
                 )
-            kernel_infos[feature_type] = kernel_info
+            kernel_infos[datapoint_type] = kernel_info
         self.kernel_infos = kernel_infos

-    def sample_inputs(self, *feature_types, filter_metadata=True):
-        for feature_type in feature_types or self.kernel_infos.keys():
-            kernel_info = self.kernel_infos.get(feature_type)
+    def sample_inputs(self, *datapoint_types, filter_metadata=True):
+        for datapoint_type in datapoint_types or self.kernel_infos.keys():
+            kernel_info = self.kernel_infos.get(datapoint_type)
             if not kernel_info:
                 raise pytest.UsageError(f"There is no kernel registered for type {type.__name__}")

@@ -66,7 +66,7 @@ def sample_inputs(self, *feature_types, filter_metadata=True):
                 yield from sample_inputs
             else:
                 for args_kwargs in sample_inputs:
-                    for attribute in feature_type.__annotations__.keys():
+                    for attribute in datapoint_type.__annotations__.keys():
                         if attribute in args_kwargs.kwargs:
                             del args_kwargs.kwargs[attribute]

@@ -107,9 +107,9 @@ def xfail_jit_list_of_ints(name, *, reason=None):
 )


-skip_dispatch_feature = TestMark(
-    ("TestDispatchers", "test_dispatch_feature"),
-    pytest.mark.skip(reason="Dispatcher doesn't support arbitrary feature dispatch."),
+skip_dispatch_datapoint = TestMark(
+    ("TestDispatchers", "test_dispatch_datapoint"),
+    pytest.mark.skip(reason="Dispatcher doesn't support arbitrary datapoint dispatch."),
 )


@@ -352,7 +352,7 @@ def fill_sequence_needs_broadcast(args_kwargs):
         },
         pil_kernel_info=PILKernelInfo(F.erase_image_pil),
         test_marks=[
-            skip_dispatch_feature,
+            skip_dispatch_datapoint,
         ],
     ),
     DispatcherInfo(
@@ -404,7 +404,7 @@ def fill_sequence_needs_broadcast(args_kwargs):
         pil_kernel_info=PILKernelInfo(F.five_crop_image_pil),
         test_marks=[
             xfail_jit_python_scalar_arg("size"),
-            skip_dispatch_feature,
+            skip_dispatch_datapoint,
         ],
     ),
     DispatcherInfo(
@@ -415,7 +415,7 @@ def fill_sequence_needs_broadcast(args_kwargs):
         },
         test_marks=[
             xfail_jit_python_scalar_arg("size"),
-            skip_dispatch_feature,
+            skip_dispatch_datapoint,
         ],
         pil_kernel_info=PILKernelInfo(F.ten_crop_image_pil),
     ),
@@ -437,7 +437,7 @@ def fill_sequence_needs_broadcast(args_kwargs):
             datapoints.Video: F.convert_dtype_video,
         },
         test_marks=[
-            skip_dispatch_feature,
+            skip_dispatch_datapoint,
         ],
     ),
     DispatcherInfo(
@@ -446,7 +446,7 @@ def fill_sequence_needs_broadcast(args_kwargs):
             datapoints.Video: F.uniform_temporal_subsample_video,
         },
         test_marks=[
-            skip_dispatch_feature,
+            skip_dispatch_datapoint,
         ],
     ),
 ]
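
Usage note (a sketch, not part of this file): after the rename, `DispatcherInfo.sample_inputs` takes datapoint types rather than "feature" types; the functional tests below call it as `info.sample_inputs(datapoints.Image)`. Roughly:

# Hypothetical consumer of the renamed helper; `info` is assumed to be a
# DispatcherInfo from DISPATCHER_INFOS in this file.
from torchvision.prototype import datapoints

def image_args_kwargs(info):
    # With filter_metadata=True (the default), kwargs that match the datapoint
    # type's annotated metadata are stripped, since the dispatcher derives them
    # from the datapoint itself rather than taking them explicitly.
    yield from info.sample_inputs(datapoints.Image)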

test/test_prototype_datapoints.py

Lines changed: 1 addition & 1 deletion
@@ -28,7 +28,7 @@ def test_to_wrapping():
     assert label_to.categories is label.categories


-def test_to_feature_reference():
+def test_to_datapoint_reference():
     tensor = torch.tensor([0, 1, 0], dtype=torch.int64)
     label = datapoints.Label(tensor, categories=["foo", "bar"]).to(torch.int32)

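For context on what the renamed test exercises: the surrounding `test_to_wrapping` asserts that `.to()` returns a wrapped datapoint whose metadata is shared with the original. A hedged sketch of that behaviour, under the same prototype-API assumption as above:

# Sketch of the wrapping behaviour asserted around the renamed test
# (test_to_wrapping checks `label_to.categories is label.categories`).
import torch
from torchvision.prototype import datapoints

label = datapoints.Label(torch.tensor([0, 1, 0], dtype=torch.int64), categories=["foo", "bar"])
label_to = label.to(torch.int32)

assert isinstance(label_to, datapoints.Label)  # .to() keeps the datapoint wrapper
assert label_to.categories is label.categories  # metadata is shared, not copied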

test/test_prototype_transforms.py

Lines changed: 6 additions & 6 deletions
@@ -285,23 +285,23 @@ def test_pil_image(self, p):

         assert_equal(expected, pil_to_tensor(actual))

-    def test_features_image(self, p):
+    def test_datapoints_image(self, p):
         input, expected = self.input_expected_image_tensor(p)
         transform = transforms.RandomHorizontalFlip(p=p)

         actual = transform(datapoints.Image(input))

         assert_equal(datapoints.Image(expected), actual)

-    def test_features_mask(self, p):
+    def test_datapoints_mask(self, p):
         input, expected = self.input_expected_image_tensor(p)
         transform = transforms.RandomHorizontalFlip(p=p)

         actual = transform(datapoints.Mask(input))

         assert_equal(datapoints.Mask(expected), actual)

-    def test_features_bounding_box(self, p):
+    def test_datapoints_bounding_box(self, p):
         input = datapoints.BoundingBox([0, 0, 5, 5], format=datapoints.BoundingBoxFormat.XYXY, spatial_size=(10, 10))
         transform = transforms.RandomHorizontalFlip(p=p)

@@ -338,23 +338,23 @@ def test_pil_image(self, p):

         assert_equal(expected, pil_to_tensor(actual))

-    def test_features_image(self, p):
+    def test_datapoints_image(self, p):
         input, expected = self.input_expected_image_tensor(p)
         transform = transforms.RandomVerticalFlip(p=p)

         actual = transform(datapoints.Image(input))

         assert_equal(datapoints.Image(expected), actual)

-    def test_features_mask(self, p):
+    def test_datapoints_mask(self, p):
         input, expected = self.input_expected_image_tensor(p)
         transform = transforms.RandomVerticalFlip(p=p)

         actual = transform(datapoints.Mask(input))

         assert_equal(datapoints.Mask(expected), actual)

-    def test_features_bounding_box(self, p):
+    def test_datapoints_bounding_box(self, p):
         input = datapoints.BoundingBox([0, 0, 5, 5], format=datapoints.BoundingBoxFormat.XYXY, spatial_size=(10, 10))
         transform = transforms.RandomVerticalFlip(p=p)

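The renamed tests all follow the same pattern: wrap a tensor in a datapoint class, run the transform, and check that both the values and the datapoint type survive. A self-contained sketch of that pattern, assuming the prototype `transforms` and `datapoints` modules from this commit:

# Sketch of the test pattern used by the renamed test_datapoints_* methods.
import torch
from torchvision.prototype import datapoints, transforms

input_tensor = torch.rand(3, 10, 10)
expected = torch.flip(input_tensor, dims=(-1,))  # horizontal flip mirrors the width axis

actual = transforms.RandomHorizontalFlip(p=1.0)(datapoints.Image(input_tensor))

assert isinstance(actual, datapoints.Image)  # the datapoint subclass is preserved
torch.testing.assert_close(actual.as_subclass(torch.Tensor), expected)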

test/test_prototype_transforms_consistency.py

Lines changed: 10 additions & 10 deletions
@@ -558,15 +558,15 @@ def check_call_consistency(
             output_prototype_image = prototype_transform(image)
         except Exception as exc:
             raise AssertionError(
-                f"Transforming a feature image with shape {image_repr} failed in the prototype transform with "
+                f"Transforming a image datapoint with shape {image_repr} failed in the prototype transform with "
                 f"the error above. This means there is a consistency bug either in `_get_params` or in the "
-                f"`features.Image` path in `_transform`."
+                f"`datapoints.Image` path in `_transform`."
             ) from exc

         assert_close(
             output_prototype_image,
             output_prototype_tensor,
-            msg=lambda msg: f"Output for feature and tensor images is not equal: \n\n{msg}",
+            msg=lambda msg: f"Output for datapoint and tensor images is not equal: \n\n{msg}",
             **closeness_kwargs,
         )

@@ -931,15 +931,15 @@ def make_datapoints(self, with_mask=True):

         yield (tensor_image, target)

-        feature_image = make_image(size=size, color_space=datapoints.ColorSpace.RGB)
+        datapoint_image = make_image(size=size, color_space=datapoints.ColorSpace.RGB)
         target = {
             "boxes": make_bounding_box(spatial_size=size, format="XYXY", extra_dims=(num_objects,), dtype=torch.float),
             "labels": make_label(extra_dims=(num_objects,), categories=80),
         }
         if with_mask:
             target["masks"] = make_detection_mask(size=size, num_objects=num_objects, dtype=torch.long)

-        yield (feature_image, target)
+        yield (datapoint_image, target)

     @pytest.mark.parametrize(
         "t_ref, t, data_kwargs",
@@ -1015,13 +1015,13 @@ def make_datapoints(self, supports_pil=True, image_dtype=torch.uint8):
             conv_fns.extend([torch.Tensor, lambda x: x])

         for conv_fn in conv_fns:
-            feature_image = make_image(size=size, color_space=datapoints.ColorSpace.RGB, dtype=image_dtype)
-            feature_mask = make_segmentation_mask(size=size, num_categories=num_categories, dtype=torch.uint8)
+            datapoint_image = make_image(size=size, color_space=datapoints.ColorSpace.RGB, dtype=image_dtype)
+            datapoint_mask = make_segmentation_mask(size=size, num_categories=num_categories, dtype=torch.uint8)

-            dp = (conv_fn(feature_image), feature_mask)
+            dp = (conv_fn(datapoint_image), datapoint_mask)
             dp_ref = (
-                to_image_pil(feature_image) if supports_pil else feature_image.as_subclass(torch.Tensor),
-                to_image_pil(feature_mask),
+                to_image_pil(datapoint_image) if supports_pil else datapoint_image.as_subclass(torch.Tensor),
+                to_image_pil(datapoint_mask),
             )

             yield dp, dp_ref

test/test_prototype_transforms_functional.py

Lines changed: 37 additions & 35 deletions
@@ -162,7 +162,7 @@ def _unbatch(self, batch, *, data_dims):
     def test_batched_vs_single(self, test_id, info, args_kwargs, device):
         (batched_input, *other_args), kwargs = args_kwargs.load(device)

-        feature_type = (
+        datapoint_type = (
             datapoints.Image
             if torchvision.prototype.transforms.utils.is_simple_tensor(batched_input)
             else type(batched_input)
@@ -178,10 +178,10 @@ def test_batched_vs_single(self, test_id, info, args_kwargs, device):
             # common ground.
             datapoints.Mask: 2,
             datapoints.Video: 4,
-        }.get(feature_type)
+        }.get(datapoint_type)
         if data_dims is None:
             raise pytest.UsageError(
-                f"The number of data dimensions cannot be determined for input of type {feature_type.__name__}."
+                f"The number of data dimensions cannot be determined for input of type {datapoint_type.__name__}."
             ) from None
         elif batched_input.ndim <= data_dims:
             pytest.skip("Input is not batched.")
@@ -323,8 +323,8 @@ def test_logging(self, spy_on, info, args_kwargs, device):
     def test_scripted_smoke(self, info, args_kwargs, device):
         dispatcher = script(info.dispatcher)

-        (image_feature, *other_args), kwargs = args_kwargs.load(device)
-        image_simple_tensor = torch.Tensor(image_feature)
+        (image_datapoint, *other_args), kwargs = args_kwargs.load(device)
+        image_simple_tensor = torch.Tensor(image_datapoint)

         dispatcher(image_simple_tensor, *other_args, **kwargs)

@@ -352,8 +352,8 @@ def test_scriptable(self, dispatcher):

     @image_sample_inputs
     def test_dispatch_simple_tensor(self, info, args_kwargs, spy_on):
-        (image_feature, *other_args), kwargs = args_kwargs.load()
-        image_simple_tensor = torch.Tensor(image_feature)
+        (image_datapoint, *other_args), kwargs = args_kwargs.load()
+        image_simple_tensor = torch.Tensor(image_datapoint)

         kernel_info = info.kernel_infos[datapoints.Image]
         spy = spy_on(kernel_info.kernel, module=info.dispatcher.__module__, name=kernel_info.id)
@@ -367,12 +367,12 @@ def test_dispatch_simple_tensor(self, info, args_kwargs, spy_on):
         args_kwargs_fn=lambda info: info.sample_inputs(datapoints.Image),
     )
     def test_dispatch_pil(self, info, args_kwargs, spy_on):
-        (image_feature, *other_args), kwargs = args_kwargs.load()
+        (image_datapoint, *other_args), kwargs = args_kwargs.load()

-        if image_feature.ndim > 3:
+        if image_datapoint.ndim > 3:
             pytest.skip("Input is batched")

-        image_pil = F.to_image_pil(image_feature)
+        image_pil = F.to_image_pil(image_datapoint)

         pil_kernel_info = info.pil_kernel_info
         spy = spy_on(pil_kernel_info.kernel, module=info.dispatcher.__module__, name=pil_kernel_info.id)
@@ -385,37 +385,39 @@ def test_dispatch_pil(self, info, args_kwargs, spy_on):
         DISPATCHER_INFOS,
         args_kwargs_fn=lambda info: info.sample_inputs(),
     )
-    def test_dispatch_feature(self, info, args_kwargs, spy_on):
-        (feature, *other_args), kwargs = args_kwargs.load()
+    def test_dispatch_datapoint(self, info, args_kwargs, spy_on):
+        (datapoint, *other_args), kwargs = args_kwargs.load()

         method_name = info.id
-        method = getattr(feature, method_name)
-        feature_type = type(feature)
-        spy = spy_on(method, module=feature_type.__module__, name=f"{feature_type.__name__}.{method_name}")
+        method = getattr(datapoint, method_name)
+        datapoint_type = type(datapoint)
+        spy = spy_on(method, module=datapoint_type.__module__, name=f"{datapoint_type.__name__}.{method_name}")

-        info.dispatcher(feature, *other_args, **kwargs)
+        info.dispatcher(datapoint, *other_args, **kwargs)

         spy.assert_called_once()

     @pytest.mark.parametrize(
-        ("dispatcher_info", "feature_type", "kernel_info"),
+        ("dispatcher_info", "datapoint_type", "kernel_info"),
         [
-            pytest.param(dispatcher_info, feature_type, kernel_info, id=f"{dispatcher_info.id}-{feature_type.__name__}")
+            pytest.param(
+                dispatcher_info, datapoint_type, kernel_info, id=f"{dispatcher_info.id}-{datapoint_type.__name__}"
+            )
             for dispatcher_info in DISPATCHER_INFOS
-            for feature_type, kernel_info in dispatcher_info.kernel_infos.items()
+            for datapoint_type, kernel_info in dispatcher_info.kernel_infos.items()
         ],
     )
-    def test_dispatcher_kernel_signatures_consistency(self, dispatcher_info, feature_type, kernel_info):
+    def test_dispatcher_kernel_signatures_consistency(self, dispatcher_info, datapoint_type, kernel_info):
         dispatcher_signature = inspect.signature(dispatcher_info.dispatcher)
         dispatcher_params = list(dispatcher_signature.parameters.values())[1:]

         kernel_signature = inspect.signature(kernel_info.kernel)
         kernel_params = list(kernel_signature.parameters.values())[1:]

-        # We filter out metadata that is implicitly passed to the dispatcher through the input feature, but has to be
+        # We filter out metadata that is implicitly passed to the dispatcher through the input datapoint, but has to be
         # explicit passed to the kernel.
-        feature_type_metadata = feature_type.__annotations__.keys()
-        kernel_params = [param for param in kernel_params if param.name not in feature_type_metadata]
+        datapoint_type_metadata = datapoint_type.__annotations__.keys()
+        kernel_params = [param for param in kernel_params if param.name not in datapoint_type_metadata]

         dispatcher_params = iter(dispatcher_params)
         for dispatcher_param, kernel_param in zip(dispatcher_params, kernel_params):
@@ -433,26 +435,26 @@ def test_dispatcher_kernel_signatures_consistency(self, dispatcher_info, feature
             assert dispatcher_param == kernel_param

     @pytest.mark.parametrize("info", DISPATCHER_INFOS, ids=lambda info: info.id)
-    def test_dispatcher_feature_signatures_consistency(self, info):
+    def test_dispatcher_datapoint_signatures_consistency(self, info):
         try:
-            feature_method = getattr(datapoints._datapoint.Datapoint, info.id)
+            datapoint_method = getattr(datapoints._datapoint.Datapoint, info.id)
         except AttributeError:
-            pytest.skip("Dispatcher doesn't support arbitrary feature dispatch.")
+            pytest.skip("Dispatcher doesn't support arbitrary datapoint dispatch.")

         dispatcher_signature = inspect.signature(info.dispatcher)
         dispatcher_params = list(dispatcher_signature.parameters.values())[1:]

-        feature_signature = inspect.signature(feature_method)
-        feature_params = list(feature_signature.parameters.values())[1:]
+        datapoint_signature = inspect.signature(datapoint_method)
+        datapoint_params = list(datapoint_signature.parameters.values())[1:]

-        # Because we use `from __future__ import annotations` inside the module where `features._datapoint` is defined,
-        # the annotations are stored as strings. This makes them concrete again, so they can be compared to the natively
-        # concrete dispatcher annotations.
-        feature_annotations = get_type_hints(feature_method)
-        for param in feature_params:
-            param._annotation = feature_annotations[param.name]
+        # Because we use `from __future__ import annotations` inside the module where `datapoints._datapoint` is
+        # defined, the annotations are stored as strings. This makes them concrete again, so they can be compared to the
+        # natively concrete dispatcher annotations.
+        datapoint_annotations = get_type_hints(datapoint_method)
+        for param in datapoint_params:
+            param._annotation = datapoint_annotations[param.name]

-        assert dispatcher_params == feature_params
+        assert dispatcher_params == datapoint_params

     @pytest.mark.parametrize("info", DISPATCHER_INFOS, ids=lambda info: info.id)
     def test_unkown_type(self, info):
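
For readers unfamiliar with the mechanism `test_dispatch_datapoint` covers: given a datapoint input, the dispatcher defers to the method of the same name on the datapoint itself, which is what the spy asserts. A deliberately simplified, hypothetical model of that shape (plain Python, not torchvision's actual implementation):

# Hypothetical illustration of dispatch-by-datapoint; all names here are made up.
class Datapoint:
    def horizontal_flip(self):
        raise NotImplementedError

class Image(Datapoint):
    def horizontal_flip(self):
        # a real datapoint would call the corresponding kernel here
        return self

def horizontal_flip(inpt):
    # Dispatcher: datapoint inputs are routed through their own method,
    # mirroring `info.dispatcher(datapoint, ...)` calling `Datapoint.<info.id>`.
    if isinstance(inpt, Datapoint):
        return inpt.horizontal_flip()
    raise TypeError(f"unsupported input of type {type(inpt).__name__}")

horizontal_flip(Image())  # hits Image.horizontal_flip, which the test spies on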
