@@ -25,7 +25,7 @@
 
 # hyperparameters, which are passed into the training job
 hyperparameters = {
-    "model_name_or_path": "hf-internal-testing/tiny-random-RobertaModel",
+    "model_name_or_path": "hf-internal-testing/tiny-random-DistilBertModel",
     "task_name": "mnli",
     "per_device_train_batch_size": 2,  # batch size must be divisible by the number of microbatches
     "per_device_eval_batch_size": 2,
@@ -68,7 +68,7 @@ def get_transformers_version_from_image_uri(ecr_image):
 
 @pytest.mark.processor("gpu")
 @pytest.mark.integration("smmp")
-@pytest.mark.model("hf_qa_smmp")
+@pytest.mark.model("hf_distilbert_smmp")
 @pytest.mark.skip_cpu
 @pytest.mark.skip_py2_containers
 @pytest.mark.skip_trcomp_containers
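
The `model` mark renamed here is a custom pytest marker, so the rename matters to anyone selecting tests with a marker expression (e.g. `pytest -m hf_distilbert_smmp`). A hypothetical sketch of how such custom marks are registered so pytest does not warn about them, assuming a conftest.py that is not part of this diff:

```python
# conftest.py (hypothetical; not shown in this diff). Custom marks such as
# model/integration/processor must be registered, otherwise pytest emits
# PytestUnknownMarkWarning at collection time.
def pytest_configure(config):
    config.addinivalue_line("markers", "model(name): model exercised by the test")
    config.addinivalue_line("markers", "integration(name): feature under test, e.g. smmp")
    config.addinivalue_line("markers", "processor(name): target processor, cpu or gpu")
```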
@@ -85,7 +85,7 @@ def test_smmp_gpu(
 
 @pytest.mark.processor("gpu")
 @pytest.mark.integration("smmp")
-@pytest.mark.model("hf_qa_smmp_multi")
+@pytest.mark.model("hf_distilbert_smmp_multi")
 @pytest.mark.skip_cpu
 @pytest.mark.skip_py2_containers
 @pytest.mark.multinode(2)
@@ -126,4 +126,6 @@ def _test_smmp_gpu_function(ecr_image, sagemaker_session, py_version, instances_
         hyperparameters=hyperparameters,
         sagemaker_session=sagemaker_session,
     )
-    huggingface_estimator.fit(job_name=sagemaker.utils.unique_name_from_base("test-hf-pt-qa-smmp"))
+    huggingface_estimator.fit(
+        job_name=sagemaker.utils.unique_name_from_base("test-hf-pt-text-classif-smmp")
+    )
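
The final hunk shows only the tail of the estimator construction. A minimal sketch of how such a call is typically assembled with the SageMaker Python SDK; `entry_point`, `role`, the instance type, framework versions, and the smdistributed parameters below are illustrative assumptions, not values taken from this file:

```python
# Hypothetical reconstruction of the surrounding test code; only the
# hyperparameters dict and the fit() call are taken from this diff.
import sagemaker
from sagemaker.huggingface import HuggingFace

sagemaker_session = sagemaker.Session()  # in the test this comes from a pytest fixture

hyperparameters = {
    "model_name_or_path": "hf-internal-testing/tiny-random-DistilBertModel",
    "task_name": "mnli",
    "per_device_train_batch_size": 2,  # must be divisible by the number of microbatches
    "per_device_eval_batch_size": 2,
}

# SageMaker model parallelism ("smmp") is enabled via the distribution argument.
distribution = {
    "smdistributed": {
        "modelparallel": {
            "enabled": True,
            "parameters": {"partitions": 2, "microbatches": 2},  # assumed values
        }
    },
    "mpi": {"enabled": True, "processes_per_host": 8},  # assumed value
}

huggingface_estimator = HuggingFace(
    entry_point="run_glue.py",       # assumed training script
    role="SageMakerRole",            # assumed IAM role name
    instance_type="ml.p3.16xlarge",  # assumed GPU instance
    instance_count=1,
    transformers_version="4.6",      # assumed framework versions
    pytorch_version="1.7",
    py_version="py36",
    distribution=distribution,
    hyperparameters=hyperparameters,
    sagemaker_session=sagemaker_session,
)
huggingface_estimator.fit(
    job_name=sagemaker.utils.unique_name_from_base("test-hf-pt-text-classif-smmp")
)
```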