Skip to content

Commit 583aec7

Browse files
BoyuanFeng authored and meta-codesync[bot] committed
remove old timm models from benchmark (#164805)
Summary: Prune models from TorchInductor dashboard to reduce ci cost. This PR prunes for timm models according to the [doc](https://docs.google.com/document/d/1nLPNNAU-_M9Clx9FMrJ1ycdPxe-xRA54olPnsFzdpoU/edit?tab=t.0), which reduces from 60 to 14 models. X-link: pytorch/pytorch#164805 Approved by: https://github.com/anijain2305, https://github.com/seemethere, https://github.com/huydhn, https://github.com/malfet Reviewed By: izaitsevfb Differential Revision: D84181820 fbshipit-source-id: 13034ebbe8511b819c895467a04415c999b6a173
1 parent 492b0a0 commit 583aec7

File tree

4 files changed

+6
-176
lines changed

4 files changed

+6
-176
lines changed

userbenchmark/dynamo/dynamobench/common.py

Lines changed: 0 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -117,11 +117,7 @@ class CI(NamedTuple):
117117

118118

119119
CI_SKIP_OPTIMIZER = {
120-
# TIMM
121-
"convmixer_768_32", # accuracy
122-
"hrnet_w18", # Stack issue in fx
123120
# HF
124-
"pnasnet5large", # Stack issue in fx
125121
"MobileBertForMaskedLM", # Stack issue in fx
126122
}
127123

@@ -200,25 +196,8 @@ class CI(NamedTuple):
200196
"XGLMForCausalLM",
201197
# TIMM
202198
"adv_inception_v3",
203-
"botnet26t_256",
204-
"cait_m36_384", # OOM
205-
"coat_lite_mini",
206-
"convit_base",
207-
"dpn107",
208-
"fbnetv3_b",
209-
"gernet_l",
210-
"lcnet_050",
211-
"mixnet_l",
212-
"res2net101_26w_4s",
213-
"res2net50_14w_8s",
214-
"res2next50",
215-
"resnest101e",
216-
"sebotnet33ts_256",
217-
"swsl_resnext101_32x16d",
218199
"tf_efficientnet_b0",
219200
"ghostnet_100",
220-
"gmixer_24_224",
221-
"tinynet_a",
222201
}
223202

224203
# These models OOM in CI
@@ -243,25 +222,17 @@ class CI(NamedTuple):
243222
"mobilenet_v2_quantized_qat",
244223
"phi_1_5 resnet50_quantized_qat",
245224
"BlenderbotForCausalLM",
246-
"cait_m36_384",
247225
"DALLE2_pytorch",
248226
"moco",
249227
"timm_efficientdet",
250228
"ghostnet_100",
251-
"regnety_002",
252-
"poolformer_m36",
253229
"inception_v3",
254-
"tinynet_a",
255-
"selecsls42b",
256230
"mobilevit_s",
257231
"pytorch_CycleGAN_and_pix2pix",
258232
"vision_maskrcnn",
259-
"resmlp_12_224",
260233
"dlrm",
261234
"resnet50",
262235
"dm_nfnet_f0",
263-
"pit_b_224",
264-
"tf_mixnet_l",
265236
}
266237

267238

@@ -3782,7 +3753,6 @@ def run(runner, args, original_dir=None):
37823753
os.environ["CUBLAS_WORKSPACE_CONFIG"] = ":4096:8"
37833754
if args.only is not None and args.only in {
37843755
"nvidia_deeprecommender",
3785-
"crossvit_9_240",
37863756
}:
37873757
# These seem unhappy with numerics of larger cuBLASLt workspace
37883758
torch.backends.cuda.matmul.allow_bf16_reduced_precision_reduction = False
@@ -3810,7 +3780,6 @@ def run(runner, args, original_dir=None):
38103780
runner.skip_models.update(
38113781
{
38123782
# xfail: https://github.com/pytorch/pytorch/issues/145773
3813-
"convit_base",
38143783
"llama",
38153784
"cm3leon_generate",
38163785
}

userbenchmark/dynamo/dynamobench/timm_models.py

Lines changed: 6 additions & 53 deletions
Original file line numberDiff line numberDiff line change
@@ -60,84 +60,41 @@ def pip_install(package):
6060

6161
BATCH_SIZE_DIVISORS = {
6262
"beit_base_patch16_224": 2,
63-
"convit_base": 2,
64-
"convmixer_768_32": 2,
65-
"convnext_base": 2,
66-
"cspdarknet53": 2,
6763
"deit_base_distilled_patch16_224": 2,
6864
"gluon_xception65": 2,
6965
"mobilevit_s": 2,
70-
"pnasnet5large": 2,
71-
"poolformer_m36": 2,
72-
"resnest101e": 2,
7366
"swin_base_patch4_window7_224": 2,
74-
"swsl_resnext101_32x16d": 2,
75-
"vit_base_patch16_224": 2,
76-
"volo_d1_224": 2,
77-
"jx_nest_base": 4,
7867
}
7968

8069
REQUIRE_HIGHER_TOLERANCE = {
81-
"crossvit_9_240",
82-
"fbnetv3_b",
83-
"gmixer_24_224",
84-
"hrnet_w18",
8570
"inception_v3",
86-
"mixer_b16_224",
8771
"mobilenetv3_large_100",
88-
"sebotnet33ts_256",
89-
"selecsls42b",
90-
"convnext_base",
91-
"cait_m36_384",
9272
}
9373

94-
REQUIRE_HIGHER_TOLERANCE_AMP = {
95-
"poolformer_m36",
96-
}
74+
REQUIRE_HIGHER_TOLERANCE_AMP = {}
9775

9876
REQUIRE_EVEN_HIGHER_TOLERANCE = {
99-
"levit_128",
100-
"sebotnet33ts_256",
10177
"beit_base_patch16_224",
102-
"cspdarknet53",
10378
}
10479

10580
# These models need higher tolerance in MaxAutotune mode
106-
REQUIRE_EVEN_HIGHER_TOLERANCE_MAX_AUTOTUNE = {
107-
"gluon_inception_v3",
108-
}
81+
REQUIRE_EVEN_HIGHER_TOLERANCE_MAX_AUTOTUNE = {}
10982

11083
REQUIRE_HIGHER_TOLERANCE_FOR_FREEZING = {
11184
"adv_inception_v3",
112-
"botnet26t_256",
113-
"gluon_inception_v3",
114-
"selecsls42b",
115-
"swsl_resnext101_32x16d",
11685
}
11786

11887
SCALED_COMPUTE_LOSS = {
119-
"ese_vovnet19b_dw",
120-
"fbnetc_100",
121-
"mnasnet_100",
12288
"mobilevit_s",
123-
"sebotnet33ts_256",
12489
}
12590

126-
FORCE_AMP_FOR_FP16_BF16_MODELS = {
127-
"convit_base",
128-
"xcit_large_24_p8_224",
129-
}
91+
FORCE_AMP_FOR_FP16_BF16_MODELS = {}
13092

131-
SKIP_ACCURACY_CHECK_AS_EAGER_NON_DETERMINISTIC_MODELS = {
132-
"xcit_large_24_p8_224",
133-
}
93+
SKIP_ACCURACY_CHECK_AS_EAGER_NON_DETERMINISTIC_MODELS = {}
13494

13595
REQUIRE_LARGER_MULTIPLIER_FOR_SMALLER_TENSOR = {
13696
"inception_v3",
13797
"mobilenetv3_large_100",
138-
"cspdarknet53",
139-
"gluon_inception_v3",
140-
"cait_m36_384",
14198
}
14299

143100

@@ -269,15 +226,11 @@ def skip_accuracy_check_as_eager_non_deterministic(self):
269226

270227
@property
271228
def guard_on_nn_module_models(self):
272-
return {
273-
"convit_base",
274-
}
229+
return {}
275230

276231
@property
277232
def inline_inbuilt_nn_modules_models(self):
278-
return {
279-
"lcnet_050",
280-
}
233+
return {}
281234

282235
@download_retry_decorator
283236
def _download_model(self, model_name):
Lines changed: 0 additions & 47 deletions
Original file line numberDiff line numberDiff line change
@@ -1,61 +1,14 @@
11
adv_inception_v3 128
22
beit_base_patch16_224 128
3-
botnet26t_256 128
4-
cait_m36_384 4
5-
coat_lite_mini 128
6-
convit_base 128
7-
convmixer_768_32 64
8-
convnext_base 128
9-
crossvit_9_240 256
10-
cspdarknet53 128
113
deit_base_distilled_patch16_224 128
12-
dla102 128
134
dm_nfnet_f0 128
14-
dpn107 64
15-
eca_botnext26ts_256 128
16-
eca_halonext26ts 128
17-
ese_vovnet19b_dw 256
18-
fbnetc_100 512
19-
fbnetv3_b 256
20-
gernet_l 128
215
ghostnet_100 512
22-
gluon_inception_v3 256
23-
gmixer_24_224 128
24-
gmlp_s16_224 128
25-
hrnet_w18 128
266
inception_v3 128
27-
jx_nest_base 128
28-
lcnet_050 256
29-
levit_128 1024
30-
mixer_b16_224 128
31-
mixnet_l 128
32-
mnasnet_100 512
337
mobilenetv2_100 128
348
mobilenetv3_large_100 512
359
mobilevit_s 128
3610
nfnet_l0 128
37-
pit_b_224 64
38-
pnasnet5large 32
39-
poolformer_m36 128
40-
regnety_002 1024
4111
repvgg_a2 128
42-
res2net101_26w_4s 128
43-
res2net50_14w_8s 128
44-
res2next50 128
45-
resmlp_12_224 128
46-
resnest101e 128
47-
rexnet_100 256
48-
sebotnet33ts_256 64
49-
selecsls42b 128
50-
spnasnet_100 128
5112
swin_base_patch4_window7_224 128
52-
swsl_resnext101_32x16d 64
5313
tf_efficientnet_b0 128
54-
tf_mixnet_l 128
55-
tinynet_a 128
56-
tnt_s_patch16_224 128
57-
twins_pcpvt_base 128
5814
visformer_small 128
59-
vit_base_patch16_224 128
60-
volo_d1_224 128
61-
xcit_large_24_p8_224 16
Lines changed: 0 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -1,59 +1,14 @@
11
adv_inception_v3,128
22
beit_base_patch16_224,64
3-
botnet26t_256,128
4-
cait_m36_384,4
5-
coat_lite_mini,32
6-
convit_base,64
7-
convmixer_768_32,2
8-
convnext_base,64
9-
crossvit_9_240,32
10-
cspdarknet53,64
113
deit_base_distilled_patch16_224,64
124
dm_nfnet_f0,128
13-
dpn107,32
14-
eca_botnext26ts_256,128
15-
eca_halonext26ts,128
16-
ese_vovnet19b_dw,128
17-
fbnetc_100,32
18-
fbnetv3_b,32
19-
gernet_l,128
205
ghostnet_100,128
21-
gluon_inception_v3,128
22-
gmixer_24_224,16
23-
gmlp_s16_224,128
24-
hrnet_w18,128
256
inception_v3,128
26-
jx_nest_base,32
27-
lcnet_050,64
28-
mixer_b16_224,128
29-
mixnet_l,128
30-
mnasnet_100,32
317
mobilenetv2_100,32
328
mobilenetv3_large_100,32
339
mobilevit_s,256
3410
nfnet_l0,128
35-
pit_b_224,64
36-
pnasnet5large,16
37-
poolformer_m36,64
38-
regnety_002,128
3911
repvgg_a2,128
40-
res2net101_26w_4s,64
41-
res2net50_14w_8s,128
42-
res2next50,128
43-
resmlp_12_224,128
44-
resnest101e,64
45-
rexnet_100,128
46-
sebotnet33ts_256,64
47-
selecsls42b,128
48-
spnasnet_100,32
4912
swin_base_patch4_window7_224,64
50-
swsl_resnext101_32x16d,32
5113
tf_efficientnet_b0,128
52-
tf_mixnet_l,32
53-
tinynet_a,128
54-
tnt_s_patch16_224,32
55-
twins_pcpvt_base,64
5614
visformer_small,128
57-
vit_base_patch16_224,64
58-
volo_d1_224,64
59-
xcit_large_24_p8_224,5

0 commit comments

Comments (0)