[AOTI][dashboard] Skip torchbench models not supported by export (#148359)

Summary: Certain models fail in export because of data-dependent ops. Skip them so that the oncall can better track the AOTInductor dashboard.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/148359
Approved by: https://github.com/angelayi, https://github.com/ysiraichi
Commit: d10bacd4ce (parent: d91a634edf)
Author: Bin Bao, 2025-03-03 12:14:01 -08:00; committed by PyTorch MergeBot
3 changed files with 19 additions and 0 deletions
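For context on how the pieces fit together, here is a minimal illustrative sketch (not code from this PR; the Runner class and the config literal below are simplified stand-ins for the benchmarks/dynamo harness and its YAML skip config):

# Stand-in for the parsed "skip" section of the benchmark YAML config.
skip_config = {"export_not_supported": ["llama", "vision_maskrcnn"]}


class Runner:
    def __init__(self, skip_config):
        self._skip = skip_config
        self.skip_models = set()

    @property
    def skip_models_due_to_export_not_supported(self):
        # Mirrors the property added in this PR: expose the YAML list as a set.
        return set(self._skip["export_not_supported"])


runner = Runner(skip_config)
# When the AOTInductor export path is selected, the harness unions this set
# into skip_models, so the listed models are never sent through export.
runner.skip_models.update(runner.skip_models_due_to_export_not_supported)
print(sorted(runner.skip_models))  # ['llama', 'vision_maskrcnn']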

File: benchmarks/dynamo/common.py

@@ -1824,6 +1824,10 @@ class BenchmarkRunner:
     def skip_models_due_to_control_flow(self):
         return set()
 
+    @property
+    def skip_models_due_to_export_not_supported(self):
+        return set()
+
     @property
     def disable_cudagraph_models(self):
         return set()
@@ -3786,6 +3790,7 @@ def run(runner, args, original_dir=None):
         # AOTInductor doesn't support control flow yet
         runner.skip_models.update(runner.skip_models_due_to_control_flow)
+        runner.skip_models.update(runner.skip_models_due_to_export_not_supported)
     elif args.backend == "torchao":
         assert "cuda" in args.devices, "Quantization requires CUDA device."
         assert args.bfloat16, "Quantization requires dtype bfloat16."

File: benchmarks/dynamo/torchbench.py

@@ -205,6 +205,10 @@ class TorchBenchmarkRunner(BenchmarkRunner):
     def skip_models_due_to_control_flow(self):
         return self._skip["control_flow"]
 
+    @property
+    def skip_models_due_to_export_not_supported(self):
+        return self._skip["export_not_supported"]
+
     @property
     def guard_on_nn_module_models(self):
         return {

File: benchmarks/dynamo/torchbench.yaml

@@ -242,6 +242,16 @@ skip:
     - opacus_cifar10
     - speech_transformer
 
+  export_not_supported:
+    - doctr_reco_predictor
+    - doctr_det_predictor
+    - drq
+    - llama
+    - sam_fast
+    - soft_actor_critic
+    - timm_efficientdet
+    - vision_maskrcnn
+
   # Models that should only run in --multiprocess mode
   multiprocess:
     - simple_gpt
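As a standalone illustration of the failure mode mentioned in the summary (this snippet is not from the benchmark suite), a model whose control flow depends on tensor values typically cannot be captured by torch.export:

import torch


class DataDependentModel(torch.nn.Module):
    def forward(self, x):
        # The branch depends on the values in x, not just its shape,
        # which export cannot resolve at trace time.
        if x.sum() > 0:
            return x * 2
        return x - 1


try:
    torch.export.export(DataDependentModel(), (torch.randn(4),))
except Exception as e:  # typically a data-dependent guard / graph-break error
    print(f"export failed: {type(e).__name__}")

The models listed under export_not_supported fail for analogous data-dependent reasons, so they are skipped up front rather than showing up as failures on the AOTInductor dashboard.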