[inductor] Remove the batch fusion passes from being a default (#135922)
The Ads team runs an internal search to figure out which fusion passes to use, so these passes no longer need to be enabled by default.
Pull Request resolved: https://github.com/pytorch/pytorch/pull/135922
Approved by: https://github.com/eellison, https://github.com/yanboliang
ghstack dependencies: #135819
This commit is contained in:
parent
9fd54d787d
commit
7dc1788396
```diff
@@ -198,14 +198,7 @@ batch_fusion = True
 # merge_splits_pass
 # mutate_cat_pass
 # split_cat_pass
-pre_grad_fusion_options: Dict[str, Dict[str, Any]] = {
-    "batch_linear": {},
-    "batch_linear_lhs": {},
-    "batch_layernorm": {},
-    "batch_tanh": {},
-    "batch_relu": {},
-    "batch_sigmoid": {},
-}
+pre_grad_fusion_options: Dict[str, Dict[str, Any]] = {}
 
 # Post grad fusion and options, set to empty dict to disable fusion.
 # Call `torch._inductor.fx_passes.group_batch_fusion.list_group_batch_fusions(False)` to see available fusions.
```
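Because the new default for `pre_grad_fusion_options` is an empty dict, the batch fusions removed above no longer run unless a user opts back in. Below is a minimal sketch of opting in, assuming this diff is against `torch._inductor.config` and that the fusion names still match the keys of the removed default dict; the example model is hypothetical.

```python
# Sketch only: re-enable a subset of the pre-grad batch fusions that this
# commit stops turning on by default. Assumes the diff touches
# torch._inductor.config and that these fusion names are still accepted.
import torch
import torch._inductor.config as inductor_config

inductor_config.pre_grad_fusion_options = {
    "batch_linear": {},     # batch independent linear/matmul ops
    "batch_layernorm": {},  # batch independent layer norms
    "batch_relu": {},       # batch independent ReLUs
}

# The diff's comment points at this helper for discovering fusion names;
# per that comment, passing False lists the post-grad fusions.
from torch._inductor.fx_passes.group_batch_fusion import list_group_batch_fusions
print(list_group_batch_fusions(False))

@torch.compile
def fused(x, w1, w2):
    # With the options above, Inductor's pre-grad passes may batch the two
    # independent matmul/ReLU chains instead of compiling them separately.
    return torch.relu(x @ w1) + torch.relu(x @ w2)
```

Setting `pre_grad_fusion_options` back to `{}` (the new default introduced by this diff) disables the pre-grad batch fusions again.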