[MegaCache] Return None on no compilation (#151921)
Pull Request resolved: https://github.com/pytorch/pytorch/pull/151921
Approved by: https://github.com/jamesjwu
parent 78bbb468c6
commit f9bdfe90ae
@@ -584,6 +584,9 @@ class TestFxGraphCache(TestCase):
         f(torch.randn(2, 6))
         self.assertEqual(backend.frame_count, 1)
 
+    def test_cache_hot_load_empty(self):
+        self.assertIsNone(torch.compiler.save_cache_artifacts())
+
     @requires_triton()
     @config.patch({"fx_graph_cache": True})
     @config.patch({"fx_graph_remote_cache": False})
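For context, a minimal caller-side sketch (not part of this PR; the helper name and the assumed (bytes, CacheInfo) return pair are illustrative assumptions) of how the new None return can be handled when persisting Mega-Cache artifacts:

import torch

def persist_compile_cache(path: str) -> bool:
    # Hypothetical helper: save_cache_artifacts() is assumed to return an
    # optional (bytes, CacheInfo) pair; after this change it returns None
    # when no compilation happened in the current process.
    result = torch.compiler.save_cache_artifacts()
    if result is None:
        # Nothing was compiled, so there is nothing worth writing out.
        return False
    artifact_bytes, cache_info = result
    with open(path, "wb") as f:
        f.write(artifact_bytes)
    return True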
@@ -85,6 +85,14 @@ class CacheInfo:
         self.aot_autograd_artifacts.clear()
         self.pgo_artifacts.clear()
 
+    def empty(self) -> bool:
+        return not (
+            self.inductor_artifacts
+            or self.autotune_artifacts
+            or self.aot_autograd_artifacts
+            or self.pgo_artifacts
+        )
+
 
 class CacheArtifactManager:
     """
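As a standalone illustration (a hypothetical FakeCacheInfo stand-in, not the real class), empty() relies on the truthiness of the four artifact containers, so it is True only while every store holds nothing:

from dataclasses import dataclass, field

@dataclass
class FakeCacheInfo:
    # Stand-ins for the real per-backend artifact stores.
    inductor_artifacts: dict = field(default_factory=dict)
    autotune_artifacts: dict = field(default_factory=dict)
    aot_autograd_artifacts: dict = field(default_factory=dict)
    pgo_artifacts: dict = field(default_factory=dict)

    def empty(self) -> bool:
        return not (
            self.inductor_artifacts
            or self.autotune_artifacts
            or self.aot_autograd_artifacts
            or self.pgo_artifacts
        )

info = FakeCacheInfo()
assert info.empty()                        # no artifacts recorded yet
info.inductor_artifacts["model"] = ["key"]
assert not info.empty()                    # any non-empty store flips it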
@@ -181,6 +189,12 @@ class CacheArtifactManager:
         for artifact in cls._new_cache_artifacts:
             log.debug("saving: %s", artifact)
             cls._cache_info.add(artifact)
+
+        if cls._cache_info.empty():
+            # If there are no artifacts, don't just return bytes with
+            # version.
+            return None
+
         try:
             # We deep copy cls._cache_info since later compilations
             # can keep adding to cache_info
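A hot-load sketch for the other side of the round trip (hypothetical helper; assumes torch.compiler.load_cache_artifacts accepts the serialized bytes): in a fresh process the saved payload, when one was actually produced, can be loaded before compiling to pre-populate the caches.

import os
import torch

def warm_compile_cache(path: str) -> None:
    # Hypothetical helper: only attempt a hot load if a payload was persisted,
    # which with this change is skipped when save_cache_artifacts() returned
    # None because nothing was compiled.
    if not os.path.exists(path):
        return
    with open(path, "rb") as f:
        torch.compiler.load_cache_artifacts(f.read())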