Revert "[inductor][BE] Add more debug logs for why fx graph cache doesn't happen (#152487)"
This reverts commit 76331657d2.
Reverted https://github.com/pytorch/pytorch/pull/152487 on behalf of https://github.com/malfet due to And it broke those tests, not sure why signal was ignored ([comment](https://github.com/pytorch/pytorch/pull/152487#issuecomment-2843333471))
parent 3f10091d3c
commit 49a72011cc
@@ -789,15 +789,6 @@ def _compile_fx_inner(
     remote = fx_graph_remote_cache
     set_feature_use("fx_cache", use_cache)
 
-    log.debug(
-        "FX cache status: use_cache=%s, local=%s, remote=%s, aot_mode=%s, force_disable_caches=%s",
-        use_cache,
-        local,
-        remote,
-        aot_mode,
-        config.force_disable_caches,
-    )
-
     # TODO: This is a hack purely to get some info to extract_tensor_metadata_for_cache_key,
     # figure out how to not have to modify example inputs
     for i, input in enumerate(example_inputs):
@@ -825,10 +816,8 @@ def _compile_fx_inner(
     # Attempt a cache lookup
     if key_info is not None:
         key, debug_lines = key_info
-        log.debug("FX cache key generated: %s", key)
         if remote:
             remote_cache = FxGraphCache.get_remote_cache()
-            log.debug("Using remote FX cache")
         mb_compiled_graph, cache_info = FxGraphCache.load_with_key(
             key,
             debug_lines,
@@ -838,20 +827,12 @@ def _compile_fx_inner(
             is_backward=graph_kwargs.get("is_backward", False),
             constants=constants,
         )
-    else:
-        log.debug("Failed to generate FX cache key")
 
     # CACHE BYPASS: Compile the graph, don't save it to the cache
     # (this can happen either because cache was disabled, or we
     # determined the input is uncacheable)
     if cache_info is None or cache_info["cache_state"] == "bypass":
         assert mb_compiled_graph is None
-        log.debug(
-            "FX cache bypass reason: %s",
-            cache_info.get("cache_bypass_reason", "unknown")
-            if cache_info is not None
-            else "FX cache disabled or key generation failed",
-        )
         mb_compiled_graph = fx_codegen_and_compile(
             gm, example_inputs, inputs_to_check, **graph_kwargs
         )
@@ -860,7 +841,6 @@ def _compile_fx_inner(
     elif cache_info["cache_state"] == "miss":
         assert mb_compiled_graph is None
         assert key_info is not None
-        log.debug("FX cache miss, compiling and saving to cache")
         TritonBundler.begin_compile()
         try:
             mb_compiled_graph = fx_codegen_and_compile(
@@ -887,7 +867,6 @@ def _compile_fx_inner(
         if triton_bundler_meta is not None:
             cache_info["triton_bundler_meta"] = str(triton_bundler_meta)
         cache_info["time_taken_ns"] = mb_compiled_graph._time_taken_ns
-        log.debug("Saving compiled graph to FX cache with key: %s", cache_key)
         FxGraphCache._save_graph(
             cache_key,
             mb_compiled_graph,
@@ -903,7 +882,6 @@ def _compile_fx_inner(
         assert mb_compiled_graph is not None
         assert key_info is not None
         (cache_key, debug_lines) = key_info
-        log.debug("FX cache hit with key: %s", cache_key)
         mb_compiled_graph._fx_graph_cache_key = cache_key
         mb_compiled_graph._fx_graph_cache_debug_lines = debug_lines
 
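Note: the log.debug calls removed by this revert emit on the torch._inductor logger, so while they were in the tree they would surface like any other inductor debug output. Below is a minimal sketch of turning that logging on around a compile; it assumes the torch._logging.set_logs interface and the fx_graph_cache / force_disable_caches config flags, and the function f is only a placeholder workload, not something from this diff.

import logging

import torch
import torch._inductor.config as inductor_config

# Enable DEBUG-level output for the torch._inductor logger
# (TORCH_LOGS="+inductor" is the environment-variable equivalent).
torch._logging.set_logs(inductor=logging.DEBUG)

# Exercise the FX graph cache paths rather than bypassing them.
inductor_config.fx_graph_cache = True
inductor_config.force_disable_caches = False

@torch.compile
def f(x):
    # Placeholder workload; any compiled graph would do.
    return torch.sin(x) + torch.cos(x)

# First compile of this graph: with the reverted logs in place, messages about
# cache key generation and hit/miss/bypass state would appear at DEBUG level.
f(torch.randn(8))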