Add a force argument to soft_empty_cache to force emptying the cache.

This commit is contained in:
comfyanonymous
2023-09-04 00:58:18 -04:00
parent 7746bdf7b0
commit 1938f5c5fe
3 changed files with 4 additions and 3 deletions

View File

@@ -639,14 +639,14 @@ def should_use_fp16(device=None, model_params=0, prioritize_performance=True):
return True
def soft_empty_cache(force=False):
    """Ask the active accelerator backend to release its cached memory.

    Args:
        force: When True, empty the CUDA cache even on non-NVIDIA backends
            (e.g. ROCm), where it is normally skipped.
    """
    global cpu_state
    if cpu_state == CPUState.MPS:
        torch.mps.empty_cache()
    elif is_intel_xpu():
        torch.xpu.empty_cache()
    elif torch.cuda.is_available():
        # Emptying the cache seems to make things worse on ROCm, so unless
        # explicitly forced it is only done for NVIDIA CUDA devices.
        if force or is_nvidia():
            torch.cuda.empty_cache()
            torch.cuda.ipc_collect()