From abb83239e5d708adf72fa457c3de81603e7281d3 Mon Sep 17 00:00:00 2001
From: hlky <106811348+hlky@users.noreply.github.com>
Date: Wed, 24 Aug 2022 14:12:33 +0100
Subject: [PATCH] torch_gc/empty cache after generation

Added torch_gc(), which calls both cuda.empty_cache() and cuda.ipc_collect().

Called before and after generation.
---
 webui.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/webui.py b/webui.py
index 6ba76407..7521b3ce 100644
--- a/webui.py
+++ b/webui.py
@@ -126,6 +126,9 @@ def create_random_tensors(shape, seeds):
     x = torch.stack(xs)
     return x
 
+def torch_gc():
+    torch.cuda.empty_cache()
+    torch.cuda.ipc_collect()
 
 def load_GFPGAN():
     model_name = 'GFPGANv1.3'
@@ -300,7 +303,7 @@ def process_images(outpath, func_init, func_sample, prompt, seed, sampler_name,
     """this is the main loop that both txt2img and img2img use; it calls func_init once inside all the scopes and func_sample once per batch"""
     assert prompt is not None
-    torch.cuda.empty_cache()
+    torch_gc()
 
     if seed == -1:
         seed = random.randrange(4294967294)
@@ -412,7 +415,7 @@ Steps: {steps}, Sampler: {sampler_name}, CFG scale: {cfg_scale}, Seed: {seed}{',
     for comment in comments:
         info += "\n\n" + comment
-
+    torch_gc()
     return output_images, seed, info
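
Note: a minimal standalone sketch of the pattern this patch introduces, for reference only. The generate callable and the run_generation wrapper are hypothetical stand-ins for the work process_images() does; the torch.cuda.is_available() guard is an added assumption not present in the patch.

    import torch

    def torch_gc():
        # Release cached CUDA allocator blocks and collect CUDA IPC handles,
        # mirroring the helper added in this patch.
        if torch.cuda.is_available():  # assumption: guard for CPU-only runs, not in the patch
            torch.cuda.empty_cache()
            torch.cuda.ipc_collect()

    def run_generation(generate, *args, **kwargs):
        # Hypothetical wrapper: free GPU memory before and after a generation call,
        # matching the before/after placement the patch uses in process_images().
        torch_gc()
        try:
            return generate(*args, **kwargs)
        finally:
            torch_gc()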