From e302673b44298464db5e4396a03970b7f3d5d984 Mon Sep 17 00:00:00 2001
From: Leonardo Marino-Ramirez
Date: Tue, 21 Apr 2026 10:59:04 -0400
Subject: [PATCH 1/2] feat: add inference.empty_cache_per_design flag (off by
 default)

---
 config/inference/base.yaml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/config/inference/base.yaml b/config/inference/base.yaml
index 3bb0a5c1..686e71bf 100644
--- a/config/inference/base.yaml
+++ b/config/inference/base.yaml
@@ -11,6 +11,7 @@ inference:
   model_only_neighbors: False
   output_prefix: samples/design
   write_trajectory: True
+  empty_cache_per_design: False
   scaffold_guided: False
   model_runner: SelfConditioning
   cautious: True

From e8bd8d14357548acc46a3062f616423d77479c4e Mon Sep 17 00:00:00 2001
From: Leonardo Marino-Ramirez
Date: Tue, 21 Apr 2026 10:59:41 -0400
Subject: [PATCH 2/2] feat: call torch.cuda.empty_cache() per design when
 empty_cache_per_design=True

---
 scripts/run_inference.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/scripts/run_inference.py b/scripts/run_inference.py
index 3fb6466e..3ebb5e3f 100755
--- a/scripts/run_inference.py
+++ b/scripts/run_inference.py
@@ -188,6 +188,9 @@ def main(conf: HydraConfig) -> None:
             chain_ids=sampler.chain_idx,
         )
 
+        if conf.inference.empty_cache_per_design and torch.cuda.is_available():
+            torch.cuda.empty_cache()
+
         log.info(f"Finished design in {(time.time()-start_time)/60:.2f} minutes")
 
 