gaolegao commited on
Commit
efb2a18
·
unverified ·
1 Parent(s): 3724fba

fix: use torch.no_grad() in inference to prevent excessive memory usage (~30GB) during inference (#349)

Browse files
Files changed (1)
  1. scripts/realtime_inference.py +1 -0
scripts/realtime_inference.py CHANGED
@@ -235,6 +235,7 @@ class Avatar:
235
  cv2.imwrite(f"{self.avatar_path}/tmp/{str(self.idx).zfill(8)}.png", combine_frame)
236
  self.idx = self.idx + 1
237
 
 
238
  def inference(self, audio_path, out_vid_name, fps, skip_save_images):
239
  os.makedirs(self.avatar_path + '/tmp', exist_ok=True)
240
  print("start inference")
 
235
  cv2.imwrite(f"{self.avatar_path}/tmp/{str(self.idx).zfill(8)}.png", combine_frame)
236
  self.idx = self.idx + 1
237
 
238
+ @torch.no_grad()
239
  def inference(self, audio_path, out_vid_name, fps, skip_save_images):
240
  os.makedirs(self.avatar_path + '/tmp', exist_ok=True)
241
  print("start inference")