From 98a946ebb0d5becaf3c8ad86476785fcb8543082 Mon Sep 17 00:00:00 2001
From: marksverdhei
Date: Tue, 5 Aug 2025 23:58:36 +0200
Subject: [PATCH] Convert process tasks to function

---
 wgp.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/wgp.py b/wgp.py
index c0ec19b..1b0b186 100644
--- a/wgp.py
+++ b/wgp.py
@@ -5386,7 +5386,7 @@ def process_tasks(state):
             gen["status"] = data
         elif cmd == "output":
             gen["preview"] = None
-            yield time.time() , time.time()
+            # yield time.time() , time.time()
         elif cmd == "progress":
             gen["progress_args"] = data
             # progress(*data)
@@ -5394,7 +5394,7 @@ def process_tasks(state):
             torch.cuda.current_stream().synchronize()
             preview= None if data== None else generate_preview(data)
             gen["preview"] = preview
-            yield time.time() , gr.Text()
+            # yield time.time() , gr.Text()
         else:
             raise Exception(f"unknown command {cmd}")
 
@@ -5403,7 +5403,7 @@ def process_tasks(state):
            gen["abort"] = False
            status = "Video Generation Aborted", "Video Generation Aborted"
            # yield gr.Text(), gr.Text()
-           yield time.time() , time.time()
+           # yield time.time() , time.time()
        gen["status"] = status

        queue[:] = [item for item in queue if item['id'] != task['id']]
@@ -5428,6 +5428,8 @@ def process_tasks(state):
    gen["status"] = status
    gen["status_display"] = False

+    return time.time(), time.time()
+


 def get_generation_status(prompt_no, prompts_max, repeat_no, repeat_max, window_no, total_windows):
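
For context, with every yield commented out and a final return added, process_tasks stops being a Python generator, so callers that previously iterated over it now receive a single (timestamp, timestamp) tuple from one direct call. The sketch below is a hypothetical, self-contained illustration of that caller-side difference; the names process_tasks_generator and process_tasks_function are placeholders and not code from wgp.py.

    # Hypothetical before/after sketch (not taken from wgp.py): shows how the
    # calling convention changes when a generator becomes a plain function.
    import time

    def process_tasks_generator(state):
        # Old style: yields intermediate (timestamp, timestamp) updates while working.
        for _ in range(3):
            yield time.time(), time.time()

    def process_tasks_function(state):
        # New style: does all the work, then returns one (timestamp, timestamp) pair.
        for _ in range(3):
            pass  # work happens here; no intermediate updates are emitted
        return time.time(), time.time()

    state = {}

    # Old caller had to iterate to drive the generator:
    for update in process_tasks_generator(state):
        print("intermediate update:", update)

    # New caller gets a single result from a direct call:
    print("final result:", process_tasks_function(state))

Note also that the patch comments the yields out rather than deleting them, keeping the old update points visible in the source.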