Commit fdf7e9f by liamsch (parent: e34628e): update progress

Files changed:
- gradio_demo.py (+1 -1)
- video_demo.py (+2 -2)
gradio_demo.py CHANGED

@@ -187,7 +187,7 @@ def process_video_frames(video_path: str, temp_dir: Path, progress=gr.Progress()
             render_queue.put((frame_idx, cropped_frame, verts[i]))
             frame_idx += 1
             progress(
-                frame_idx / num_frames, desc=f"Processing frame {frame_idx}/{num_frames}"
+                0.95 * frame_idx / num_frames, desc=f"Processing frame {frame_idx}/{num_frames}"
             )
     # Stop rendering threads
     for _ in range(num_render_workers):
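The change scales the per-frame progress fraction by 0.95 so the Gradio progress bar does not reach 100% while the render workers are still draining their queue and the output is still being written. A minimal sketch of that pattern, assuming a hypothetical handler wired into a Gradio event; the function name, the final progress(1.0, ...) call, and the return value are illustrative, not taken from the repo:

```python
import gradio as gr

def process_frames_sketch(num_frames: int, progress=gr.Progress()):
    """Hypothetical handler illustrating the scaled-progress pattern."""
    for frame_idx in range(1, num_frames + 1):
        # ... per-frame decoding / inference / enqueueing for rendering ...
        progress(
            0.95 * frame_idx / num_frames,  # the frame loop tops out at 95% of the bar
            desc=f"Processing frame {frame_idx}/{num_frames}",
        )
    # Post-loop work (stopping render workers, encoding the video) fills the last 5%.
    progress(1.0, desc="Finalizing output")
    return f"Done ({num_frames} frames)"
```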
video_demo.py CHANGED

@@ -275,12 +275,12 @@ class VideoFrameDataset(IterableDataset):
 def process_video(
     video_path: str,
     model_type: str = "expressive",
-    batch_size: int =
+    batch_size: int = 1,
     num_workers: int = 0,
     device: str = "cuda" if torch.cuda.is_available() else "cpu",
     output_video_path: Optional[str] = None,
     render_size: int = 512,
-    num_render_workers: int =
+    num_render_workers: int = 1,
     max_queue_size: int = 128,
 ) -> List[Dict[str, Any]]:
     """
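This change sets explicit defaults of batch_size=1 and num_render_workers=1 on process_video. A hedged usage sketch of the updated signature, assuming video_demo.py is importable from the repo root; the file paths and the override values are illustrative, not values the repo prescribes:

```python
# Illustrative only: paths and override values are assumptions, not from the repo.
from video_demo import process_video

# With the new defaults, a bare call runs with batch_size=1 and
# num_render_workers=1; pass larger values explicitly for throughput.
results = process_video(
    video_path="input.mp4",           # placeholder path
    model_type="expressive",
    batch_size=8,                     # override the new default of 1
    num_render_workers=4,             # override the new default of 1
    output_video_path="output.mp4",   # placeholder path
)
print(f"Processed {len(results)} frames")  # return type is List[Dict[str, Any]]
```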