MyNiuuu committed
Commit 1e77919
1 Parent(s): 06f86b5

fix run_gradio.py

Files changed (1)
  1. run_gradio.py +10 -3
run_gradio.py CHANGED
@@ -642,7 +642,7 @@ with gr.Blocks() as demo:
         <br>
         1. Use the "Upload Image" button to upload an image. Avoid dragging the image directly into the window. <br>
         2. Proceed to draw trajectories: <br>
-        2.1. Click "Add Trajectory" first, then select points on the "Add Trajectory Here" image. The first click sets the starting point. Click multiple points to create a non-linear trajectory. To add a new trajectory, click "Add Trajectory" again and select points on the image. <br>
+        2.1. Click "Add Trajectory" first, then select points on the "Add Trajectory Here" image. The first click sets the starting point. Click multiple points to create a non-linear trajectory. To add a new trajectory, click "Add Trajectory" again and select points on the image. Avoid clicking the "Add Trajectory" button multiple times without clicking points in the image to add the trajectory, as this can lead to errors. <br>
         2.2. After adding each trajectory, an optical flow image will be displayed automatically. Use it as a reference to adjust the trajectory for desired effects (e.g., area, intensity). <br>
         2.3. To delete the latest trajectory, click "Delete Last Trajectory." <br>
         2.4. Choose the Control Scale in the bar. This determines the control intensity. Setting it to 0 means no control (pure generation result of SVD itself), while setting it to 1 results in the strongest control (which will not lead to good results in most cases because of twisting artifacts). A preset value of 0.6 is recommended for most cases. <br>
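Note on the data structure the following hunks manipulate: the trajectories are held in a Gradio State-like object (tracking_points), whose underlying list (tracking_points.constructor_args['value'] below) contains one sub-list per trajectory, each sub-list being the clicked points in order (evt.index from image clicks). A plain-Python sketch with made-up point values:

    # Illustrative stand-in for the state's value: a list of trajectories,
    # each trajectory a list of clicked points in order.
    trajectories = []                      # no trajectories yet

    trajectories.append([])                # "Add Trajectory" opens an empty trajectory
    trajectories[-1].append((120, 80))     # first click = starting point
    trajectories[-1].append((180, 95))     # later clicks bend the path

    trajectories.append([])                # a second trajectory
    trajectories[-1].append((40, 200))

    print(trajectories)                    # [[(120, 80), (180, 95)], [(40, 200)]]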
@@ -689,6 +689,8 @@ with gr.Blocks() as demo:
         return first_frame_path, first_frame_path, first_frame_path, gr.State([]), gr.State([]), np.zeros((crop_h, crop_w)), np.zeros((crop_h, crop_w, 4))
 
     def add_drag(tracking_points):
+        if len(tracking_points.constructor_args['value']) != 0 and tracking_points.constructor_args['value'][-1] == []:
+            return tracking_points
         tracking_points.constructor_args['value'].append([])
         return tracking_points
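Why the new guard in add_drag helps, shown on a plain list rather than the app's state object (a minimal sketch, not the project's code): without the early return, every extra click on "Add Trajectory" appends another empty trajectory, which is exactly the misuse the updated instructions above warn about.

    def add_drag_guarded(trajectories):
        # Reuse the last trajectory if it is still empty instead of adding another.
        if len(trajectories) != 0 and trajectories[-1] == []:
            return trajectories
        trajectories.append([])
        return trajectories

    trajs = []
    add_drag_guarded(trajs)        # [[]]
    add_drag_guarded(trajs)        # still [[]]: no second empty trajectory
    trajs[-1].append((10, 20))
    add_drag_guarded(trajs)        # [[(10, 20)], []]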
 
@@ -697,7 +699,8 @@ with gr.Blocks() as demo:
         return motion_brush_points
 
     def delete_last_drag(tracking_points, first_frame_path, motion_brush_mask):
-        tracking_points.constructor_args['value'].pop()
+        if len(tracking_points.constructor_args['value']) > 0:
+            tracking_points.constructor_args['value'].pop()
         transparent_background = Image.open(first_frame_path).convert('RGBA')
         w, h = transparent_background.size
         transparent_layer = np.zeros((h, w, 4))
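The added length check in delete_last_drag avoids a plain Python failure: list.pop() on an empty list raises IndexError, which is what happened when "Delete Last Trajectory" was clicked before any trajectory existed. A standalone illustration using a stand-in list:

    trajectories = []

    # Unguarded: pop() on an empty list raises IndexError.
    try:
        trajectories.pop()
    except IndexError as e:
        print(f"unguarded delete fails: {e}")

    # Guarded, as in this hunk: deleting with nothing to delete is a no-op.
    if len(trajectories) > 0:
        trajectories.pop()
    print(trajectories)            # []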
@@ -746,6 +749,10 @@ with gr.Blocks() as demo:
     def add_tracking_points(tracking_points, first_frame_path, motion_brush_mask, evt: gr.SelectData):
 
         print(f"You selected {evt.value} at {evt.index} from {evt.target}")
+
+        if len(tracking_points.constructor_args['value']) == 0:
+            tracking_points.constructor_args['value'].append([])
+
         tracking_points.constructor_args['value'][-1].append(evt.index)
 
         # print(tracking_points.constructor_args['value'])
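Same failure mode as above: indexing [-1] on an empty list raises IndexError, so clicking the image before ever pressing "Add Trajectory" used to crash this callback. The added lines lazily open the first trajectory. A standalone sketch with a hypothetical click coordinate:

    trajectories = []
    click = (64, 128)              # hypothetical evt.index-style coordinate

    # trajectories[-1] would raise IndexError here; open a first trajectory on demand.
    if len(trajectories) == 0:
        trajectories.append([])
    trajectories[-1].append(click)

    print(trajectories)            # [[(64, 128)]]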
@@ -828,4 +835,4 @@ with gr.Blocks() as demo:
 
     run_button.click(DragNUWA_net.run, [first_frame_path, tracking_points, inference_batch_size, motion_brush_mask, motion_brush_viz, ctrl_scale], [hint_image, output_video, output_flow, output_video_mp4, output_flow_mp4])
 
-    demo.launch(server_name="0.0.0.0", debug=True, server_port=80)
+    demo.launch(server_name="127.0.0.1", debug=True, server_port=9080)
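The final change rebinds the demo from 0.0.0.0:80 (all interfaces, privileged port) to 127.0.0.1:9080 (loopback only, unprivileged port), so it no longer needs root and is not exposed to the network by default. If the host and port should stay configurable, a minimal sketch (environment-variable names are made up, not part of this repo):

    import os
    import gradio as gr

    # Hypothetical overrides; defaults match the committed values.
    server_name = os.environ.get("DEMO_HOST", "127.0.0.1")
    server_port = int(os.environ.get("DEMO_PORT", "9080"))

    with gr.Blocks() as demo:
        gr.Markdown("placeholder UI")

    demo.launch(server_name=server_name, debug=True, server_port=server_port)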
 