kfahn committed
Commit 880828c
1 Parent(s): 39b853a

Update app.py


Try to add p5 sketch

Files changed (1)
  1. app.py +31 -27
app.py CHANGED

@@ -8,6 +8,9 @@ from PIL import Image
  from diffusers import FlaxStableDiffusionControlNetPipeline, FlaxControlNetModel
  import cv2

+ with open("test.html") as f:
+     lines = f.readlines()
+
  def create_key(seed=0):
      return jax.random.PRNGKey(seed)

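A note on the `test.html` read added above: `readlines()` returns a list of strings, and that list is what later reaches `gr.HTML(lines)` in this diff, while `gr.HTML` expects a single HTML string, so the p5 sketch is unlikely to render as intended. A minimal sketch of the more common pattern, assuming `test.html` sits next to `app.py` and contains the p5.js keypoint tool:

```python
import gradio as gr

# Read the sketch as one string; gr.HTML expects markup, not a list of lines.
with open("test.html") as f:
    html_sketch = f.read()

with gr.Blocks() as demo:
    # Hypothetical stand-in for the keypoint_tool component in this commit.
    keypoint_tool = gr.HTML(html_sketch)

demo.launch()
```

Depending on the Gradio version, `<script>` tags injected through `gr.HTML` may not execute, so the p5 sketch may need to be wrapped in an `<iframe>` or served as a static page instead.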
@@ -49,37 +52,38 @@ def infer(prompts, negative_prompts, image):
  output_images = pipe.numpy_to_pil(np.asarray(output.reshape((num_samples,) + output.shape[-3:])))
  return output_images

- #gr.Interface(infer, inputs=["text", "text", "image"], outputs="gallery").launch()
-
-
- title = "Animal Pose Control Net"
- description = "This is a demo of Animal Pose ControlNet, which is a model trained on runwayml/stable-diffusion-v1-5 with new type of conditioning."
-
- #with gr.Blocks(theme=gr.themes.Default(font=[gr.themes.GoogleFont("Inconsolata"), "Arial", "sans-serif"])) as demo:
- #gr.Markdown(
- # """
- # Animal Pose Control Net
- # This is a demo of Animal Pose Control Net, which is a model trained on runwayml/stable-diffusion-v1-5 with new type of conditioning.
- #""")
-
- #theme = gr.themes.Default(primary_hue="green").set(
- # button_primary_background_fill="*primary_200",
- # button_primary_background_fill_hover="*primary_300",
- #)
-
- #gr.Interface(fn = infer, inputs = ["text"], outputs = "image",
- # title = title, description = description, theme='gradio/soft').launch()
-
+ mytheme = gr.themes.Default(primary_hue="slate")
  control_image = "https://huggingface.co/spaces/kfahn/Animal_Pose_Control_Net/blob/main/image_control.png"

- gr.Interface(fn = infer, inputs = ["text", "text", "image"], outputs = "gallery",
-     title = title, description = description, theme='gradio/soft',
-     #examples=[["a Labrador crossing the road", "low quality", control_image]]
-     ).launch()
+ with gr.Blocks(theme = mytheme) as demo:
+     gr.Markdown(
+     """
+     # Animal Pose Control Net
+     ## This is a demo of Animal Pose ControlNet, which is a model trained on runwayml/stable-diffusion-v1-5 with new type of conditioning.
+     """)
+     with gr.Column():
+         with gr.Row():
+             pos_prompts = gr.Textbox(label="Prompt")
+             neg_prompts = gr.Textbox(label="Negative Prompt")
+             image = gr.Image()
+
+     with gr.Column():
+         with gr.Row():
+             explain = gr.Textbox("Keypoint Tool: Use mouse to move joints")
+         with gr.Row():
+             keypoint_tool = gr.HTML(lines)

  gr.Markdown(
- """
+ """
  * [Dataset](https://huggingface.co/datasets/JFoz/dog-poses-controlnet-dataset)
  * [Diffusers model](), [Web UI model](https://huggingface.co/JFoz/dog-pose)
- * [Training Report](https://wandb.ai/john-fozard/dog-cat-pose/runs/kmwcvae5))
+ * [Training Report](https://wandb.ai/john-fozard/dog-cat-pose/runs/kmwcvae5)
  """)
+
+ btn = gr.Button("Run")
+ #btn.click(inputs = ["text", "text", "image"])
+
+ btn.click(fn=infer, inputs = ["text", "text", "image"], outputs = "gallery",
+     examples=[["a Labrador crossing the road", "low quality", control_image]])
+
+ demo.launch()
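A note on the event wiring added at the end of the diff: in the Blocks API, `Button.click` takes component objects for `inputs`/`outputs` rather than the type strings that `gr.Interface` accepts, and it has no `examples` argument; example rows are usually attached with `gr.Examples`. A minimal sketch of how the wiring might look, reusing the component names from this commit (the `gr.Gallery` output and the stubbed `infer`/`control_image` are illustrative assumptions):

```python
import gradio as gr

# Stub stand-ins so the sketch is self-contained; in app.py these already exist.
def infer(prompt, negative_prompt, image):
    return [image]

control_image = "image_control.png"  # placeholder path

with gr.Blocks(theme=gr.themes.Default(primary_hue="slate")) as demo:
    with gr.Row():
        pos_prompts = gr.Textbox(label="Prompt")
        neg_prompts = gr.Textbox(label="Negative Prompt")
        image = gr.Image()

    gallery = gr.Gallery(label="Results")  # assumed output component
    btn = gr.Button("Run")

    # Pass component objects, not type strings, to the event handler.
    btn.click(fn=infer, inputs=[pos_prompts, neg_prompts, image], outputs=gallery)

    # Example rows are attached via gr.Examples rather than a click() argument.
    gr.Examples(
        examples=[["a Labrador crossing the road", "low quality", control_image]],
        inputs=[pos_prompts, neg_prompts, image],
    )

demo.launch()
```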