Update app.py
app.py CHANGED
@@ -12,6 +12,11 @@ import re
 import time
 import torch
 import cv2
+from gradio_client import Client, file
+
+def image_gen(prompt):
+    client = Client("KingNish/Image-Gen-Pro")
+    return client.predict("Image Generation",None, prompt, api_name="/image_gen_pro")
 
 model_id = "llava-hf/llava-interleave-qwen-0.5b-hf"
 
@@ -148,13 +153,13 @@ def respond(message, history):
         yield output
     elif json_data["name"] == "image_generation":
         query = json_data["arguments"]["query"]
+        try:
+            number_of_image = json_data["arguments"]["number_of_image"]
+        except:
+            number_of_image = 1
         gr.Info("Generating Image, Please wait 10 sec...")
-
-
-        image = client.text_to_image(f"{str(query)} {message_text}", negative_prompt=f"{seed}")
-        generated_images = gr.Image(image)
-        yield generated_images
-        gr.Info("We are going to more upgrade image generator in next update")
+        image = image_gen(f"{str(query)}")
+        yield gr.Image(image[1])
     elif json_data["name"] == "image_qna":
         inputs = llava(message, history)
         streamer = TextIteratorStreamer(processor, skip_prompt=True, **{"skip_special_tokens": True})
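
The first hunk swaps the inline client.text_to_image call for an image_gen helper that delegates generation to the separate "KingNish/Image-Gen-Pro" Space through gradio_client. A minimal usage sketch, assuming that Space is reachable and that its /image_gen_pro endpoint returns a tuple whose second element is the generated image file (the reason the branch later yields gr.Image(image[1])):

from gradio_client import Client

def image_gen(prompt):
    # Connect to the external Space and invoke its /image_gen_pro endpoint.
    # The three positional inputs mirror the diff: a mode string, an unused
    # second input (None), and the text prompt.
    client = Client("KingNish/Image-Gen-Pro")
    return client.predict("Image Generation", None, prompt, api_name="/image_gen_pro")

if __name__ == "__main__":
    result = image_gen("a watercolor lighthouse at dusk")
    # Assumption: result[1] is the path of the generated image file,
    # matching how the diff displays image[1] in a gr.Image component.
    print(result[1])

Constructing the Client inside the helper keeps app.py free of a module-level connection, at the cost of reconnecting to the remote Space on every request.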
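The second hunk also starts reading an optional number_of_image argument from the model's JSON tool call, defaulting to 1 when it is missing. A small sketch of that parsing, assuming json_data has the same shape as in the diff; dict.get is the more idiomatic equivalent of the bare try/except:

# Hypothetical tool-call payload in the shape the diff expects.
json_data = {"name": "image_generation", "arguments": {"query": "a red bicycle"}}

query = json_data["arguments"]["query"]
# Same effect as the try/except in the diff: fall back to one image
# when the model omits number_of_image from its arguments.
number_of_image = json_data["arguments"].get("number_of_image", 1)
print(query, number_of_image)

Note that number_of_image is only parsed in this commit; image_gen is still called once per request, so a single image is generated regardless of the value.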