Add sleep time to help the server recover
app.py
```diff
@@ -43,12 +43,14 @@ def predict(request: gr.Request, *args, progress=gr.Progress(track_tqdm=True)):
         json_response = response.json()
         #If the output component is JSON return the entire output response
         if(outputs[0].get_config()["name"] == "json"):
+            time.sleep(1)
             return json_response["output"]
         predict_outputs = parse_outputs(json_response["output"])
         processed_outputs = process_outputs(predict_outputs)
-
+        time.sleep(1)
         return tuple(processed_outputs) if len(processed_outputs) > 1 else processed_outputs[0]
     else:
+        time.sleep(1)
         if(response.status_code == 409):
             raise gr.Error(f"Sorry, the Cog image is still processing. Try again in a bit.")
         raise gr.Error(f"The submission failed! Error: {response.status_code}")
```
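For context, a minimal sketch of how the response handling reads with the new `time.sleep(1)` calls in place. Only the lines shown in the diff come from the actual app.py; the success check (`status_code == 200`), the `handle_response` wrapper, and the stub bodies of `parse_outputs`/`process_outputs` are assumptions for illustration.

```python
import time

import gradio as gr


# Hypothetical stubs so the sketch is self-contained; in the real app.py these
# helpers and the `outputs` component list are defined elsewhere.
def parse_outputs(output):
    return output if isinstance(output, list) else [output]


def process_outputs(predict_outputs):
    return predict_outputs


def handle_response(response, outputs):
    # Assumed success check; the enclosing condition is not shown in the diff.
    if response.status_code == 200:
        json_response = response.json()
        # If the output component is JSON, return the entire output response.
        if outputs[0].get_config()["name"] == "json":
            time.sleep(1)  # brief pause before returning, per the commit message
            return json_response["output"]
        predict_outputs = parse_outputs(json_response["output"])
        processed_outputs = process_outputs(predict_outputs)
        time.sleep(1)  # same pause on the non-JSON success path
        return tuple(processed_outputs) if len(processed_outputs) > 1 else processed_outputs[0]
    else:
        time.sleep(1)  # pause before raising, giving the server a moment to recover
        if response.status_code == 409:
            raise gr.Error("Sorry, the Cog image is still processing. Try again in a bit.")
        raise gr.Error(f"The submission failed! Error: {response.status_code}")
```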