import re

import gradio as gr
from datasets import load_dataset


def greet(name):
    return "results " + name + "!!"


iface = gr.Interface(fn=greet, inputs="text", outputs="text")
iface.launch()


# When running locally, you won't have access to this, so you can remove this part
word_list_dataset = load_dataset("stabilityai/word-list", data_files="list.txt", use_auth_token=True)
word_list = word_list_dataset["train"]["text"]

is_gpu_busy = False


def infer(prompt):
    # Filter unsafe prompts before generating images
    global is_gpu_busy
    samples = 4
    steps = 50
    scale = 7.5

    # When running locally you can also remove this filter
    for filter in word_list:
        if re.search(rf"\b{filter}\b", prompt):
            raise gr.Error("Unsafe content found. Please try again with different prompts.")

    # generator = torch.Generator(device=device).manual_seed(seed)
    # print("Is GPU busy? ", is_gpu_busy)
    images = []
    # if(not is_gpu_busy):
    #     is_gpu_busy = True
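The commented-out `torch.Generator` line hints at the generation step that the hosted demo delegates elsewhere. As a minimal, hypothetical sketch of what a fully local version of `infer` could look like, assuming the `diffusers` library, a CUDA GPU, and an arbitrary Stable Diffusion checkpoint (the model id below is an illustration, not part of the original code):

import torch
from diffusers import StableDiffusionPipeline

# Assumed checkpoint for illustration; any Stable Diffusion checkpoint works here.
pipe = StableDiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5",
    torch_dtype=torch.float16,
)
pipe = pipe.to("cuda")


def infer_local(prompt, samples=4, steps=50, scale=7.5, seed=0):
    # Seed a generator for reproducible results, mirroring the commented-out
    # torch.Generator line in the snippet above.
    generator = torch.Generator(device="cuda").manual_seed(seed)
    output = pipe(
        prompt,
        num_images_per_prompt=samples,
        num_inference_steps=steps,
        guidance_scale=scale,
        generator=generator,
    )
    return output.images  # list of PIL images

This is only a sketch of local generation, not the approach the original demo uses; the prompt filtering and GPU-busy bookkeeping from `infer` would still apply around it.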