# Launcher for the SEED-X demo: starts the Flask model backend and the Gradio frontend.
import subprocess

if __name__ == '__main__':
    # Backend: Flask service that loads the tokenizer, LLM, visual encoder and
    # SDXL adapter configs and serves generation on port 7890 (GPU cuda:0).
    backend_command = [
        'python3', 'src/demo/seed_llama_flask.py',
        '--image_transform', 'configs/processer/qwen_448_transform.yaml',
        '--tokenizer', 'configs/tokenizer/clm_llama_tokenizer_224loc_anyres.yaml',
        '--llm', 'configs/clm_models/llm_seed_x_i.yaml',
        '--visual_encoder', 'configs/visual_encoder/qwen_vitg_448.yaml',
        '--sd_adapter', 'configs/sdxl_adapter/sdxl_qwen_vit_resampler_l4_q64_pretrain_no_normalize.yaml',
        '--agent', 'configs/clm_models/agent_seed_x_i.yaml',
        '--diffusion_path', 'https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0',
        '--port', '7890',
        '--llm_device', 'cuda:0',
        '--vit_sd_device', 'cuda:0',
        '--multi_resolution', 'True',
        '--has_bbox',
    ]
    # Frontend: Gradio UI on port 7860 that forwards requests to the backend.
    frontend_command = [
        'python3', 'src/demo/seed_llama_gradio.py',
        '--server_port', '7860',
        '--request_address', 'http://127.0.0.1:7890/generate',
    ]

    backend_proc = subprocess.Popen(backend_command)
    frontend_proc = subprocess.Popen(frontend_command)

    # Keep the launcher alive so both children share its lifetime; Ctrl+C
    # terminates backend and frontend together instead of orphaning them.
    try:
        backend_proc.wait()
        frontend_proc.wait()
    except KeyboardInterrupt:
        backend_proc.terminate()
        frontend_proc.terminate()