import subprocess

if __name__ == '__main__':
    # Flask backend: serves SEED-LLaMA 14B (8-bit) on port 7890, with encoder/decoder offloading enabled.
    backend_command = ['python3', 'gradio_demo/seed_llama_flask.py', '--image_transform', 'configs/transform/clip_transform.yaml', '--tokenizer', 'configs/tokenizer/seed_llama_tokenizer_hf.yaml', '--model', 'configs/llm/seed_llama_14b_8bit.yaml', '--port', '7890', '--llm_device', 'cuda:0', '--tokenizer_device', 'cuda:0', '--offload_encoder', '--offload_decoder']
    # Gradio frontend: serves the UI on port 7860 and forwards requests to the backend's /generate endpoint.
    frontend_command = ['python3', 'gradio_demo/seed_llama_gradio.py', '--server_port', '7860', '--request_address', 'http://127.0.0.1:7890/generate', '--model_type', 'seed-llama-14b']

    # Start both servers as child processes.
    backend_proc = subprocess.Popen(backend_command)
    frontend_proc = subprocess.Popen(frontend_command)
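Because the frontend is spawned immediately after the backend, the Flask server may still be loading model weights when the UI sends its first request. A minimal sketch of an optional readiness check is shown below; it only assumes the backend listens on port 7890 (as configured above) and does not rely on any specific endpoint or response format. The wait_for_backend helper is hypothetical and not part of the original launcher.

import socket
import time

def wait_for_backend(host='127.0.0.1', port=7890, timeout=600.0):
    # Hypothetical helper: poll the backend port until it accepts TCP connections
    # or the timeout expires. Returns True once a connection succeeds.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        try:
            with socket.create_connection((host, port), timeout=2.0):
                return True   # backend is accepting connections
        except OSError:
            time.sleep(1.0)   # backend still starting (e.g. loading weights); retry
    return False

If used, the check would sit between the two Popen calls: start backend_proc, call wait_for_backend(), then start frontend_proc.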