# THIS SOFTWARE IS NOT OPEN SOURCED!!! REDISTRIBUTION PROHIBITED! SEE LICENSE FOR DETAILS.
# TODO: Only allow 2 quantizations to run at once
from huggingface_hub import HfApi
import os
from hfconv import convert
from constants import *
import gradio as gr
import threading
from slugify import slugify

theme = gr.themes.Base(
    font=[gr.themes.GoogleFont('Libre Franklin'), gr.themes.GoogleFont('Public Sans'), 'system-ui', 'sans-serif'],
)

DESCRIPTION = """
Welcome to Convert to GGUF, a **free** tool to convert all your models to gguf
""".strip()


def run(model_id):
    """Start a background GGUF conversion for ``model_id`` and return status markdown.

    Parameters:
        model_id: Hugging Face repo id typed by the user (may contain stray whitespace).

    Returns:
        A markdown string shown in the UI: a validation error, a generic error,
        or a "quantizing now" message with a link to the destination repo.
    """
    # Normalize once: catches empty string, whitespace-only input, and None.
    model_id = (model_id or "").strip()
    if not model_id:
        return """
### Invalid input 🐞

Please input a model_id.
"""
    try:
        # HF_TOKEN comes from `constants` (star import) — TODO confirm it is defined there.
        api = HfApi(token=HF_TOKEN)
        if not api.repo_exists(model_id):
            # Return the markdown directly: raising gr.Error here would be
            # swallowed by the broad `except` below and never surface as a
            # Gradio error popup — the user would only see the generic text.
            return """
### Error

Unable to locate repo
"""
        # Conversion can take hours; run it off the request thread so the UI
        # returns immediately. Note: nothing currently limits concurrent
        # conversions (see TODO at top of file).
        background_thread = threading.Thread(target=convert, args=(api, model_id))
        background_thread.start()
        # `username` comes from `constants` (star import); the naming scheme
        # here must match the repo that `convert` actually pushes to —
        # TODO confirm against hfconv.convert.
        repo_id = username + "/" + slugify(model_id) + "-GGUF"
        return f"""## Quantizing

We are quantizing the model now. If it is successful and it works, it will be available [here](https://huggingface.co/{repo_id}). It may take up to several hours to complete. If it does not work after several hours, please try again. If it does not work after many tries, please contact us.""".strip()
    except Exception as e:
        # Top-level UI boundary: report the failure as markdown rather than crashing.
        return f"""
### Error

{e}
"""


demo = gr.Interface(
    title="Convert LLMs to GGUF & Quantize",
    description=DESCRIPTION,
    allow_flagging="never",
    article="Created by [mrfakename](https://twitter.com/realmrfakename).",
    inputs=[
        gr.Text(max_lines=1, label="model_id"),
    ],
    outputs=[gr.Markdown(label="output")],
    fn=run,
    css="footer{display:none !important}",
    theme=theme,
)
demo.queue(api_open=False, max_size=15).launch(show_api=False)