# THIS SOFTWARE IS NOT OPEN SOURCED!!! REDISTRIBUTION PROHIBITED! SEE LICENSE FOR DETAILS.

## TODO: Only allow 2 quantizations to run at once (see the commented sketch after the imports)
from huggingface_hub import HfApi
import os
from hfconv import convert
from constants import *
import gradio as gr
import threading
from slugify import slugify
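
# A minimal, commented-out sketch of one way to satisfy the TODO above,
# assuming a process-local cap is acceptable: a semaphore limiting concurrent
# quantizations to 2. QUANT_SLOTS and convert_with_limit are hypothetical
# names and are not wired into run() below.
#
# QUANT_SLOTS = threading.BoundedSemaphore(2)
#
# def convert_with_limit(api, model_id):
#     with QUANT_SLOTS:  # waits in the background thread until a slot frees up
#         convert(api=api, model_id=model_id)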

theme = gr.themes.Base(
    font=[gr.themes.GoogleFont('Libre Franklin'), gr.themes.GoogleFont('Public Sans'), 'system-ui', 'sans-serif'],
)
DESCRIPTION = """
Welcome to Convert to GGUF, a **free** tool to convert all your models to GGUF.
""".strip()

def run(model_id: str) -> str:
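    """Validate model_id, start the conversion in a background thread, and return status Markdown."""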
    if not model_id:
        return (
            "### Invalid input 🐞\n\n"
            "Please enter a model_id."
        )
    try:
        api = HfApi(token=HF_TOKEN)
        if not api.repo_exists(model_id):
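            # gr.Error subclasses Exception, so this raise is caught by the
            # handler below and returned as error Markdown rather than a popup.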
            raise gr.Error('Unable to locate repo')
        # repo_id = convert(api=api, model_id=model_id)
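        # Run the conversion in a background thread so this request returns
        # immediately instead of blocking for the full quantization.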
        background_thread = threading.Thread(target=convert, args=(api, model_id))
        background_thread.start()
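        # Predict where the result will land (assumed to match the repo name
        # convert() pushes to): <username>/<slugified-model-id>-GGUF.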
        repo_id = f"{username}/{slugify(model_id.strip())}-GGUF"
        string = f"""## Quantizing
The model is being quantized now. If the conversion succeeds, it will be available [here](https://huggingface.co/{repo_id}). Quantization may take up to several hours. If the model has not appeared after that, please try again; if repeated attempts fail, please contact us.""".strip()
        # if errors:
        #     string += "\nErrors during conversion:\n"
        #     string += "\n".join(f"Error while converting {filename}: {e}, skipped conversion" for filename, e in errors)
        return string
    except Exception as e:
        return f"### Error\n\n{e}"
demo = gr.Interface(
    title="Convert LLMs to GGUF & Quantize",
    description=DESCRIPTION,
    allow_flagging="never",
    article="Created by [mrfakename](https://twitter.com/realmrfakename).",
    inputs=[
        gr.Text(max_lines=1, label="model_id"),
    ],
    outputs=[gr.Markdown(label="output")],
    fn=run,
    css="footer{display:none !important}",
    theme=theme
)

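# Queue incoming requests (at most 15 waiting) and launch with the API hidden.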
demo.queue(api_open=False, max_size=15).launch(show_api=False)