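# HugChat LLM Chatbot: a Streamlit front end for HuggingChat built on the
# unofficial `hugchat` package. Log in with a HuggingFace account in the
# sidebar, pick a model and a system prompt, then chat with streamed replies.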
from hugchat import hugchat
from hugchat.login import Login
import streamlit as st

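# Session state: remember the login status and HuggingFace cookies across reruns.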
if "logged_in" not in st.session_state:
    st.session_state.logged_in = False
if "cookies" not in st.session_state:
    st.session_state.cookies = None

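# Models selectable in the sidebar (as exposed by HuggingChat).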
llms = [
    'meta-llama/Meta-Llama-3.1-70B-Instruct', 
    'meta-llama/Meta-Llama-3.1-405B-Instruct-FP8', 
    'CohereForAI/c4ai-command-r-plus', 
    'mistralai/Mixtral-8x7B-Instruct-v0.1', 
    'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO', 
    '01-ai/Yi-1.5-34B-Chat', 
    'mistralai/Mistral-7B-Instruct-v0.3', 
    'microsoft/Phi-3-mini-4k-instruct'
]

st.title("HugChat LLM Chatbot")
st.markdown("Made by Vo1d_s")

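# Sidebar: HuggingFace credentials used to create the hugchat session.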
sidebar = st.sidebar
sidebar.markdown("## HuggingFace account") 
email = sidebar.text_input("Email", placeholder="Your email", disabled=st.session_state.logged_in)
password = sidebar.text_input("Password (secure)", placeholder="Your password", type="password", disabled=st.session_state.logged_in)
empty = sidebar.empty()
sidebar.warning("Note: Because this app signs in to HuggingFace on your behalf, you may receive an email saying that someone logged into your account. This is expected and safe.")

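# Reset the chat when the system prompt or the selected model changes:
# clear the local history, delete the server-side conversation, and
# rebuild the ChatBot with the new settings.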
def change_system_prompt():
    st.session_state.messages = []
    st.session_state.chatbot.delete_conversation()
    st.session_state.chatbot = hugchat.ChatBot(st.session_state.cookies, system_prompt=st.session_state.sys_prompt, default_llm=st.session_state.selected_llm)
    print(st.session_state.selected_llm)  # debug: log the newly selected model to the server console

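# Model configuration; changing either control rebuilds the chatbot.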
sidebar.markdown("## Model Configuration")
system_prompt = sidebar.text_input("System prompt (don't leave empty)", placeholder="Model behaviour", on_change=change_system_prompt, key="sys_prompt", disabled=not st.session_state.logged_in)
selected_llm = sidebar.selectbox("LLM (model)", placeholder="Default LLM", on_change=change_system_prompt, options=llms, key="selected_llm", disabled=not st.session_state.logged_in)

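# Log in once credentials are entered; keep the cookies and create the ChatBot.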
if email and password:
    if not st.session_state.logged_in:
        try:
            credentials = Login(email=email, passwd=password)
            st.session_state.cookies = credentials.login()
            st.session_state.logged_in = True
            st.session_state.chatbot = hugchat.ChatBot(st.session_state.cookies)
        except Exception as e:
            empty.error(f"Error: {e}")

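# Yield tokens from the hugchat chat response so st.write_stream can
# render the assistant's reply incrementally.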
def stream(prompt):
    for event in st.session_state.chatbot.chat(prompt):
        if event:
            yield event["token"]


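# Chat UI: replay the stored conversation, then handle new user input.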
def main():
    if "messages" not in st.session_state:
        st.session_state.messages = []

    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.write(message["content"])

    if prompt := st.chat_input("What is up?", disabled=not st.session_state.logged_in):
        with st.chat_message("user"):
            st.write(prompt)

        st.session_state.messages.append({"role": "user", "content": prompt})

        with st.chat_message("assistant"):
            try:
                response = st.write_stream(stream(prompt))
            except Exception as e:
                response = f"Error: {e}"
                st.write(response)

        st.session_state.messages.append({"role": "assistant", "content": response})

if __name__ == "__main__":
    main()