Tonic committed on
Commit
89df8e7
1 Parent(s): 72b512b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -19
app.py CHANGED
@@ -53,25 +53,13 @@ def multimodal_prompt(user_input, system_prompt="You are an expert medical analy
53
  device = "cuda" if torch.cuda.is_available() else "cpu"
54
 
55
  # Use the base model's ID
56
- base_model_id = "mistralai/Mistral-7B-v0.1"
57
- model_directory = "Tonic/mistralmed"
58
 
59
  # Instantiate the Tokenizer
60
- tokenizer = AutoTokenizer.from_pretrained("mistralai/Mistral-7B-v0.1", trust_remote_code=True, padding_side="left")
61
- # tokenizer = AutoTokenizer.from_pretrained("Tonic/mistralmed", trust_remote_code=True, padding_side="left")
62
  tokenizer.pad_token = tokenizer.eos_token
63
  tokenizer.padding_side = 'left'
64
-
65
- # Specify the configuration class for the model
66
- #model_config = AutoConfig.from_pretrained(base_model_id)
67
-
68
- # Load the PEFT model with the specified configuration
69
- #peft_model = AutoModelForCausalLM.from_pretrained(base_model_id, config=model_config)
70
-
71
- # Load the PEFT model
72
- peft_config = PeftConfig.from_pretrained("Tonic/mistralmed", token="hf_[REDACTED-LEAKED-TOKEN]")
73
- peft_model = MistralForCausalLM.from_pretrained("mistralai/Mistral-7B-v0.1", trust_remote_code=True)
74
- peft_model = PeftModel.from_pretrained(peft_model, "Tonic/mistralmed", token="hf_[REDACTED-LEAKED-TOKEN]")
75
 
76
  class ChatBot:
77
  def __init__(self):
@@ -91,7 +79,7 @@ class ChatBot:
91
  chat_history_ids = user_input_ids
92
 
93
  # Generate a response using the PEFT model
94
- response = peft_model.generate(input_ids=chat_history_ids, max_length=512, pad_token_id=tokenizer.eos_token_id)
95
 
96
  # Update chat history
97
  self.history = chat_history_ids
@@ -102,9 +90,9 @@ class ChatBot:
102
 
103
  bot = ChatBot()
104
 
105
- title = "👋🏻Welcome to Tonic's MistralMed Chat🚀"
106
- description = "You can use this Space to test out the current model (MistralMed) or duplicate this Space and use it for any other model on 🤗HuggingFace. Join me on Discord to build together."
107
- examples = [["What is the proper treatment for buccal herpes?", "Please provide information on the most effective antiviral medications and home remedies for treating buccal herpes."]]
108
 
109
  iface = gr.Interface(
110
  fn=bot.predict,
 
53
  device = "cuda" if torch.cuda.is_available() else "cpu"
54
 
55
  # Use the base model's ID
56
+ base_model_id = "OpenLLM-France/Claire-Mistral-7B-0.1"
 
57
 
58
  # Instantiate the Tokenizer
59
+ tokenizer = AutoTokenizer.from_pretrained("OpenLLM-France/Claire-Mistral-7B-0.1", trust_remote_code=True, padding_side="left")
 
60
  tokenizer.pad_token = tokenizer.eos_token
61
  tokenizer.padding_side = 'left'
62
+ model = AutoModelForCausalLM.from_pretrained("OpenLLM-France/Claire-Mistral-7B-0.1")
 
 
 
 
 
 
 
 
 
 
63
 
64
  class ChatBot:
65
  def __init__(self):
 
79
  chat_history_ids = user_input_ids
80
 
81
  # Generate a response using the PEFT model
82
+ response = model.generate(input_ids=chat_history_ids, max_length=512, pad_token_id=tokenizer.eos_token_id)
83
 
84
  # Update chat history
85
  self.history = chat_history_ids
 
90
 
91
  bot = ChatBot()
92
 
93
+ title = "👋🏻Welcome to Tonic's Claire Chat🚀"
94
+ description = "You can use this Space to test out the current model (ClaireLLM) or duplicate this Space and use it for any other model on 🤗HuggingFace. Join me on Discord to build together."
95
+ examples = [["Oueche Normal, Claire, ça va ou quoi?", "bonjour je m'appele Claire et je suis une assistante francophone-first conçu par openLLM"]]
96
 
97
  iface = gr.Interface(
98
  fn=bot.predict,