Spaces:
Runtime error
shivam9980 committed • Commit 7bf8bd1 • Parent(s): 2161f81
Update app.py
app.py CHANGED

@@ -1,13 +1,12 @@
 # Load model directly
 import streamlit as st
 
-from
-
-
-
-dtype = None,
+from peft import AutoPeftModelForCausalLM
+from transformers import AutoTokenizer
+model = AutoPeftModelForCausalLM.from_pretrained(
+"shivam9980/mistral-7b-news", # YOUR MODEL YOU USED FOR TRAINING
 load_in_4bit = True,)
-
+tokenizer = AutoTokenizer.from_pretrained("shivam9980/mistral-7b-news")
 
 # alpaca_prompt = You MUST copy from above!
 
@@ -33,4 +32,8 @@ inputs = tokenizer(
 
 outputs = model.generate(**inputs, max_new_tokens = 64, use_cache = True)
 results= tokenizer.batch_decode(outputs)
-
+out = results[0].split('\n')[-1]
+st.text_area(label='Headline',value=out[:len(out)-4])
+
+
+
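Taken together, the commit switches the Space to loading the fine-tuned adapter through peft's AutoPeftModelForCausalLM and adds a Streamlit widget that displays the generated headline. The sketch below shows how the resulting app.py plausibly fits together; the article input widget and the prompt string are not visible in this diff (only the hunk header hints at an `inputs = tokenizer(` call), so those parts are assumptions for illustration.

# Minimal sketch of the app flow after this commit; the `article` text area
# and the Alpaca-style `prompt` string are assumptions, since the diff does
# not show lines 13-31 of app.py.
import streamlit as st
from peft import AutoPeftModelForCausalLM
from transformers import AutoTokenizer

# Load the LoRA adapter together with its 4-bit quantised base model.
model = AutoPeftModelForCausalLM.from_pretrained(
    "shivam9980/mistral-7b-news",
    load_in_4bit=True,
)
tokenizer = AutoTokenizer.from_pretrained("shivam9980/mistral-7b-news")

article = st.text_area(label="Article")  # hypothetical input widget
# Hypothetical Alpaca-style prompt; the real template is the unchanged
# "alpaca_prompt" referenced in the comment above.
prompt = (
    "### Instruction:\nWrite a headline for the article.\n\n"
    f"### Input:\n{article}\n\n### Response:\n"
)

if article:
    inputs = tokenizer([prompt], return_tensors="pt").to(model.device)
    outputs = model.generate(**inputs, max_new_tokens=64, use_cache=True)
    results = tokenizer.batch_decode(outputs)
    # Same post-processing as the commit: keep the last line of the decoded
    # text and drop the trailing "</s>" end-of-sequence marker (4 characters).
    out = results[0].split("\n")[-1]
    st.text_area(label="Headline", value=out[: len(out) - 4])

Note that loading with load_in_4bit = True relies on bitsandbytes and a CUDA-capable GPU in the Space; on CPU-only hardware the call fails at startup, which is one plausible cause of the "Runtime error" status shown above.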