Spaces:
Sleeping
Sleeping
RichieBurundi
committed on
Commit
•
b88328d
1
Parent(s):
7162ad3
Update app.py
Browse files
app.py
CHANGED
@@ -1,28 +1,27 @@
|
|
1 |
-
import
|
2 |
-
from
|
|
|
3 |
|
4 |
-
|
|
|
|
|
|
|
5 |
|
6 |
-
|
7 |
-
|
8 |
-
|
9 |
-
|
10 |
-
|
11 |
|
12 |
-
|
13 |
-
|
14 |
-
topics, probs = model.transform([input_text])
|
15 |
-
generated_text = model.generate_topic_labels(topics[0], probs[0], top_n=1)[0]
|
16 |
-
return generated_text
|
17 |
-
except Exception as e:
|
18 |
-
return f"Error generating text: {e}"
|
19 |
|
20 |
-
|
21 |
-
fn=generate_text,
|
22 |
-
inputs="text",
|
23 |
-
outputs="text",
|
24 |
-
title="Ariginal Model Text Generation",
|
25 |
-
description="Enter your text, and the model will generate a response."
|
26 |
-
)
|
27 |
|
28 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import torch
|
2 |
+
from transformers import AutoTokenizer, AutoModelForCausalLM
|
3 |
+
import streamlit as st
|
4 |
|
5 |
+
# Загрузка модели и токенизатора
|
6 |
+
model_name = "Richieburundi/Ariginalmodel"
|
7 |
+
tokenizer = AutoTokenizer.from_pretrained(model_name)
|
8 |
+
model = AutoModelForCausalLM.from_pretrained(model_name)
|
9 |
|
10 |
+
def generate_text(input_text, max_length=50):
|
11 |
+
inputs = tokenizer(input_text, return_tensors="pt")
|
12 |
+
outputs = model.generate(**inputs, max_length=max_length, num_return_sequences=1, do_sample=True, top_k=50, top_p=0.95, num_beams=5)
|
13 |
+
generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
|
14 |
+
return generated_text
|
15 |
|
16 |
+
st.title("AI Text Generation")
|
17 |
+
st.write("Enter some text, and the AI will generate a response.")
|
|
|
|
|
|
|
|
|
|
|
18 |
|
19 |
+
input_text = st.text_area("Input Text", height=200)
|
|
|
|
|
|
|
|
|
|
|
|
|
20 |
|
21 |
+
if st.button("Generate Text"):
|
22 |
+
try:
|
23 |
+
generated_text = generate_text(input_text)
|
24 |
+
st.write("Generated Text:")
|
25 |
+
st.write(generated_text)
|
26 |
+
except Exception as e:
|
27 |
+
st.error(f"Error generating text: {e}")
|