Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,59 +1,49 @@
|
|
1 |
-
from tensorflow.keras.layers import GRU
|
2 |
import tensorflow as tf
|
3 |
from tensorflow import keras
|
4 |
import gradio as gr
|
5 |
|
6 |
-
|
7 |
-
|
8 |
-
|
9 |
-
|
10 |
-
|
11 |
-
|
12 |
|
13 |
-
#
|
14 |
-
|
15 |
|
16 |
-
#
|
17 |
-
|
|
|
18 |
|
19 |
-
|
20 |
-
|
21 |
-
idx2char=['\t', '\n', ' ', 'ء', 'آ', 'أ', 'ؤ', 'إ', 'ئ', 'ا', 'ب', 'ة', 'ت', 'ث', 'ج', 'ح', 'خ', 'د', 'ذ', 'ر', 'ز', 'س', 'ش', 'ص', 'ض', 'ط', 'ظ', 'ع', 'غ', 'ف', 'ق', 'ك', 'ل', 'م', 'ن', 'ه', 'و', 'ى', 'ي']
|
22 |
-
# Evaluation step (generating text using the learned model)
|
23 |
|
24 |
-
|
25 |
-
|
|
|
26 |
|
27 |
-
|
28 |
-
|
29 |
-
|
|
|
|
|
|
|
30 |
|
31 |
-
|
32 |
-
|
|
|
|
|
|
|
33 |
|
34 |
-
|
35 |
-
# Higher temperatures results in more surprising text.
|
36 |
-
# Experiment to find the best setting.
|
37 |
|
38 |
-
|
39 |
-
model.reset_states()
|
40 |
-
for i in range(num_generate):
|
41 |
-
predictions = model(input_eval)
|
42 |
-
# remove the batch dimension
|
43 |
-
predictions = tf.squeeze(predictions, 0)
|
44 |
|
45 |
-
|
46 |
-
predictions = predictions / temperature
|
47 |
-
predicted_id = tf.random.categorical(predictions, num_samples=1)[-1,0].numpy()
|
48 |
-
|
49 |
-
input_eval = tf.expand_dims([predicted_id], 0)
|
50 |
|
51 |
-
|
|
|
52 |
|
53 |
-
|
54 |
-
|
55 |
-
def generate_poem(start, temperature):
|
56 |
-
return generate_text(reconstructed_model, temperature, start_string=u""+start)
|
57 |
-
|
58 |
-
iface = gr.Interface(fn=generate_poem, inputs=["text", gr.Slider(0, 1, value=1)], outputs="text")
|
59 |
-
iface.launch()
|
|
|
|
|
1 |
import tensorflow as tf
|
2 |
from tensorflow import keras
|
3 |
import gradio as gr
|
4 |
|
5 |
+
def generate_text(model, temperature, start_string, num_generate=1000):
    """Generate Arabic poem text character-by-character from a trained model.

    Args:
        model: char-level Keras language model; called as ``model(input_eval)``
            with a ``(1, seq_len)`` int tensor and expected to return per-step
            logits. Must expose ``reset_states()`` (stateful RNN —
            NOTE(review): deprecated/removed in newer Keras; confirm the
            pinned TF version still supports it).
        temperature: positive float; logits are divided by it before sampling.
            Low temperatures give more predictable text, higher temperatures
            more surprising text.
        start_string: seed text. Every character must be in the model's
            vocabulary, otherwise a ``KeyError`` is raised.
        num_generate: number of characters to sample (default 1000, matching
            the previously hard-coded value).

    Returns:
        ``start_string`` followed by ``num_generate`` sampled characters.
    """
    # Vocabulary of the trained model: tab, newline, space, Arabic letters.
    char2idx = {'\t': 0, '\n': 1, ' ': 2, 'ء': 3, 'آ': 4, 'أ': 5, 'ؤ': 6,
                'إ': 7, 'ئ': 8, 'ا': 9, 'ب': 10, 'ة': 11, 'ت': 12, 'ث': 13,
                'ج': 14, 'ح': 15, 'خ': 16, 'د': 17, 'ذ': 18, 'ر': 19,
                'ز': 20, 'س': 21, 'ش': 22, 'ص': 23, 'ض': 24, 'ط': 25,
                'ظ': 26, 'ع': 27, 'غ': 28, 'ف': 29, 'ق': 30, 'ك': 31,
                'ل': 32, 'م': 33, 'ن': 34, 'ه': 35, 'و': 36, 'ى': 37,
                'ي': 38}
    # Derive the inverse mapping from char2idx so the two tables can never
    # drift apart (previously idx2char was a hand-maintained duplicate).
    idx2char = sorted(char2idx, key=char2idx.get)

    # Vectorize the seed string and add a batch dimension (batch size == 1).
    input_eval = [char2idx[s] for s in start_string]
    input_eval = tf.expand_dims(input_eval, 0)

    # Accumulate generated characters here; joined once at the end.
    text_generated = []

    # Reset the RNN's hidden state before generating a fresh sequence.
    model.reset_states()
    for _ in range(num_generate):
        predictions = model(input_eval)
        # Remove the batch dimension -> (seq_len, vocab_size) logits.
        predictions = tf.squeeze(predictions, 0)

        # Temperature-scaled categorical sampling; take the sample for the
        # last time step ([-1, 0]) as the next character id.
        predictions = predictions / temperature
        predicted_id = tf.random.categorical(
            predictions, num_samples=1)[-1, 0].numpy()

        # Feed the sampled character back in as the next model input.
        input_eval = tf.expand_dims([predicted_id], 0)
        text_generated.append(idx2char[predicted_id])

    return start_string + ''.join(text_generated)
|
|
|
|
|
|
|
|
|
|
|
42 |
|
43 |
+
# Load the pre-trained GRU poem-generation model from disk.
# NOTE(review): the .h5 filename contains a space and "(1)" — presumably a
# browser-download artifact; confirm this exact file ships with the app.
_MODEL_PATH = "poems_generation_GRU (1).h5"
reconstructed_model = keras.models.load_model(_MODEL_PATH)
|
|
|
|
|
|
|
|
|
44 |
|
45 |
+
def generate_poem(start, temperature):
    """Gradio callback: generate a poem seeded with *start* at *temperature*."""
    # "" + start keeps the original coercion behavior: a non-str input still
    # raises TypeError here rather than deeper inside generate_text.
    seed = "" + start
    return generate_text(reconstructed_model, temperature, start_string=seed)
|
47 |
|
48 |
+
# Wire the generator into a simple Gradio UI: a text box for the seed
# string and a slider (0..1, default 1) for the sampling temperature.
temperature_slider = gr.Slider(0, 1, value=1)
iface = gr.Interface(
    fn=generate_poem,
    inputs=["text", temperature_slider],
    outputs="text",
)
iface.launch()
|
|
|
|
|
|
|
|
|
|