Aalaa committed on
Commit
ca86a72
1 Parent(s): 6df6de3

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +43 -33
app.py CHANGED
@@ -1,49 +1,59 @@
 
1
  import tensorflow as tf
2
  from tensorflow import keras
3
  import gradio as gr
4
 
5
- def generate_text(model,temperature, start_string):
6
- char2idx={'\t': 0, '\n': 1, ' ': 2, 'ء': 3, 'آ': 4, 'أ': 5, 'ؤ': 6, 'إ': 7, 'ئ': 8, 'ا': 9, 'ب': 10, 'ة': 11, 'ت': 12, 'ث': 13, 'ج': 14, 'ح': 15, 'خ': 16, 'د': 17, 'ذ': 18, 'ر': 19, 'ز': 20, 'س': 21, 'ش': 22, 'ص': 23, 'ض': 24, 'ط': 25, 'ظ': 26, 'ع': 27, 'غ': 28, 'ف': 29, 'ق': 30, 'ك': 31, 'ل': 32, 'م': 33, 'ن': 34, 'ه': 35, 'و': 36, 'ى': 37, 'ي': 38}
7
- idx2char=['\t', '\n', ' ', 'ء', 'آ', 'أ', 'ؤ', 'إ', 'ئ', 'ا', 'ب', 'ة', 'ت',
8
- 'ث', 'ج', 'ح', 'خ', 'د', 'ذ', 'ر', 'ز', 'س', 'ش', 'ص', 'ض', 'ط',
9
- 'ظ', 'ع', 'غ', 'ف', 'ق', 'ك', 'ل', 'م', 'ن', 'ه', 'و', 'ى', 'ي']
10
- # Evaluation step (generating text using the learned model)
11
 
12
- # Number of characters to generate
13
- num_generate = 1000
14
 
15
- # Converting our start string to numbers (vectorizing)
16
- input_eval = [char2idx[s] for s in start_string]
17
- input_eval = tf.expand_dims(input_eval, 0)
18
 
19
- # Empty string to store our results
20
- text_generated = []
 
 
21
 
22
- # Low temperatures results in more predictable text.
23
- # Higher temperatures results in more surprising text.
24
- # Experiment to find the best setting.
25
 
26
- # Here batch size == 1
27
- model.reset_states()
28
- for i in range(num_generate):
29
- predictions = model(input_eval)
30
- # remove the batch dimension
31
- predictions = tf.squeeze(predictions, 0)
32
 
33
- # using a random.categorical distribution to predict the word returned by the model
34
- predictions = predictions / temperature
35
- predicted_id = tf.random.categorical(predictions, num_samples=1)[-1,0].numpy()
36
-
37
- input_eval = tf.expand_dims([predicted_id], 0)
38
 
39
- text_generated.append(idx2char[predicted_id])
 
 
40
 
41
- return (start_string + ''.join(text_generated))
 
 
 
 
 
42
 
43
- reconstructed_model = keras.models.load_model("poems_generation_GRU (1).h5")
 
 
 
 
44
 
45
- def generate_poem(start,temperature):
46
- return generate_text(reconstructed_model,temperature, start_string=u""+start )
47
 
48
- iface = gr.Interface(fn=generate_poem, inputs=["text",gr.Slider(0, 1, value=1)], outputs="text")
 
 
 
 
 
49
  iface.launch()
 
1
+ from tensorflow.keras.layers import GRU
2
  import tensorflow as tf
3
  from tensorflow import keras
4
  import gradio as gr
5
 
6
# GRU subclass that drops config keys written by older Keras/TF versions
# ('time_major', 'implementation'), which newer Keras constructors reject,
# so legacy .h5 checkpoints can still be deserialized.
class CustomGRU(GRU):
    def __init__(self, *args, **kwargs):
        # Silently discard the legacy-only constructor arguments; everything
        # else (positional or keyword) is forwarded unchanged to the stock
        # GRU layer. *args support lets the class also be constructed
        # directly, e.g. CustomGRU(64), not only from a saved config.
        kwargs.pop('time_major', None)
        kwargs.pop('implementation', None)
        super().__init__(*args, **kwargs)
12
 
13
# Register the custom layer under BOTH names. The key that matters is
# 'GRU': layers inside the saved .h5 config are stored with
# class_name == 'GRU', and that is the key Keras looks up during
# deserialization — registering only 'CustomGRU' would never be consulted
# and the load would still fail on the legacy 'time_major' argument.
custom_objects = {'GRU': CustomGRU, 'CustomGRU': CustomGRU}

# Load the pre-trained character-level poem model with the override in
# place so legacy GRU configs deserialize cleanly.
reconstructed_model = keras.models.load_model(
    "poems_generation_GRU (1).h5", custom_objects=custom_objects
)
 
18
 
19
def generate_text(model, temperature, start_string):
    """Generate ~1000 characters of Arabic text from a character-level model.

    Parameters:
        model: stateful Keras model mapping a batch of character ids to
            per-character logits (used here with batch size 1).
        temperature: sampling temperature. Lower values give more
            predictable text, higher values more surprising text. Clamped
            away from 0 because the UI slider allows 0, which would divide
            by zero when scaling the logits.
        start_string: seed text. Characters outside the training vocabulary
            are skipped instead of crashing with a KeyError.

    Returns:
        start_string concatenated with the generated characters.

    Raises:
        ValueError: if no character of start_string is in the vocabulary.
    """
    # Vocabulary fixed at training time: tab, newline, space + Arabic letters.
    char2idx={'\t': 0, '\n': 1, ' ': 2, 'ء': 3, 'آ': 4, 'أ': 5, 'ؤ': 6, 'إ': 7, 'ئ': 8, 'ا': 9, 'ب': 10, 'ة': 11, 'ت': 12, 'ث': 13, 'ج': 14, 'ح': 15, 'خ': 16, 'د': 17, 'ذ': 18, 'ر': 19, 'ز': 20, 'س': 21, 'ش': 22, 'ص': 23, 'ض': 24, 'ط': 25, 'ظ': 26, 'ع': 27, 'غ': 28, 'ف': 29, 'ق': 30, 'ك': 31, 'ل': 32, 'م': 33, 'ن': 34, 'ه': 35, 'و': 36, 'ى': 37, 'ي': 38}
    idx2char=['\t', '\n', ' ', 'ء', 'آ', 'أ', 'ؤ', 'إ', 'ئ', 'ا', 'ب', 'ة', 'ت', 'ث', 'ج', 'ح', 'خ', 'د', 'ذ', 'ر', 'ز', 'س', 'ش', 'ص', 'ض', 'ط', 'ظ', 'ع', 'غ', 'ف', 'ق', 'ك', 'ل', 'م', 'ن', 'ه', 'و', 'ى', 'ي']

    # Number of characters to generate.
    num_generate = 1000

    # Guard against temperature == 0 (reachable from the Gradio slider),
    # which would raise a division-by-zero below.
    temperature = max(float(temperature), 1e-6)

    # Vectorize the seed, silently dropping out-of-vocabulary characters
    # (the original code raised an unhandled KeyError on them).
    input_eval = [char2idx[s] for s in start_string if s in char2idx]
    if not input_eval:
        raise ValueError(
            "start_string contains no characters from the model vocabulary"
        )
    input_eval = tf.expand_dims(input_eval, 0)

    # Accumulates generated characters; joined once at the end.
    text_generated = []

    # Batch size == 1: clear the recurrent state before generating.
    model.reset_states()
    for _ in range(num_generate):
        predictions = model(input_eval)
        # Remove the batch dimension -> (seq_len, vocab_size) logits.
        predictions = tf.squeeze(predictions, 0)

        # Sample the next character id from the temperature-scaled logits;
        # [-1, 0] takes the sample for the last time step.
        predictions = predictions / temperature
        predicted_id = tf.random.categorical(predictions, num_samples=1)[-1, 0].numpy()

        # Feed the sampled character back in as the next input.
        input_eval = tf.expand_dims([predicted_id], 0)

        text_generated.append(idx2char[predicted_id])

    return (start_string + ''.join(text_generated))
54
+
55
def generate_poem(start, temperature):
    # Thin Gradio adapter: hand the seed text and sampling temperature to
    # the character-level generator backed by the loaded model.
    seed = u"" + start
    return generate_text(reconstructed_model, temperature, start_string=seed)
57
+
58
# Wire up the web UI: a free-text box for the seed plus a temperature
# slider in [0, 1] (defaulting to 1), producing the generated poem as text.
temperature_slider = gr.Slider(0, 1, value=1)
iface = gr.Interface(
    fn=generate_poem,
    inputs=["text", temperature_slider],
    outputs="text",
)
iface.launch()