Javierss committed on
Commit
e2b757a
1 Parent(s): 21e5561
.vscode/settings.json ADDED
@@ -0,0 +1,3 @@
+ {
+     "ros.distro": "noetic"
+ }
app.py CHANGED
@@ -1,7 +1,15 @@
+
+ import time
  import gradio as gr
 
- def greet(name):
-     return "Hello " + name + "!!"
 
- iface = gr.Interface(fn=greet, inputs="text", outputs="text")
- iface.launch()
+ def slow_echo(message, history):
+     for i in range(len(message)):
+         time.sleep(0.05)
+         yield "You typed: " + message[: i + 1]
+
+
+ demo = gr.ChatInterface(slow_echo).queue()
+
+ if __name__ == "__main__":
+     demo.launch()
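The updated app.py replaces the one-shot greet/gr.Interface demo with a streaming chatbot: slow_echo is a generator, and gr.ChatInterface re-renders the chat message on every yield, which produces the typewriter effect. A minimal sketch of the generator's behaviour outside Gradio, with slow_echo as defined above (the input string is illustrative; the history argument is unused by this function):

for partial in slow_echo("hola", history=[]):
    print(partial)
# You typed: h
# You typed: ho
# You typed: hol
# You typed: hola
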
config/lang.json ADDED
@@ -0,0 +1,124 @@
1
+ {
2
+ "SPA": {
3
+ "Game": {
4
+ "Introduction_0": "Bienvenido a Semantrix, el emocionante Juego de la Adivinanza Semántica.",
5
+ "Introduction_1": "¿Quieres saber cómo se juega?",
6
+ "Rules_0": "Veamos cómo se juega:",
7
+ "Rules_1": "Tu misión es adivinar una palabra secreta que yo he escogido, pero no te preocupes, te ayudaré en el camino.",
8
+ "Rules_2": "Lanza al aire la primera palabra que se te ocurra. Te daré pistas diciéndote si estás caliente, es decir muy cerca de adivinarla o frío, es decir, muy lejos de la palabra.",
9
+ "Rules_3": "Adicionalmente, cada palabra que propongas recibirá una puntuación entre 0 y 10; un puntaje alto significa que estás muy cerca de adivinar la palabra secreta.",
10
+ "Rules_4": "Si veo que estás un poco perdido, estaré aquí para darte pistas que te ayudarán a acercarte a la palabra secreta.",
11
+ "Rules_5": " Como ayuda extra, mostraré en mi tablet un ranking de todas las palabras que has propuesto, ordenadas según su puntuación. Así podrás tener una idea mejor de qué tan cerca están y qué palabras funcionan mejor.",
12
+ "Rules_6": "Serás el ganador cuando adivines correctamente la palabra secreta. ¡No desistas, lo tienes al alcance!",
13
+ "Rules_7": "Así que, ¡enciende tu mente, confía en tus ideas y por sobre todo, pasa un buen rato! Este es un juego en el que cada palabra, cada puntuación y cada pista te acerca a tu victoria. ¡Mucha suerte!",
14
+ "Difficulty_presentation_Full": "En este emocionante desafío de adivinanza semántica, puedes elegir cuán grande quieres que sea el reto. Aquí te presento los niveles de dificultad:\n\nFácil: ¡Es hora de calentar motores! En este nivel, te daré pistas evidentes para que puedas adivinar la palabra secreta de manera más rápida. Si estás comenzando a jugar o simplemente quieres pasar un buen rato sin mucha presión, ¡este es tu nivel!\n\nNormal: Aquí es donde las cosas comienzan a ponerse interesantes. En este nivel, solo te daré pistas cuando te vea muy perdido. Es bueno para aquellos jugadores que quieren un reto, pero sin ser tan duros consigo mismos.\n\nDifícil: ¿Listo para un verdadero desafío? En este nivel, te ayudaré solo cuando te vea realmente perdido, y prepárate, porque las palabras pueden llegar a ser más complejas. Para esos pensadores agudos a los que les encanta un buen rompecabezas.\n\nExperto: ¿Eres un maestro de las palabras? Este es el camino menos transitado, para aquellos campeones de la semántica que buscan la pura adrenalina del reto. No te daré ninguna pista y las palabras serán complejas. Aquí es donde puedes demostrar tu verdadero poder.\n\nRecuerda, ganes o pierdas, cada nivel está diseñado para hacerte disfrutar y mejorar tus habilidades de adivinanza y comprensión de las palabras. ¡Escoge tu nivel y empieza a jugar!",
15
+ "Difficulty": "Elige tu nivel de dificultad",
16
+ "New_word": "Nueva palabra: ",
17
+ "Feedback_0": "Helado, puntuación: ",
18
+ "Feedback_1": "Frío, puntuación: ",
19
+ "Feedback_2": "Templado, puntuación: ",
20
+ "Feedback_3": "Caliente, puntuación: ",
21
+ "Feedback_4": "Quemando, puntuación: ",
22
+ "Feedback_5": "Ardiendo, puntuación: ",
23
+ "Feedback_6": "Te estás acercando",
24
+ "Feedback_7": "Te estás alejando",
25
+ "Feedback_8": "¡Has ganado, ENHORABUENA!",
26
+ "Feedback_9": "La palabra secreta era: ",
27
+ "Feedback_10": "Aunque no fue una victoria esta vez, ¡no temas! ¡Cada intento es una nueva oportunidad para brillar! ¡Sigue adelante!",
28
+ "secret_word": "PALABRA SECRETA",
29
+ "hint_intro": [
30
+ "Parece que las palabras están jugando al escondite contigo. ¿Necesitas una ayudita? Aquí va una pista:",
31
+ "¡Vamos, estás tan cerca de descifrar el enigma semántico! Pero si sientes que te falta un empujón, aquí tienes una pista:",
32
+ "Tu mente está explorando este campo semántico como un detective, ¡pero incluso los detectives a veces necesitan pistas extra! Así que, aquí va una para ti:",
33
+ "El camino semántico puede volverse un poco sinuoso a veces. No te preocupes, estoy aquí para allanar el camino con una pista:",
34
+ "Las palabras son como piezas de un rompecabezas, y sé que estás cerca de completar la imagen. Aquí va una pista para encajar las piezas restantes:",
35
+ "Estás navegando por las aguas semánticas con destreza, ¡pero incluso los capitanes expertos pueden necesitar un faro de vez en cuando! Aquí está tu faro, tu pista:",
36
+ "La danza de las palabras puede ser complicada, pero no te preocupes, estoy aquí para ser tu guía de baile. Aquí tienes una pista para que sigas moviéndote con gracia:"
37
+ ]
38
+ },
39
+ "Hint": {
40
+ "secret_word": "La palabra secreta",
41
+ "hint_0_0": "Define brevemente \"",
42
+ "hint_0_1": "\" sin incluir la propia palabra ni palabras de su familia léxica. En caso de usarla refiérete a ",
43
+ "hint_0_2": " como \"la palabra secreta\":",
44
+ "hint_0_3": "Podría definir la palabra secreta como:\n",
45
+ "hint_1_0": "Representa la palabra ",
46
+ "hint_1_1": " con emojis:",
47
+ "hint_1_2": "Voy a intentar representarte la palabra usando emojis:\n",
48
+ "hint_2_0": "Da una pista en forma de poema de 4 versos sobre ",
49
+ "hint_2_1": " que no incluya la propia palabra:",
50
+ "hint_2_2": "Me he inspirado un poco, voy a recitar un poema sobre la palabra secreta:\n",
51
+ "hint_3_0": "Dime tres palabras aleatorias separadas por coma:",
52
+ "hint_3_1": "Dime una palabra relacionada con ",
53
+ "hint_3_2": " pero no puede ser la propia palabra ni de su familia léxica. No añadas texto adicional",
54
+ "hint_3_3": "Voy a pensar cuatro palabras y te muestro entre ellas cuál se acerca más a la palabra secreta:\n",
55
+ "hint_4_0": "Dime el título de una película real donde aparece ",
56
+ "hint_4_1": ". Di únicamente el título, nada más:",
57
+ "hint_4_2": "Representa la película ",
58
+ "hint_4_3": " únicamente con unos pocos emojis:",
59
+ "hint_4_4": "He pensado en una película relacionada con la palabra secreta y te la voy a representar con emojis, la película es:\n",
60
+ "curiosity": "Cuéntame una curiosidad sobre \""
61
+ }
62
+ },
63
+ "ENG": {
64
+ "Game": {
65
+ "Introduction_0": "Welcome to Semantrix, the exciting Semantic Guessing Game.",
66
+ "Introduction_1": "Do you want to know how to play?",
67
+ "Rules_0": "Let's see how to play:",
68
+ "Rules_1": "Your mission is to guess a secret word that I have chosen, but don't worry, I'll help you along the way.",
69
+ "Rules_2": "Throw out the first word that comes to your mind. I'll give you hints by telling you if you're hot, meaning very close to guessing it, or cold, meaning very far from the word.",
70
+ "Rules_3": "Additionally, each word you propose will receive a score between 0 and 10; a high score means you are very close to guessing the secret word.",
71
+ "Rules_4": "If I see that you're a bit lost, I'll be here to give you clues that will help you get closer to the secret word.",
72
+ "Rules_5": "As an extra help, I'll display on my tablet a ranking of all the words you've proposed, sorted by their score. This way, you can get a better idea of how close they are and which words work better.",
73
+ "Rules_6": "You will be the winner when you correctly guess the secret word. Don't give up; it's within your reach!",
74
+ "Rules_7": "So, ignite your mind, trust your ideas, and above all, have a good time! This is a game where each word, each score, and each clue brings you closer to victory. Good luck!",
75
+ "Difficulty_presentation_Full": "In this thrilling semantic guessing challenge, you can choose how big you want the challenge to be. Here are the difficulty levels:\n\nEasy: Time to warm up! In this level, I'll give you obvious clues so you can guess the secret word more quickly. If you're starting to play or just want to have a good time without much pressure, this is your level!\n\nNormal: This is where things start to get interesting. In this level, I'll only give you hints when I see you're very lost. It's good for players who want a challenge but without being too hard on themselves.\n\nHard: Ready for a real challenge? In this level, I'll help you only when I see you're truly lost, and get ready because the words can become more complex. For those sharp thinkers who love a good puzzle.\n\nExpert: Are you a master of words? This is the less-traveled path, for those semantic champions seeking the pure adrenaline of the challenge. I won't give you any hints, and the words will be complex. Here's where you can showcase your true power.\n\nRemember, win or lose, each level is designed to make you enjoy and improve your guessing and word comprehension skills. Choose your level and start playing!",
76
+ "Difficulty": "Choose your difficulty level",
77
+ "New_word": "New word: ",
78
+ "Feedback_0": "Ice-cold, score: ",
79
+ "Feedback_1": "Cold, score: ",
80
+ "Feedback_2": "Warm, score: ",
81
+ "Feedback_3": "Hot, score: ",
82
+ "Feedback_4": "Burning, score: ",
83
+ "Feedback_5": "On fire, score: ",
84
+ "Feedback_6": "You're getting closer",
85
+ "Feedback_7": "You're moving away",
86
+ "Feedback_8": "You've won, CONGRATULATIONS!",
87
+ "Feedback_9": "The secret word was: ",
88
+ "Feedback_10": "Though it wasn't a win this time, fear not! Each try is a new chance to shine! Keep going!",
89
+ "secret_word": "SECRET WORD",
90
+ "hint_intro": [
91
+ "It seems like the words are playing hide-and-seek with you. Need a little help? Here's a hint:",
92
+ "Come on, you're so close to unraveling the semantic mystery! But if you feel like you need a push, here's a hint:",
93
+ "Your mind is exploring this semantic field like a detective, but even detectives sometimes need extra clues! So, here's one for you:",
94
+ "The semantic path can get a bit winding at times. Don't worry, I'm here to smooth the way with a hint:",
95
+ "Words are like pieces of a puzzle, and I know you're close to completing the picture. Here's a hint to fit the remaining pieces:",
96
+ "You're navigating the semantic waters skillfully, but even expert captains may need a lighthouse now and then! Here's your lighthouse, your hint:",
97
+ "The dance of words can be intricate, but don't worry, I'm here to be your dance guide. Here's a hint to keep you moving with grace:"
98
+ ]
99
+ },
100
+ "Hint": {
101
+ "secret_word": "The secret word",
102
+ "hint_0_0": "Briefly define \"",
103
+ "hint_0_1": "\" without including the word itself or words from its lexical family. If using it, refer to ",
104
+ "hint_0_2": " as \"the secret word\":",
105
+ "hint_0_3": "I could define the secret word as:\n",
106
+ "hint_1_0": "Represent the word ",
107
+ "hint_1_1": " with emojis:",
108
+ "hint_1_2": "I'll try to represent the word using emojis:\n",
109
+ "hint_2_0": "Give a hint in the form of a 4-verse poem about ",
110
+ "hint_2_1": " that doesn't include the word itself:",
111
+ "hint_2_2": "I've been a bit inspired; I'll recite a poem about the secret word:\n",
112
+ "hint_3_0": "Tell me three random words separated by commas:",
113
+ "hint_3_1": "Tell me a word related to ",
114
+ "hint_3_2": " but it can't be the word itself or from its lexical family. Don't add any additional text",
115
+ "hint_3_3": "I'll think of four words and show you which one comes closest to the secret word:\n",
116
+ "hint_4_0": "Tell me the title of a real movie where ",
117
+ "hint_4_1": " appears. Only say the title, nothing more:",
118
+ "hint_4_2": "Represent the movie ",
119
+ "hint_4_3": " only with a few emojis:",
120
+ "hint_4_4": "I've thought of a movie related to the secret word, and I'll represent it with emojis. The movie is:\n",
121
+ "curiosity": "Tell me an interesting fact about \""
122
+ }
123
+ }
124
+ }
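config/lang.json carries every user-facing string in Spanish ("SPA") and English ("ENG"), split into the "Game" texts and the "Hint" prompt fragments. Later in this commit, juego_embbedings_text_config.py loads one language block into a small attribute wrapper; a short sketch of that access pattern (the lang choice and printed score are illustrative):

import json

class DictWrapper:  # same helper defined in juego_embbedings_text_config.py
    def __init__(self, data_dict):
        self.__dict__.update(data_dict)

with open("config/lang.json", "r") as file:
    Config_full = json.load(file)

Config = DictWrapper(Config_full["SPA"]["Game"])  # lang == 0 selects Spanish
print(Config.Feedback_3 + str(7.1))               # -> "Caliente, puntuación: 7.1"
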
config/secret.json ADDED
@@ -0,0 +1,1043 @@
1
+ {
2
+ "ENG": {
3
+ "basic": [
4
+ "joy",
5
+ "cricket",
6
+ "ant",
7
+ "tree",
8
+ "glass",
9
+ "mountains",
10
+ "dress",
11
+ "rain",
12
+ "soap",
13
+ "finger",
14
+ "man",
15
+ "earth",
16
+ "stick",
17
+ "office",
18
+ "sadness",
19
+ "radio",
20
+ "keyboard",
21
+ "dirt",
22
+ "plum",
23
+ "message",
24
+ "eel",
25
+ "king",
26
+ "wild boar",
27
+ "nut",
28
+ "planet",
29
+ "party",
30
+ "son",
31
+ "cellphone",
32
+ "visit",
33
+ "tab",
34
+ "elegance",
35
+ "gloves",
36
+ "turtle",
37
+ "lock",
38
+ "plane",
39
+ "leaf",
40
+ "suitcase",
41
+ "cookie",
42
+ "hand",
43
+ "lamp",
44
+ "woman",
45
+ "pen",
46
+ "mountain",
47
+ "leaves",
48
+ "wind",
49
+ "helmet",
50
+ "spring",
51
+ "atom",
52
+ "dad",
53
+ "father-in-law",
54
+ "watch",
55
+ "cabin",
56
+ "map",
57
+ "cow",
58
+ "rectangle",
59
+ "shoe",
60
+ "parrot",
61
+ "insect",
62
+ "letter",
63
+ "baby",
64
+ "keys",
65
+ "toucan",
66
+ "wall",
67
+ "jacket",
68
+ "waterfall",
69
+ "seat",
70
+ "ear",
71
+ "team",
72
+ "luck",
73
+ "soul",
74
+ "guitar",
75
+ "choir",
76
+ "summer",
77
+ "dream",
78
+ "button",
79
+ "chair",
80
+ "lamb",
81
+ "ease",
82
+ "notebook",
83
+ "curiosity",
84
+ "coin",
85
+ "hanger",
86
+ "violin",
87
+ "singer",
88
+ "camera",
89
+ "snow",
90
+ "pool",
91
+ "container",
92
+ "photograph",
93
+ "shelter",
94
+ "animal",
95
+ "fear",
96
+ "rat",
97
+ "plastic",
98
+ "debate",
99
+ "duck",
100
+ "feather",
101
+ "monkey",
102
+ "people",
103
+ "door",
104
+ "blind",
105
+ "nose",
106
+ "wood",
107
+ "leg",
108
+ "cars",
109
+ "hair",
110
+ "key",
111
+ "stewardess",
112
+ "living room",
113
+ "forest",
114
+ "castle",
115
+ "salad",
116
+ "lock",
117
+ "carnation",
118
+ "molar",
119
+ "bird",
120
+ "table",
121
+ "beach",
122
+ "screen",
123
+ "agenda",
124
+ "person",
125
+ "idea",
126
+ "clothing",
127
+ "wheel",
128
+ "tool",
129
+ "aluminium",
130
+ "house",
131
+ "cup",
132
+ "clouds",
133
+ "jug",
134
+ "circle",
135
+ "brother",
136
+ "farm",
137
+ "sorrow",
138
+ "hook",
139
+ "iron",
140
+ "hump",
141
+ "sound",
142
+ "zoo",
143
+ "photo",
144
+ "toy",
145
+ "crib",
146
+ "war",
147
+ "dressing room",
148
+ "sheep",
149
+ "hail",
150
+ "pilot",
151
+ "grass",
152
+ "spider",
153
+ "hammer",
154
+ "last name",
155
+ "boat",
156
+ "armchair",
157
+ "plate",
158
+ "seal",
159
+ "tiger",
160
+ "lawn",
161
+ "dog",
162
+ "oil",
163
+ "bow",
164
+ "helm",
165
+ "school",
166
+ "can opener",
167
+ "chimney",
168
+ "match",
169
+ "molecule",
170
+ "shirt",
171
+ "hut",
172
+ "calculator",
173
+ "horse",
174
+ "pine grove",
175
+ "paper",
176
+ "ornament",
177
+ "church",
178
+ "uncle",
179
+ "pencil",
180
+ "pig",
181
+ "cheese",
182
+ "cream",
183
+ "time",
184
+ "field",
185
+ "pants",
186
+ "pen",
187
+ "cloud",
188
+ "friend",
189
+ "purse",
190
+ "competition",
191
+ "light",
192
+ "program",
193
+ "cable",
194
+ "key",
195
+ "writing",
196
+ "floor",
197
+ "job",
198
+ "compass",
199
+ "coaster",
200
+ "sun",
201
+ "lights",
202
+ "furniture",
203
+ "dogs",
204
+ "eye",
205
+ "rope",
206
+ "noodles",
207
+ "kindness",
208
+ "sailboat",
209
+ "bed",
210
+ "sand",
211
+ "corner",
212
+ "cat",
213
+ "honey",
214
+ "chocolate",
215
+ "bell",
216
+ "noise",
217
+ "pillar",
218
+ "child",
219
+ "street",
220
+ "rocket",
221
+ "herd",
222
+ "godfather",
223
+ "headphones",
224
+ "nephew",
225
+ "hospital",
226
+ "July",
227
+ "bunch",
228
+ "speaker",
229
+ "brother-in-law",
230
+ "coffee",
231
+ "satellite",
232
+ "screw",
233
+ "books",
234
+ "letter",
235
+ "folder",
236
+ "train",
237
+ "briefcase",
238
+ "school",
239
+ "picture",
240
+ "eagle",
241
+ "love",
242
+ "room",
243
+ "truck",
244
+ "newspapers",
245
+ "tablecloth",
246
+ "notes",
247
+ "heron",
248
+ "painting",
249
+ "fluff",
250
+ "tie",
251
+ "letters",
252
+ "printer",
253
+ "sofa",
254
+ "flower",
255
+ "weasel",
256
+ "lemon",
257
+ "mason",
258
+ "glass",
259
+ "meat",
260
+ "city",
261
+ "designer",
262
+ "lagoon",
263
+ "darkness",
264
+ "meadow",
265
+ "puma",
266
+ "ship",
267
+ "troop",
268
+ "lime",
269
+ "cage",
270
+ "sport",
271
+ "casino",
272
+ "building",
273
+ "firefighter",
274
+ "plant"
275
+ ],
276
+ "advanced": [
277
+ "accountant",
278
+ "hatred",
279
+ "engineer",
280
+ "viewer",
281
+ "window",
282
+ "substance",
283
+ "complaint",
284
+ "platform",
285
+ "toilet",
286
+ "Argentina",
287
+ "Guadeloupe",
288
+ "machinist",
289
+ "explosion",
290
+ "gas",
291
+ "presidency",
292
+ "lake",
293
+ "visit",
294
+ "speech",
295
+ "archipelago",
296
+ "entrepreneur",
297
+ "Temple",
298
+ "utensil",
299
+ "theater",
300
+ "classroom",
301
+ "television",
302
+ "glasses",
303
+ "reptile",
304
+ "whale",
305
+ "fauna",
306
+ "plane",
307
+ "stone",
308
+ "family",
309
+ "discotheque",
310
+ "bomb",
311
+ "waitress",
312
+ "candidate",
313
+ "triangle",
314
+ "famous",
315
+ "auction",
316
+ "belt",
317
+ "atom",
318
+ "bookstore",
319
+ "watch",
320
+ "music",
321
+ "Grandmother",
322
+ "candies",
323
+ "battery",
324
+ "brother",
325
+ "Colombia",
326
+ "hook",
327
+ "sound",
328
+ "government",
329
+ "note",
330
+ "hippopotamus",
331
+ "metal",
332
+ "happiness",
333
+ "baby",
334
+ "tiger",
335
+ "child",
336
+ "vocabulary",
337
+ "grapefruit",
338
+ "cloud",
339
+ "competition",
340
+ "cable",
341
+ "Mexico",
342
+ "program",
343
+ "writing",
344
+ "compass",
345
+ "coaster",
346
+ "program",
347
+ "writing",
348
+ "compass",
349
+ "coaster",
350
+ "lights",
351
+ "water",
352
+ "bottle",
353
+ "floor",
354
+ "rabbit",
355
+ "book",
356
+ "bridge",
357
+ "scarf",
358
+ "school",
359
+ "picture",
360
+ "notes",
361
+ "photo",
362
+ "toy",
363
+ "war",
364
+ "heron",
365
+ "carnation",
366
+ "enthusiasm",
367
+ "molar",
368
+ "bird",
369
+ "table",
370
+ "screen",
371
+ "agenda",
372
+ "person",
373
+ "idea",
374
+ "clothing",
375
+ "wheel",
376
+ "tool",
377
+ "aluminium",
378
+ "house",
379
+ "cup",
380
+ "clouds",
381
+ "jug",
382
+ "circle",
383
+ "Colombia",
384
+ "farm",
385
+ "sorrow",
386
+ "iron",
387
+ "hump",
388
+ "zoo",
389
+ "government",
390
+ "photo",
391
+ "toy",
392
+ "crib",
393
+ "war",
394
+ "dressing room",
395
+ "tornado",
396
+ "sheep",
397
+ "hail",
398
+ "pilot",
399
+ "grass",
400
+ "spider",
401
+ "anguish",
402
+ "hammer",
403
+ "statistics",
404
+ "last name",
405
+ "boat",
406
+ "armchair",
407
+ "plate",
408
+ "river",
409
+ "seal",
410
+ "rhinoceros",
411
+ "shirt",
412
+ "hut",
413
+ "calculator",
414
+ "horse",
415
+ "pine grove",
416
+ "paper",
417
+ "ornament",
418
+ "church",
419
+ "uncle",
420
+ "pencil",
421
+ "pig",
422
+ "cheese",
423
+ "cream",
424
+ "time",
425
+ "vocabulary",
426
+ "grapefruit",
427
+ "field",
428
+ "pants",
429
+ "pen",
430
+ "cloud",
431
+ "friend",
432
+ "purse",
433
+ "competition",
434
+ "light",
435
+ "program",
436
+ "cable",
437
+ "key",
438
+ "writing",
439
+ "Mexico",
440
+ "floor",
441
+ "job",
442
+ "compass",
443
+ "coaster",
444
+ "sun",
445
+ "lights",
446
+ "furniture",
447
+ "dogs",
448
+ "eye",
449
+ "rope",
450
+ "noodles",
451
+ "kindness",
452
+ "sailboat",
453
+ "bed",
454
+ "sand",
455
+ "corner",
456
+ "cat",
457
+ "honey",
458
+ "chocolate",
459
+ "bell",
460
+ "noise",
461
+ "pillar",
462
+ "child",
463
+ "street",
464
+ "rocket",
465
+ "herd",
466
+ "godfather",
467
+ "headphones",
468
+ "nephew",
469
+ "hospital",
470
+ "July",
471
+ "bunch",
472
+ "Chile",
473
+ "speaker",
474
+ "brother-in-law",
475
+ "coffee",
476
+ "satellite",
477
+ "screw",
478
+ "books",
479
+ "letter",
480
+ "folder",
481
+ "train",
482
+ "briefcase",
483
+ "school",
484
+ "picture",
485
+ "eagle",
486
+ "love",
487
+ "room",
488
+ "truck",
489
+ "newspapers",
490
+ "tablecloth",
491
+ "notes",
492
+ "heron",
493
+ "painting",
494
+ "fluff",
495
+ "tie",
496
+ "letters",
497
+ "printer",
498
+ "sofa",
499
+ "flower",
500
+ "weasel",
501
+ "lemon",
502
+ "mason",
503
+ "glass",
504
+ "meat",
505
+ "city",
506
+ "designer",
507
+ "lagoon",
508
+ "darkness",
509
+ "meadow",
510
+ "puma",
511
+ "ship",
512
+ "troop",
513
+ "lime",
514
+ "cage",
515
+ "sport",
516
+ "casino",
517
+ "building",
518
+ "firefighter",
519
+ "plant",
520
+ "clarity"
521
+ ]
522
+ },
523
+ "SPA": {
524
+ "basic": [
525
+ "alegría",
526
+ "cricket",
527
+ "hormiga",
528
+ "árbol",
529
+ "vaso",
530
+ "montañas",
531
+ "vestido",
532
+ "lluvia",
533
+ "jabón",
534
+ "dedo",
535
+ "hombre",
536
+ "tierra",
537
+ "palo",
538
+ "oficina",
539
+ "tristeza",
540
+ "radio",
541
+ "teclado",
542
+ "tierra",
543
+ "ciruela",
544
+ "mensaje",
545
+ "anguila",
546
+ "rey",
547
+ "jabalí",
548
+ "nuez",
549
+ "planeta",
550
+ "fiesta",
551
+ "hijo",
552
+ "teléfono celular",
553
+ "visita",
554
+ "pestaña",
555
+ "elegancia",
556
+ "guantes",
557
+ "tortuga",
558
+ "cerradura",
559
+ "avión",
560
+ "hoja",
561
+ "maleta",
562
+ "galleta",
563
+ "mano",
564
+ "lámpara",
565
+ "mujer",
566
+ "pluma",
567
+ "montaña",
568
+ "hojas",
569
+ "viento",
570
+ "casco",
571
+ "primavera",
572
+ "átomo",
573
+ "papá",
574
+ "suegro",
575
+ "reloj",
576
+ "cabaña",
577
+ "mapa",
578
+ "vaca",
579
+ "rectángulo",
580
+ "zapato",
581
+ "loro",
582
+ "insecto",
583
+ "carta",
584
+ "bebé",
585
+ "llaves",
586
+ "tucán",
587
+ "pared",
588
+ "chaqueta",
589
+ "cascada",
590
+ "asiento",
591
+ "oreja",
592
+ "equipo",
593
+ "suerte",
594
+ "alma",
595
+ "guitarra",
596
+ "coro",
597
+ "verano",
598
+ "sueño",
599
+ "botón",
600
+ "silla",
601
+ "cordero",
602
+ "facilidad",
603
+ "cuaderno",
604
+ "curiosidad",
605
+ "moneda",
606
+ "percha",
607
+ "violín",
608
+ "cantante",
609
+ "cámara",
610
+ "nieve",
611
+ "piscina",
612
+ "contenedor",
613
+ "fotografía",
614
+ "refugio",
615
+ "animal",
616
+ "miedo",
617
+ "rata",
618
+ "plástico",
619
+ "debate",
620
+ "pato",
621
+ "pluma",
622
+ "mono",
623
+ "gente",
624
+ "puerta",
625
+ "persiana",
626
+ "nariz",
627
+ "madera",
628
+ "pierna",
629
+ "autos",
630
+ "cabello",
631
+ "llave",
632
+ "azafata",
633
+ "sala de estar",
634
+ "bosque",
635
+ "castillo",
636
+ "ensalada",
637
+ "cerradura",
638
+ "clavel",
639
+ "molar",
640
+ "pájaro",
641
+ "mesa",
642
+ "playa",
643
+ "pantalla",
644
+ "agenda",
645
+ "persona",
646
+ "idea",
647
+ "ropa",
648
+ "rueda",
649
+ "herramienta",
650
+ "aluminio",
651
+ "casa",
652
+ "taza",
653
+ "nubes",
654
+ "jarra",
655
+ "círculo",
656
+ "hermano",
657
+ "granja",
658
+ "pena",
659
+ "gancho",
660
+ "hierro",
661
+ "joroba",
662
+ "sonido",
663
+ "zoológico",
664
+ "foto",
665
+ "juguete",
666
+ "cuna",
667
+ "guerra",
668
+ "vestuario",
669
+ "oveja",
670
+ "granizo",
671
+ "piloto",
672
+ "césped",
673
+ "araña",
674
+ "martillo",
675
+ "apellido",
676
+ "barco",
677
+ "sillón",
678
+ "plato",
679
+ "foca",
680
+ "tigre",
681
+ "césped",
682
+ "perro",
683
+ "aceite",
684
+ "arco",
685
+ "timón",
686
+ "escuela",
687
+ "abrelatas",
688
+ "chimenea",
689
+ "fósforo",
690
+ "molécula",
691
+ "camisa",
692
+ "choza",
693
+ "calculadora",
694
+ "caballo",
695
+ "pinar",
696
+ "papel",
697
+ "adorno",
698
+ "iglesia",
699
+ "tío",
700
+ "lápiz",
701
+ "cerdo",
702
+ "queso",
703
+ "crema",
704
+ "tiempo",
705
+ "campo",
706
+ "pantalones",
707
+ "pluma",
708
+ "nube",
709
+ "amigo",
710
+ "bolso",
711
+ "competencia",
712
+ "luz",
713
+ "programa",
714
+ "cable",
715
+ "llave",
716
+ "escritura",
717
+ "piso",
718
+ "trabajo",
719
+ "brújula",
720
+ "posavasos",
721
+ "sol",
722
+ "luces",
723
+ "muebles",
724
+ "perros",
725
+ "ojo",
726
+ "cuerda",
727
+ "fideos",
728
+ "bondad",
729
+ "velero",
730
+ "cama",
731
+ "arena",
732
+ "esquina",
733
+ "gato",
734
+ "miel",
735
+ "chocolate",
736
+ "campana",
737
+ "ruido",
738
+ "pilar",
739
+ "niño",
740
+ "calle",
741
+ "cohete",
742
+ "manada",
743
+ "padrino",
744
+ "auriculares",
745
+ "sobrino",
746
+ "hospital",
747
+ "julio",
748
+ "ramo",
749
+ "altavoz",
750
+ "cuñado",
751
+ "café",
752
+ "satélite",
753
+ "tornillo",
754
+ "libros",
755
+ "carta",
756
+ "carpeta",
757
+ "tren",
758
+ "maletín",
759
+ "escuela",
760
+ "imagen",
761
+ "águila",
762
+ "amor",
763
+ "habitación",
764
+ "camión",
765
+ "periódicos",
766
+ "mantel",
767
+ "notas",
768
+ "garza",
769
+ "pintura",
770
+ "pelusa",
771
+ "corbata",
772
+ "letras",
773
+ "impresora",
774
+ "sofá",
775
+ "flor",
776
+ "comadreja",
777
+ "limón",
778
+ "albañil",
779
+ "vidrio",
780
+ "carne",
781
+ "ciudad",
782
+ "diseñador",
783
+ "laguna",
784
+ "oscuridad",
785
+ "prado",
786
+ "puma",
787
+ "barco",
788
+ "tropa",
789
+ "lima",
790
+ "jaula",
791
+ "deporte",
792
+ "casino",
793
+ "edificio",
794
+ "bombero",
795
+ "planta"
796
+ ],
797
+ "advanced": [
798
+ "contador",
799
+ "odio",
800
+ "ingeniero",
801
+ "espectador",
802
+ "ventana",
803
+ "sustancia",
804
+ "queja",
805
+ "plataforma",
806
+ "inodoro",
807
+ "Argentina",
808
+ "Guadalupe",
809
+ "mecánico",
810
+ "explosión",
811
+ "gas",
812
+ "presidencia",
813
+ "lago",
814
+ "visita",
815
+ "discurso",
816
+ "archipiélago",
817
+ "empresario",
818
+ "templo",
819
+ "utensilio",
820
+ "teatro",
821
+ "aula",
822
+ "televisión",
823
+ "gafas",
824
+ "reptil",
825
+ "ballena",
826
+ "fauna",
827
+ "avión",
828
+ "piedra",
829
+ "familia",
830
+ "discoteca",
831
+ "bomba",
832
+ "mesera",
833
+ "candidato",
834
+ "triángulo",
835
+ "famoso",
836
+ "subasta",
837
+ "cinturón",
838
+ "átomo",
839
+ "librería",
840
+ "reloj",
841
+ "música",
842
+ "abuela",
843
+ "caramelos",
844
+ "batería",
845
+ "hermano",
846
+ "Colombia",
847
+ "gancho",
848
+ "sonido",
849
+ "gobierno",
850
+ "nota",
851
+ "hipopótamo",
852
+ "metal",
853
+ "felicidad",
854
+ "bebé",
855
+ "tigre",
856
+ "niño",
857
+ "vocabulario",
858
+ "pomelo",
859
+ "nube",
860
+ "competencia",
861
+ "cable",
862
+ "México",
863
+ "programa",
864
+ "escritura",
865
+ "brújula",
866
+ "posavasos",
867
+ "programa",
868
+ "escritura",
869
+ "brújula",
870
+ "posavasos",
871
+ "luces",
872
+ "agua",
873
+ "botella",
874
+ "piso",
875
+ "conejo",
876
+ "libro",
877
+ "puente",
878
+ "bufanda",
879
+ "escuela",
880
+ "imagen",
881
+ "notas",
882
+ "foto",
883
+ "juguete",
884
+ "guerra",
885
+ "garza",
886
+ "clavel",
887
+ "entusiasmo",
888
+ "molar",
889
+ "pájaro",
890
+ "mesa",
891
+ "pantalla",
892
+ "agenda",
893
+ "persona",
894
+ "idea",
895
+ "ropa",
896
+ "rueda",
897
+ "herramienta",
898
+ "aluminio",
899
+ "casa",
900
+ "taza",
901
+ "nubes",
902
+ "jarra",
903
+ "círculo",
904
+ "Colombia",
905
+ "granja",
906
+ "pena",
907
+ "hierro",
908
+ "joroba",
909
+ "zoológico",
910
+ "gobierno",
911
+ "foto",
912
+ "juguete",
913
+ "cuna",
914
+ "guerra",
915
+ "vestuario",
916
+ "oveja",
917
+ "granizo",
918
+ "piloto",
919
+ "césped",
920
+ "araña",
921
+ "angustia",
922
+ "martillo",
923
+ "estadísticas",
924
+ "apellido",
925
+ "barco",
926
+ "sillón",
927
+ "plato",
928
+ "río",
929
+ "foca",
930
+ "rinoceronte",
931
+ "camisa",
932
+ "choza",
933
+ "calculadora",
934
+ "caballo",
935
+ "pinar",
936
+ "papel",
937
+ "adorno",
938
+ "iglesia",
939
+ "tío",
940
+ "lápiz",
941
+ "cerdo",
942
+ "queso",
943
+ "crema",
944
+ "tiempo",
945
+ "vocabulario",
946
+ "pomelo",
947
+ "campo",
948
+ "pantalones",
949
+ "pluma",
950
+ "nube",
951
+ "amigo",
952
+ "bolso",
953
+ "competencia",
954
+ "luz",
955
+ "programa",
956
+ "cable",
957
+ "llave",
958
+ "escritura",
959
+ "México",
960
+ "piso",
961
+ "trabajo",
962
+ "brújula",
963
+ "posavasos",
964
+ "sol",
965
+ "luces",
966
+ "muebles",
967
+ "perros",
968
+ "ojo",
969
+ "cuerda",
970
+ "fideos",
971
+ "bondad",
972
+ "velero",
973
+ "cama",
974
+ "arena",
975
+ "esquina",
976
+ "gato",
977
+ "miel",
978
+ "chocolate",
979
+ "campana",
980
+ "ruido",
981
+ "pilar",
982
+ "niño",
983
+ "calle",
984
+ "cohete",
985
+ "manada",
986
+ "padrino",
987
+ "auriculares",
988
+ "sobrino",
989
+ "hospital",
990
+ "julio",
991
+ "ramo",
992
+ "Chile",
993
+ "altavoz",
994
+ "cuñado",
995
+ "café",
996
+ "satélite",
997
+ "tornillo",
998
+ "libros",
999
+ "carta",
1000
+ "carpeta",
1001
+ "tren",
1002
+ "maletín",
1003
+ "escuela",
1004
+ "imagen",
1005
+ "águila",
1006
+ "amor",
1007
+ "habitación",
1008
+ "camión",
1009
+ "periódicos",
1010
+ "mantel",
1011
+ "notas",
1012
+ "garza",
1013
+ "pintura",
1014
+ "pelusa",
1015
+ "corbata",
1016
+ "letras",
1017
+ "impresora",
1018
+ "sofá",
1019
+ "flor",
1020
+ "comadreja",
1021
+ "limón",
1022
+ "albañil",
1023
+ "vidrio",
1024
+ "carne",
1025
+ "ciudad",
1026
+ "diseñador",
1027
+ "laguna",
1028
+ "oscuridad",
1029
+ "prado",
1030
+ "puma",
1031
+ "barco",
1032
+ "tropa",
1033
+ "lima",
1034
+ "jaula",
1035
+ "deporte",
1036
+ "casino",
1037
+ "edificio",
1038
+ "bombero",
1039
+ "planta",
1040
+ "claridad"
1041
+ ]
1042
+ }
1043
+ }
data/hospital_2024-01-09 12:01:14.954756 ADDED
@@ -0,0 +1,9 @@
+ ['#8', 'médico', 6.33]
+ ['#7', 'doctor', 6.13]
+ ['#9', 'enfermo', 5.72]
+ ['#1', 'álvaro', 3.45]
+ ['#4', 'persona', 3.16]
+ ['#6', 'morir', 2.71]
+ ['#5', 'caca', 2.57]
+ ['#2', 'amigo', 2.4]
+ ['#3', 'saltar', 2.3]
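Each data/* file written at the end of a game holds one ranking row per line as a Python list literal ([rank, word, score]), copied from ranking.txt by juego_embbedings_text_config.py. A hedged sketch for reading one back (the filename is the one added in this commit):

import ast

with open("data/hospital_2024-01-09 12:01:14.954756", encoding="utf-8") as f:
    rows = [ast.literal_eval(line) for line in f if line.strip()]

print(rows[0])  # ['#8', 'médico', 6.33]
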
data/pomelo_2024-01-09 11:59:50.395465 ADDED
File without changes
display.py ADDED
@@ -0,0 +1,192 @@
1
+ # %%
2
+ import asyncio
3
+ import pickle as pk
4
+ import time
5
+ import warnings
6
+
7
+ import matplotlib as mpl
8
+ import matplotlib.pyplot as plt
9
+ import mpl_toolkits.mplot3d.art3d as art3d
10
+ import numpy as np
11
+ import torch
12
+ from matplotlib import cm
13
+ from matplotlib.animation import FuncAnimation
14
+ from matplotlib.gridspec import GridSpec
15
+ from matplotlib.patches import Circle, PathPatch
16
+ from mpl_toolkits.mplot3d import Axes3D, axes3d
17
+ from sklearn.decomposition import PCA
18
+
19
+ warnings.filterwarnings("ignore", category=UserWarning)
20
+
21
+ # file_path = "word_embeddings_mpnet.pth"
22
+ # embeddings_dict = torch.load(file_path)
23
+
24
+ # # %%
25
+ # words = list(embeddings_dict.keys())
26
+
27
+ # sentences = [[word] for word in words]
28
+
29
+ # vectors = list(embeddings_dict.values())
30
+ # vectors_list = []
31
+ # for item in vectors:
32
+ # vectors_list.append(item.tolist())
33
+
34
+ # vector_list = vectors_list[:10]
35
+ # # %%
36
+ # # pca = PCA(n_components=3)
37
+ # # pca = pca.fit(vectors_list)
38
+ # # pk.dump(pca, open("pca_mpnet.pkl", "wb"))
39
+ # score = np.array([0, 0, 0, 0, 0, 1, 1, 1, 1, 1])
40
+
41
+
42
+ # %%
43
+ def display_words(words, vector_list, score, bold):
44
+ # %%
45
+ plt.ioff()
46
+ fig = plt.figure()
47
+
48
+ ax = fig.add_subplot(111, projection="3d")
49
+ plt.rcParams["image.cmap"] = "magma"
50
+ colormap = cm.get_cmap("magma") # You can choose any colormap you like
51
+
52
+ # Normalize the float values to the range [0, 1]
53
+ score = np.array(score)
54
+ norm = plt.Normalize(0, 10) # type: ignore
55
+ colors = colormap(norm(score))
56
+ ax.xaxis.pane.fill = False
57
+ ax.yaxis.pane.fill = False
58
+ ax.w_zaxis.set_pane_color(
59
+ (0.87, 0.91, 0.94, 0.8)
60
+ ) # Set the z-axis face color (gray)
61
+ ax.xaxis.line.set_color((1.0, 1.0, 1.0, 0.0)) # Transparent x-axis line
62
+ ax.yaxis.line.set_color((1.0, 1.0, 1.0, 0.0)) # Transparent y-axis line
63
+ ax.zaxis.line.set_color((1.0, 1.0, 1.0, 0.0))
64
+
65
+ # Turn off axis labels
66
+ ax.set_xticks([])
67
+ ax.set_yticks([])
68
+ ax.set_zticks([])
69
+ ax.grid(False)
70
+ # %%
71
+ data_pca = vector_list
72
+
73
+ if len(data_pca) > 1:
74
+ # for i in range(len(data_pca) - 1):
75
+ # data = np.append(
76
+ # data_pca,
77
+ # [norm_distance(data_pca[0], data_pca[i + 1], score[i + 1])],
78
+ # axis=0,
79
+ # )
80
+
81
+ # Create copies of the zero-th element of data_pca
82
+ data_pca0 = np.repeat(data_pca[0][None, :], len(data_pca) - 1, axis=0)
83
+
84
+ # Use these arrays to construct the calls to norm_distance_v
85
+ data = norm_distance_v(data_pca0, data_pca[1:], score[1:])
86
+
87
+ else:
88
+ data = data_pca.transpose()
89
+
90
+ (
91
+ x,
92
+ y,
93
+ z,
94
+ ) = data
95
+
96
+ center_x = x[0]
97
+ center_y = y[0]
98
+ center_z = z[0]
99
+ # %%
100
+ ax.autoscale(enable=True, axis="both", tight=True)
101
+ # if bold == -1:
102
+ # k = len(words) - 1
103
+ # else:
104
+ # k = repeated
105
+ for i, word in enumerate(words):
106
+ if i == bold:
107
+ fontsize = "large"
108
+ fontweight = "demibold"
109
+ else:
110
+ fontsize = "medium"
111
+ fontweight = "normal"
112
+
113
+ ax.text(
114
+ x[i],
115
+ y[i],
116
+ z[i] + 0.05,
117
+ word,
118
+ fontsize=fontsize,
119
+ fontweight=fontweight,
120
+ alpha=1,
121
+ )
122
+ # ax.text(
123
+ # x[0],
124
+ # y[0],
125
+ # z[0] + 0.05,
126
+ # words[0],
127
+ # fontsize="medium",
128
+ # fontweight="normal",
129
+ # alpha=1,
130
+ # )
131
+ ax.scatter(x, y, z, c="black", marker="o", s=75, cmap="magma", vmin=0, vmax=10)
132
+ scatter = ax.scatter(
133
+ x,
134
+ y,
135
+ z,
136
+ marker="o",
137
+ s=70,
138
+ c=colors,
139
+ cmap="magma",
140
+ vmin=0,
141
+ vmax=10,
142
+ )
143
+
144
+ # cax = fig.add_subplot(gs[1, :]) # cb = plt.colorbar(sc, cax=cax)
145
+ # a = fig.colorbar(
146
+ # mappable=scatter,
147
+ # ax=ax,
148
+ # cmap="magma",
149
+ # norm=mpl.colors.Normalize(vmin=0, vmax=10),
150
+ # orientation="horizontal",
151
+ # )
152
+ fig.colorbar(
153
+ cm.ScalarMappable(norm=mpl.colors.Normalize(0, 10), cmap="magma"),
154
+ ax=ax,
155
+ orientation="horizontal",
156
+ )
157
+ # cbar.set_label("Score Values")
158
+
159
+ def update(frame):
160
+ distance = 0.5 * (score.max() - score.min())
161
+ ax.set_xlim(center_x - distance, center_x + distance)
162
+ ax.set_ylim(center_y - distance, center_y + distance)
163
+ ax.set_zlim(center_z - distance, center_z + distance)
164
+ ax.view_init(elev=20, azim=frame)
165
+
166
+ # %%
167
+
168
+ # Create the animation
169
+ frames = np.arange(0, 360, 5)
170
+ ani = FuncAnimation(fig, update, frames=frames, interval=120)
171
+
172
+ ani.save("3d_rotation.gif", writer="pillow", dpi=140)
173
+ plt.close(fig)
174
+
175
+
176
+ # %%
177
+ def norm_distance_v(orig, points, distances):
178
+ # Calculate the vector AB
179
+
180
+ AB = points - orig
181
+
182
+ # Calculate the normalized vector AB
183
+ Normalized_AB = AB / np.linalg.norm(AB, axis=1, keepdims=True)
184
+
185
+ # Specify the desired distance from point A
186
+ d = 10 - (distances.reshape(-1, 1) * 1)
187
+
188
+ # Calculate the new points C
189
+ C = orig + (Normalized_AB * d)
190
+ C = np.append([orig[0]], C, axis=0)
191
+
192
+ return np.array([C[:, 0], C[:, 1], C[:, 2]])
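norm_distance_v is what keeps the 3-D plot readable: each guess is re-positioned along the direction from the secret word's PCA point so that its distance from that point becomes 10 - score, i.e. better guesses are drawn closer to the centre, and the secret point itself is prepended as the first column of the result. A small numeric sketch with made-up 3-D coordinates and scores:

import numpy as np
from display import norm_distance_v  # assumes display.py is importable

orig = np.zeros((2, 3))                       # two copies of the secret word's point
points = np.array([[3.0, 0.0, 0.0],           # a guess scored 8.0
                   [0.0, 5.0, 0.0]])          # a guess scored 4.0
scores = np.array([8.0, 4.0])

xyz = norm_distance_v(orig, points, scores)   # shape (3, 3): x, y, z rows
# xyz[:, 0] is the secret's point; xyz[:, 1] lands at distance 2, xyz[:, 2] at distance 6
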
juego_embbedings_text_config.py ADDED
@@ -0,0 +1,287 @@
1
+ # %%
2
+ import json
3
+ import pickle as pk
4
+ import random
5
+ import threading
6
+ from datetime import datetime
7
+
8
+ import numpy as np
9
+ from gensim.models import KeyedVectors
10
+ from sentence_transformers import SentenceTransformer
11
+
12
+ from display import display_words
13
+ from pistas import curiosity, hint
14
+ from seguimiento import calculate_moving_average, calculate_tendency_slope
15
+
16
+ # %%
17
+ model = KeyedVectors(768)
18
+ model_st = SentenceTransformer(
19
+ "sentence-transformers/paraphrase-multilingual-mpnet-base-v2"
20
+ )
21
+ # file_path = "word_embeddings_mpnet.pth"
22
+ # embeddings_dict = torch.load(file_path)
23
+ embeddings_dict = {}
24
+
25
+ config_file_path = "config/lang.json"
26
+ secret_file_path = "config/secret.json"
27
+
28
+
29
+ class DictWrapper:
30
+ def __init__(self, data_dict):
31
+ self.__dict__.update(data_dict)
32
+
33
+
34
+ with open(config_file_path, "r") as file:
35
+ # Load JSON from the file into a dictionary
36
+ Config_full = json.load(file)
37
+
38
+ with open(secret_file_path, "r") as file:
39
+ # Load JSON from the file into a dictionary
40
+ secret = json.load(file)
41
+
42
+ lang = 0
43
+
44
+ if lang == 0:
45
+ Config = DictWrapper(Config_full["SPA"]["Game"]) # type: ignore
46
+ secret_dict = secret["SPA"]
47
+ elif lang == 1:
48
+ Config = DictWrapper(Config_full["ENG"]["Game"]) # type: ignore
49
+ secret_dict = secret["ENG"]
50
+ else:
51
+ Config = DictWrapper(Config_full["SPA"]["Game"]) # type: ignore
52
+ secret_dict = secret["SPA"]
53
+
54
+
55
+ with open("ranking.txt", "w+") as file:
56
+ file.write("---------------------------")
57
+
58
+ # %%
59
+ pca = pk.load(open("pca_mpnet.pkl", "rb"))
60
+
61
+ print(Config.Difficulty_presentation_Full) # type: ignore
62
+ # difficulty = int(input("Dificultad: "))
63
+ difficulty = int(input(Config.Difficulty + ": ")) # type: ignore
64
+
65
+
66
+ # with open(file_path, "r") as file:
67
+ # secret_list = file.readlines()
68
+
69
+ # Write a function
70
+
71
+
72
+ # Optional: Remove newline characters from each element in the list
73
+ secret_list = secret_dict["basic"] if difficulty <= 2 else secret_dict["advanced"]
74
+
75
+ secret = secret_list.pop(random.randint(0, len(secret_list) - 1))
76
+ secret = secret.lower()
77
+
78
+ words = [Config.secret_word] # type: ignore
79
+ scores = [10]
80
+
81
+ # %%
82
+ # if word not in embeddings_dict.keys():
83
+ embeddings_dict[secret] = model_st.encode(secret, convert_to_tensor=True)
84
+ model.add_vector(secret, embeddings_dict[secret].tolist())
85
+
86
+ word_vect = [embeddings_dict[secret].tolist()]
87
+
88
+ # model.add_vector(secret, embedding.tolist())
89
+
90
+ thread = threading.Thread(
91
+ target=display_words, args=(words, pca.transform(word_vect), scores, -1)
92
+ )
93
+
94
+ # Start the thread
95
+ thread.start()
96
+
97
+
98
+ def preproc_vectors(words, word_vect, scores, repeated):
99
+ ascending_indices = np.argsort(scores)
100
+ # Reverse the order to get descending indices
101
+ descending_indices = list(ascending_indices[::-1])
102
+ ranking_data = []
103
+ k = len(words) - 1
104
+ if repeated != -1:
105
+ k = repeated
106
+
107
+ ranking_data.append(["#" + str(k), words[k], scores[k]])
108
+
109
+ ranking_data.append("---------------------------")
110
+ for i in descending_indices: # type: ignore
111
+ if i == 0:
112
+ continue
113
+ ranking_data.append(["#" + str(i), words[i], scores[i]])
114
+
115
+ with open("ranking.txt", "w+") as file:
116
+ for item in ranking_data:
117
+ file.write("%s\n" % item)
118
+
119
+ if len(words) > 11:
120
+ if k in descending_indices[:11]:
121
+ descending_indices = descending_indices[:11]
122
+ else:
123
+ descending_indices = descending_indices[:11]
124
+ descending_indices.append(k)
125
+ words_display = [words[i] for i in descending_indices]
126
+ displayvect_display = pca.transform([word_vect[i] for i in descending_indices])
127
+ scores_display = [scores[i] for i in descending_indices]
128
+ bold = descending_indices.index(k)
129
+
130
+ else:
131
+ words_display = words
132
+ displayvect_display = pca.transform(word_vect)
133
+ scores_display = scores
134
+ bold = k
135
+
136
+ return (
137
+ words_display,
138
+ displayvect_display,
139
+ scores_display,
140
+ bold,
141
+ )
142
+
143
+
144
+ # Example usage:
145
+
146
+ win = False
147
+ n = 0
148
+ recent_hint = 0
149
+ f_dev_avg = 0
150
+ last_hint = -1
151
+
152
+ if difficulty == 1:
153
+ n = 3
154
+
155
+ while win == False:
156
+ word = input(Config.New_word).lower() # type: ignore
157
+ if word == "give_up":
158
+ break
159
+ if word in words:
160
+ repeated = words.index(word)
161
+
162
+ else:
163
+ repeated = -1
164
+ words.append(word)
165
+
166
+ thread.join()
167
+
168
+ # if word not in embeddings_dict.keys():
169
+ embedding = model_st.encode(word, convert_to_tensor=True)
170
+ embeddings_dict[word] = embedding
171
+ # model.add_vector(word, embeddings_dict[word].tolist())
172
+ model.add_vector(word, embedding.tolist()) # type: ignore
173
+ # model.add_vector(word, embedding.tolist())
174
+ if repeated == -1:
175
+ word_vect.append(embeddings_dict[word].tolist())
176
+
177
+ score = round(model.similarity(secret, word) * 10, 2)
178
+
179
+ if repeated == -1:
180
+ scores.append(score) # type: ignore
181
+ #
182
+ # score = round(score * 10, 2)
183
+ # %%
184
+ if score <= 2.5:
185
+ feedback = Config.Feedback_0 + str(score) # type: ignore
186
+
187
+ elif score > 2.5 and score <= 4.0:
188
+ feedback = Config.Feedback_1 + str(score) # type: ignore
189
+
190
+ elif score > 4.0 and score <= 6.0:
191
+ feedback = Config.Feedback_2 + str(score) # type: ignore
192
+
193
+ elif score > 6.0 and score <= 7.5:
194
+ feedback = Config.Feedback_3 + str(score) # type: ignore
195
+
196
+ elif score > 7.5 and score <= 8.0:
197
+ feedback = Config.Feedback_4 + str(score) # type: ignore
198
+
199
+ elif score > 8.0 and score < 10.0:
200
+ feedback = Config.Feedback_5 + str(score) # type: ignore
201
+
202
+ else:
203
+ win = True
204
+ feedback = Config.Feedback_8 # type: ignore
205
+ words[0] = secret
206
+ words.pop(len(words) - 1)
207
+ word_vect.pop(len(word_vect) - 1)
208
+ scores.pop(len(scores) - 1)
209
+ # print(model.most_similar(secret, topn=20))
210
+
211
+ print(feedback)
212
+ if score > scores[len(scores) - 2] and win == False:
213
+ print(Config.Feedback_6) # type: ignore
214
+ elif score < scores[len(scores) - 2] and win == False:
215
+ print(Config.Feedback_7) # type: ignore
216
+
217
+ if difficulty != 4:
218
+ mov_avg = calculate_moving_average(scores[1:], 5)
219
+
220
+ # print (mov_avg)
221
+ if len(mov_avg) > 1 and win == False:
222
+ f_dev = calculate_tendency_slope(mov_avg)
223
+ # print(f_dev[len(f_dev) - 3 :])
224
+ f_dev_avg = calculate_moving_average(f_dev, 3)
225
+ # print(f_dev_avg[len(f_dev_avg) - 3 :])
226
+ # print(f_dev_avg)
227
+ if f_dev_avg[len(f_dev_avg) - 1] < 0 and recent_hint == 0:
228
+ i = random.randint(0, len(Config.hint_intro) - 1) # type: ignore
229
+ print("\n")
230
+ print(Config.hint_intro[i]) # type: ignore
231
+ hint_text, n, last_hint = hint(
232
+ secret,
233
+ n,
234
+ model_st,
235
+ last_hint,
236
+ lang,
237
+ DictWrapper(Config_full["SPA"]["Hint"])
238
+ if lang == 0
239
+ else DictWrapper(Config_full["ENG"]["Hint"]),
240
+ )
241
+ print(hint_text)
242
+ recent_hint = 3
243
+
244
+ if recent_hint != 0:
245
+ recent_hint -= 1
246
+
247
+ (
248
+ words_display,
249
+ displayvect_display,
250
+ scores_display,
251
+ bold_display,
252
+ ) = preproc_vectors(words, word_vect, scores, repeated)
253
+
254
+ if win:
255
+ bold_display = 0
256
+
257
+ thread = threading.Thread(
258
+ target=display_words,
259
+ args=(words_display, displayvect_display, scores_display, bold_display),
260
+ )
261
+
262
+ # Start the thread
263
+ thread.start()
264
+
265
+ if win == False:
266
+ print(Config.Feedback_9 + secret) # type: ignore
267
+ print(Config.Feedback_10) # type: ignore
268
+
269
+ curiosity = curiosity(
270
+ secret,
271
+ DictWrapper(Config_full["SPA"]["Hint"])
272
+ if lang == 0
273
+ else DictWrapper(Config_full["ENG"]["Hint"]),
274
+ )
275
+ print(curiosity)
276
+
277
+ with open("ranking.txt", "r") as original_file:
278
+ file_content = original_file.readlines()
279
+
280
+
281
+ new_file_name = secret + "_" + str(datetime.now())
282
+
283
+ with open("data/" + new_file_name, "w") as new_file:
284
+ new_file.writelines(file_content[2:])
285
+
286
+ thread.join()
287
+ exit()
pca_mpnet.pkl ADDED
Binary file (25.4 kB).
pistas.py ADDED
@@ -0,0 +1,231 @@
1
+ import json
2
+ import os
3
+ import random
4
+
5
+ import openai
6
+ from sentence_transformers import util
7
+ from transformers import pipeline
8
+
9
+ openai.api_key = os.getenv("OPENAI_API_KEY")
10
+
11
+
12
+ def hint(secret, n, model, last_pista, lang, Config):
13
+ pista = ""
14
+
15
+ # Pistas avanzadas
16
+ if n >= 3:
17
+ j = random.randint(0, 2)
18
+ while j == last_pista:
19
+ j = random.randint(0, 2)
20
+ if j == 0:
21
+ response = openai.chat.completions.create(
22
+ model="gpt-3.5-turbo",
23
+ messages=[
24
+ {
25
+ "role": "user",
26
+ "content": Config.hint_0_0 # type: ignore
27
+ + secret
28
+ + Config.hint_0_1 # type: ignore
29
+ + secret
30
+ + Config.hint_0_2, # type: ignore
31
+ }
32
+ ],
33
+ temperature=1,
34
+ max_tokens=256,
35
+ top_p=1,
36
+ frequency_penalty=0.5,
37
+ presence_penalty=0,
38
+ )
39
+ output = str(response.choices[0].message.content)
40
+ output = output.replace('"', "").replace("'", "")
41
+
42
+ if lang == 0:
43
+ output = ireplace("la " + secret, "La palabra secreta", output)
44
+ output = ireplace("las " + secret, "La palabra secreta", output)
45
+ output = ireplace("el " + secret, "La palabra secreta", output)
46
+ output = ireplace("los " + secret, "La palabra secreta", output)
47
+ output = ireplace("un " + secret, "La palabra secreta", output)
48
+ output = ireplace("una " + secret, "La palabra secreta", output)
49
+ output = ireplace("unos " + secret, "La palabra secreta", output)
50
+ output = ireplace("unas " + secret, "La palabra secreta", output)
51
+ elif lang == 1:
52
+ output = ireplace("the " + secret, "The secret word", output)
53
+ output = ireplace("a " + secret, "The secret word", output)
54
+
55
+ pista += Config.hint_0_3 + output # type: ignore
56
+ last_pista = 0
57
+ elif j == 1:
58
+ response = openai.chat.completions.create(
59
+ model="gpt-3.5-turbo",
60
+ messages=[
61
+ {
62
+ "role": "user",
63
+ "content": Config.hint_1_0 + secret + Config.hint_1_1, # type: ignore
64
+ }
65
+ ],
66
+ temperature=1,
67
+ max_tokens=256,
68
+ top_p=1,
69
+ frequency_penalty=0,
70
+ presence_penalty=0,
71
+ )
72
+ output = str(response.choices[0].message.content)
73
+ pista += Config.hint_1_2 + output # type: ignore
74
+ last_pista = 1
75
+ elif j == 2:
76
+ response = openai.chat.completions.create(
77
+ model="gpt-3.5-turbo",
78
+ messages=[
79
+ {
80
+ "role": "user",
81
+ "content": Config.hint_2_0 + secret + Config.hint_2_1, # type: ignore
82
+ }
83
+ ],
84
+ temperature=1,
85
+ max_tokens=256,
86
+ top_p=1,
87
+ frequency_penalty=0,
88
+ presence_penalty=0,
89
+ )
90
+ output = str(response.choices[0].message.content)
91
+
92
+ pista += Config.hint_2_2 + output # type: ignore
93
+
94
+ last_pista = 2
95
+ # Pistas iniciales
96
+ else:
97
+ j = random.randint(3, 4)
98
+ while j == last_pista:
99
+ j = random.randint(3, 4)
100
+ if j == 3:
101
+ words = []
102
+ response = openai.chat.completions.create(
103
+ model="gpt-3.5-turbo",
104
+ messages=[
105
+ {
106
+ "role": "user",
107
+ "content": Config.hint_3_0, # type: ignore
108
+ }
109
+ ],
110
+ temperature=1.25,
111
+ max_tokens=256,
112
+ top_p=1,
113
+ frequency_penalty=0,
114
+ presence_penalty=0,
115
+ )
116
+ output = str(response.choices[0].message.content)
117
+ output = (output.replace(" ", "").replace(".", "")).lower()
118
+ words.extend(output.strip().split(","))
119
+ response = openai.chat.completions.create(
120
+ model="gpt-3.5-turbo",
121
+ messages=[
122
+ {
123
+ "role": "user",
124
+ "content": Config.hint_3_1 # type: ignore
125
+ + secret
126
+ + Config.hint_3_2, # type: ignore
127
+ }
128
+ ],
129
+ temperature=1.1,
130
+ max_tokens=256,
131
+ top_p=1,
132
+ frequency_penalty=0,
133
+ presence_penalty=0,
134
+ )
135
+ output = str(response.choices[0].message.content)
136
+ output = (output.replace(".", "")).lower()
137
+ words.append(output) # type: ignore
138
+ random.shuffle(words)
139
+ sentences1 = [secret, secret, secret, secret]
140
+ sentences2 = words
141
+ embeddings1 = model.encode(sentences1, convert_to_tensor=True)
142
+ embeddings2 = model.encode(sentences2, convert_to_tensor=True)
143
+
144
+ cosine_scores = util.cos_sim(embeddings1, embeddings2)
145
+ scores = cosine_scores[0].tolist()
146
+ sum_scores = sum(scores)
147
+ normalized_scores = [round(score * 100 / sum_scores, 1) for score in scores]
148
+
149
+ pista += Config.hint_3_3 # type: ignore
150
+
151
+ for i in range(len(words)):
152
+ pista += (
153
+ words[i]
154
+ + (" ") * (15 - len(words[i]))
155
+ + "|"
156
+ + ("🟩") * round(normalized_scores[i] * 0.5)
157
+ + " "
158
+ + str(normalized_scores[i])
159
+ + "%\n"
160
+ )
161
+ last_pista = 3
162
+ elif j == 4:
163
+ response = openai.chat.completions.create(
164
+ model="gpt-3.5-turbo",
165
+ messages=[
166
+ {
167
+ "role": "user",
168
+ "content": Config.hint_4_0 # type: ignore
169
+ + secret
170
+ + Config.hint_4_1, # type: ignore
171
+ }
172
+ ],
173
+ temperature=1,
174
+ max_tokens=256,
175
+ top_p=1,
176
+ frequency_penalty=0,
177
+ presence_penalty=0,
178
+ )
179
+ film_title = str(response.choices[0].message.content).replace('"', "")
180
+ response = openai.chat.completions.create(
181
+ model="gpt-3.5-turbo",
182
+ messages=[
183
+ {
184
+ "role": "user",
185
+ "content": Config.hint_4_2 # type: ignore
186
+ + film_title
187
+ + Config.hint_4_3, # type: ignore
188
+ }
189
+ ],
190
+ temperature=1,
191
+ max_tokens=256,
192
+ top_p=1,
193
+ frequency_penalty=0,
194
+ presence_penalty=0,
195
+ )
196
+ output = str(response.choices[0].message.content)
197
+ pista += Config.hint_4_4 + film_title + "\n" + output # type: ignore
198
+ last_pista = 4
199
+
200
+ return pista, n + 1, last_pista
201
+
202
+
203
+ def curiosity(secret, Config):
204
+ response = openai.chat.completions.create(
205
+ model="gpt-3.5-turbo",
206
+ messages=[
207
+ {
208
+ "role": "user",
209
+ "content": Config.curiosity + secret + '".',
210
+ }
211
+ ],
212
+ temperature=1,
213
+ max_tokens=256,
214
+ top_p=1,
215
+ frequency_penalty=0,
216
+ presence_penalty=0,
217
+ )
218
+ output = str(response.choices[0].message.content)
219
+
220
+ return output
221
+
222
+
223
+ def ireplace(old, new, text):
224
+ idx = 0
225
+ while idx < len(text):
226
+ index_l = text.lower().find(old.lower(), idx)
227
+ if index_l == -1:
228
+ return text
229
+ text = text[:index_l] + new + text[index_l + len(old) :]
230
+ idx = index_l + len(new)
231
+ return text
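ireplace performs the case-insensitive masking of the secret word (together with its article) inside the GPT-generated definition, so the hint never leaks the answer. A quick illustration with ireplace as defined above (the example strings are invented):

masked = ireplace("el perro", "La palabra secreta", "El perro ladra; el perro corre.")
print(masked)  # -> "La palabra secreta ladra; La palabra secreta corre."
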
seguimiento.py ADDED
@@ -0,0 +1,23 @@
+ import numpy as np
+
+
+ def calculate_moving_average(scores, window_size):
+     # Convert the scores list to a NumPy array for better performance
+     scores_array = np.array(scores)
+
+     # Create an array of rolling windows using the np.convolve function
+     moving_averages = np.around(
+         np.convolve(scores_array, np.ones(window_size) / window_size, mode="valid"), 2
+     )
+
+     return list(moving_averages)
+
+
+ def calculate_tendency_slope(scores):
+     # Convert the scores list to a NumPy array for better performance
+     scores_array = np.array(scores)
+
+     # Calculate the first derivative (slope) of the scores
+     derivative = np.around(np.gradient(scores_array), 2)
+
+     return list(derivative)
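These two helpers drive the hint trigger in juego_embbedings_text_config.py: the guess scores are smoothed with a 5-wide moving average, calculate_tendency_slope takes its gradient, and a 3-wide average of that slope is checked; when the last value is negative (the player is trending away from the secret word) and no hint was given recently, a hint is offered. A small numeric sketch with invented scores:

from seguimiento import calculate_moving_average, calculate_tendency_slope

scores = [2.0, 3.5, 4.0, 3.0, 2.5, 2.0, 1.5]      # guesses getting colder
mov_avg = calculate_moving_average(scores, 5)      # [3.0, 3.0, 2.6]
slope = calculate_tendency_slope(mov_avg)          # [0.0, -0.2, -0.4]
f_dev_avg = calculate_moving_average(slope, 3)     # [-0.2] -> negative, so a hint would fire
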