vasilisklv committed on
Commit
dfb55ea
1 Parent(s): bf23728

Update app.py

Files changed (1): app.py +107 -0
app.py CHANGED
@@ -0,0 +1,107 @@
+ import os
+
+ from openai import OpenAI
+ import gradio as gr
+
+
+ # create the client used for story creation
+ # NOTE: the API token is assumed to be provided as an environment variable /
+ # Space secret named "genai_stories"; adjust the name to match your own setup.
+ client = OpenAI(
+     base_url="https://api-inference.huggingface.co/v1/",
+     api_key=os.environ.get("genai_stories")
+ )
+
+ # the model to use
+ model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
+
+
+ #------------------------------------------------------------------------------------------------
+
+
+ # connect with the LLM and create responses
+ def chat_with_llm(model, messages):
+     completion = client.chat.completions.create(
+         model=model,
+         messages=messages,
+         max_tokens=2048,
+         temperature=0)
+     return completion.choices[0].message.content
+
+
+ #------------------------------------------------------------------------------------------------
+
+
+ # Initialize the story
+ def generate_story(story_type):
+     global messages
+     messages = []
+     messages.append({"role": "system", "content": f"You are a story creator for kids. The player is a kid, so you must use simple words. The player selects this type of story: {story_type}, according to which you will be asked to create a story. Keep the plot interesting and concise, and write the story as if you are talking to children, with simple words. If you are asked to create a story, a question and the answers, then you must create these and only these. Do not create anything else. If you are asked to create the ending of the story, then you must create this and only this. Do not create anything else."})
+     messages.append({"role": "user", "content": f"You must create and return ONLY a story part, a question and 4 answers. To do that, you must explicitly follow these steps: 1) Create the first part of the story, according to the story_type from your system content, between 50 and 60 words, without switching to a new line. Then you must switch line by adding this: '\n\n'. 2) Then create a question, between 10 and 20 words, on how to continue the story from step 1. Then you must switch line by adding this: '\n\n'. 3) Then create the 4 potential answers for the question in step 2. The answers must be given in the format: '1:... | 2:... | 3:... | 4:...'. Do not change this format and do not add any new lines between the answers. Every answer must be at most 20 words. All answers must be separated from each other with '|'. Don't explicitly specify 'Story', 'Question', or 'Answer'. You must reply in this format: '[story from step 1]\n\n[question from step 2]\n\n[answers from step 3]'. Do not create anything else."})
+     messages = [messages[0]] + [messages[-1]]
+     response = chat_with_llm(model, messages)
+     lines = response.split("\n\n")
+     story = lines[0]  # first block: the story part
+     question = lines[1]  # second block: the question
+     answers = [i.strip() for i in lines[2].split('|')]  # third block: the answers, separated by '|'
+     messages.append({"role": "assistant", "content": "I am waiting for the next command."})
+     return story, question, gr.Radio(choices=answers, interactive=True), gr.Radio(choices=[story_type], interactive=False), gr.Button(interactive=False)
+
+
+ # Continue the story based on what has happened so far and the player's latest answer
+ def continue_story(previous_story, selected_option):
+     global messages
+     messages.append({"role": "user", "content": f"You must create and return a story part, a question and 4 answers. To do that, you must explicitly follow these steps: 1) Based on this story so far: '{previous_story} {selected_option}', continue the story and create the next part of the story between 50 and 60 words, without changing lines. You must provide ONLY the new part that you created. Then you must switch line by adding this: '\n\n'. 2) Then create a question, between 10 and 20 words, on how to continue the story from step 1. Then you must switch line by adding this: '\n\n'. 3) Then create the 4 potential answers for the question in step 2. The answers must be given in the format: '1:... | 2:... | 3:... | 4:...'. Do not change this format and do not add any new lines between the answers. Every answer must be at most 20 words. All answers must be separated from each other with '|' and no other separator. Don't explicitly specify 'Story', 'Question', or 'Answer'. You must reply in this format: '[story from step 1]\n\n[question from step 2]\n\n[answers from step 3]'. Do not create anything else."})
+     messages = [messages[0]] + [messages[-1]]
+     response = chat_with_llm(model, messages)
+     lines = response.split("\n\n")
+     next_story = lines[0]  # first block: the new story part
+     question = lines[1]  # second block: the question
+     answers = [i.strip() for i in lines[2].split('|')]  # third block: the answers, separated by '|'
+     messages.append({"role": "assistant", "content": "I am waiting for the next command."})
+     story = previous_story + '\n\n' + next_story
+     return story, question, gr.Radio(choices=answers, interactive=True)
+
+
+ # End the story
+ def end_story(previous_story, selected_option):
+     global messages
+     messages.append({"role": "user", "content": f"Please create an ending for this story: '{previous_story}', considering the latest answer: '{selected_option}'. You must provide only the ending of the story, in an exciting way!"})
+     ending = chat_with_llm(model, messages)  # the LLM-generated ending
+     messages.append({"role": "assistant", "content": "I ended the story successfully. Now I am not waiting for any more responses from the player."})
+     story = previous_story + '\n\n' + ending
+     return story
+
+
+ # initialize messages
+ messages = []
+
+
+ # design the UI
+ with gr.Blocks() as game_ui:
+     with gr.Row():
+         # Left column for the story
+         with gr.Column(scale=1):
+             story = gr.Textbox(label="Story", interactive=False, lines=12)
+
+         # Right column for the question, answers, and buttons
+         with gr.Column(scale=1):
+             story_type = gr.Radio(label="What story to create?", choices=["A knight who leads an army attacking a castle",
+                                                                           "A kid alien who lands on earth and explores around",
+                                                                           "Animals who participate in a song contest",
+                                                                           "A little princess who is saved by a prince"])
+
+             start_button = gr.Button("Start Game!")
+
+             question = gr.Textbox(label="Question", interactive=False)
+             answers = gr.Radio(label="Choose an answer", choices=[])
+
+             submit_button = gr.Button("Submit your answer")
+             end_button = gr.Button("End the story")
+
+     # what the buttons do
+     start_button.click(fn=generate_story, inputs=[story_type], outputs=[story, question, answers, story_type, start_button])
+     submit_button.click(fn=continue_story, inputs=[story, answers], outputs=[story, question, answers])
+     end_button.click(fn=end_story, inputs=[story, answers], outputs=[story])
+
+
+ # Launch the Gradio interface
+ game_ui.launch(share=True)
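
For reference, a minimal sketch of the three-block reply layout the prompts above try to enforce, and of how the split("\n\n") parsing in generate_story / continue_story consumes it. The sample reply text is invented purely for illustration:

# Example of the reply format requested from the model:
# story part, question, and four answers, separated by blank lines.
sample_reply = (
    "The brave knight rode toward the tall grey castle with his army marching behind him."
    "\n\n"
    "Should the knight attack right away or wait until night?"
    "\n\n"
    "1: Attack right away | 2: Wait until night | 3: Send a friendly message | 4: Look for a secret door"
)

blocks = sample_reply.split("\n\n")
story_part = blocks[0]                                # first block: the story text
question = blocks[1]                                  # second block: the question
answers = [a.strip() for a in blocks[2].split("|")]   # third block: the four answers
print(answers)
# ['1: Attack right away', '2: Wait until night', '3: Send a friendly message', '4: Look for a secret door']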