Commit 5bf59c5
1 Parent(s): ebccbe8

let's add some music

Files changed:
- .env (+3 -2)
- package-lock.json (+30 -30)
- package.json (+2 -2)
- src/app/main.tsx (+246 -148)
- src/app/server/aitube/editClapMusic.ts (+23 -0)
- src/app/store.ts (+18 -2)
- src/types.ts (+3 -1)
.env CHANGED
@@ -1,8 +1,9 @@
 HF_API_TOKEN="<USE YOUR OWN>"
 MICROSERVICE_API_SECRET_TOKEN="<USE YOUR OWN>"

-
-#
+# you can use this to bypass AiTube client URL
+#AITUBE_URL="https://aitube.at"
+#AITUBE_URL="http://localhost:3000"

 AI_TUBE_API_SECRET_JWT_KEY=""
 AI_TUBE_API_SECRET_JWT_ISSUER=""
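Note: the new AITUBE_URL entry is commented out by default, so nothing changes unless it is set. The snippet below is only an illustrative sketch (not part of this commit) of how such an override is typically consumed on the server side; where the client URL is actually resolved is not shown in this diff.

// Hypothetical sketch: read the optional override, falling back to the public AiTube instance.
const aitubeUrl: string = process.env.AITUBE_URL || "https://aitube.at"
console.log(`AiTube API calls would target: ${aitubeUrl}`)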
package-lock.json CHANGED
@@ -8,8 +8,8 @@
       "name": "@jbilcke-hf/ai-stories-factory",
       "version": "0.0.0",
       "dependencies": {
-        "@aitube/clap": "0.0.
-        "@aitube/client": "0.0.
+        "@aitube/clap": "0.0.17",
+        "@aitube/client": "0.0.25",
         "@radix-ui/react-accordion": "^1.1.2",
         "@radix-ui/react-avatar": "^1.0.4",
         "@radix-ui/react-checkbox": "^1.0.4",
@@ -66,9 +66,9 @@
       }
     },
     "node_modules/@aitube/clap": {
-      "version": "0.0.
-      "resolved": "https://registry.npmjs.org/@aitube/clap/-/clap-0.0.
-      "integrity": "sha512-
+      "version": "0.0.17",
+      "resolved": "https://registry.npmjs.org/@aitube/clap/-/clap-0.0.17.tgz",
+      "integrity": "sha512-g/jjePX2Hz9Eo4hk+rxd6FRwoy2Hx9sadGLgN9yWSm7dGHhr9B/DVv8eLjFabu7jgW0zvZZ1FHvlsNAby4Pr/Q==",
       "dependencies": {
         "pure-uuid": "^1.8.1",
         "yaml": "^2.4.1"
@@ -78,14 +78,14 @@
       }
     },
     "node_modules/@aitube/client": {
-      "version": "0.0.
-      "resolved": "https://registry.npmjs.org/@aitube/client/-/client-0.0.
-      "integrity": "sha512-
+      "version": "0.0.25",
+      "resolved": "https://registry.npmjs.org/@aitube/client/-/client-0.0.25.tgz",
+      "integrity": "sha512-gX5eJOKiigVY3xK1NcsStruUuWMQbj4o4XHTceZhUyKCgHDldC0Y15mvIWVabCtEW5FFebdmhH0EFeg+PBMCsg==",
       "dependencies": {
         "query-string": "^9.0.0"
       },
       "peerDependencies": {
-        "@aitube/clap": "0.0.
+        "@aitube/clap": "0.0.17"
       }
     },
     "node_modules/@alloc/quick-lru": {
@@ -3129,9 +3129,9 @@
       }
     },
     "node_modules/caniuse-lite": {
-      "version": "1.0.
-      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.
-      "integrity": "sha512-
+      "version": "1.0.30001617",
+      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001617.tgz",
+      "integrity": "sha512-mLyjzNI9I+Pix8zwcrpxEbGlfqOkF9kM3ptzmKNw5tizSyYwMe+nGLTqMK9cO+0E+Bh6TsBxNAaHWEM8xwSsmA==",
       "funding": [
         {
           "type": "opencollective",
@@ -3748,9 +3748,9 @@
       "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA=="
     },
     "node_modules/electron-to-chromium": {
-      "version": "1.4.
-      "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.
-      "integrity": "sha512-
+      "version": "1.4.762",
+      "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.762.tgz",
+      "integrity": "sha512-rrFvGweLxPwwSwJOjIopy3Vr+J3cIPtZzuc74bmlvmBIgQO3VYJDvVrlj94iKZ3ukXUH64Ex31hSfRTLqvjYJQ=="
     },
     "node_modules/emoji-regex": {
       "version": "9.2.2",
@@ -4602,9 +4602,9 @@
       }
     },
     "node_modules/get-tsconfig": {
-      "version": "4.7.
-      "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.7.
-      "integrity": "sha512-
+      "version": "4.7.5",
+      "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.7.5.tgz",
+      "integrity": "sha512-ZCuZCnlqNzjb4QprAzXKdpp/gh6KTxSJuw3IBsPnV/7fV4NxC9ckB+vPTt8w7fJA0TaSD7c55BR47JD6MEDyDw==",
       "dependencies": {
         "resolve-pkg-maps": "^1.0.0"
       },
@@ -5347,9 +5347,9 @@
       }
     },
     "node_modules/jose": {
-      "version": "5.
-      "resolved": "https://registry.npmjs.org/jose/-/jose-5.
-      "integrity": "sha512-
+      "version": "5.3.0",
+      "resolved": "https://registry.npmjs.org/jose/-/jose-5.3.0.tgz",
+      "integrity": "sha512-IChe9AtAE79ru084ow8jzkN2lNrG3Ntfiv65Cvj9uOCE2m5LNsdHG+9EbxWxAoWRF9TgDOqLN5jm08++owDVRg==",
       "funding": {
         "url": "https://github.com/sponsors/panva"
       }
@@ -5594,9 +5594,9 @@
       }
     },
     "node_modules/minipass": {
-      "version": "7.1.
-      "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.
-      "integrity": "sha512-
+      "version": "7.1.1",
+      "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.1.tgz",
+      "integrity": "sha512-UZ7eQ+h8ywIRAW1hIEl2AqdwzJucU/Kp59+8kkZeSvafXhZjul247BvIJjEVFVeON6d7lM46XX1HXCduKAS8VA==",
       "engines": {
         "node": ">=16 || 14 >=14.17"
       }
@@ -6015,9 +6015,9 @@
       "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw=="
     },
     "node_modules/path-scurry": {
-      "version": "1.
-      "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.
-      "integrity": "sha512-
+      "version": "1.11.0",
+      "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.0.tgz",
+      "integrity": "sha512-LNHTaVkzaYaLGlO+0u3rQTz7QrHTFOuKyba9JMTQutkmtNew8dw8wOD7mTU/5fCPZzCWpfW0XnQKzY61P0aTaw==",
       "dependencies": {
         "lru-cache": "^10.2.0",
         "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
@@ -6742,9 +6742,9 @@
       }
     },
     "node_modules/semver": {
-      "version": "7.6.
-      "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.
-      "integrity": "sha512-
+      "version": "7.6.2",
+      "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz",
+      "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==",
       "bin": {
         "semver": "bin/semver.js"
       },
package.json CHANGED
@@ -9,8 +9,8 @@
     "lint": "next lint"
   },
   "dependencies": {
-    "@aitube/clap": "0.0.
-    "@aitube/client": "0.0.
+    "@aitube/clap": "0.0.17",
+    "@aitube/client": "0.0.25",
     "@radix-ui/react-accordion": "^1.1.2",
     "@radix-ui/react-avatar": "^1.0.4",
     "@radix-ui/react-checkbox": "^1.0.4",
src/app/main.tsx CHANGED
@@ -2,7 +2,7 @@

 import React, { useEffect, useRef, useTransition } from 'react'
 import { IoMdPhonePortrait } from 'react-icons/io'
-import { ClapProject, ClapMediaOrientation, ClapSegmentCategory } from '@aitube/clap'
+import { ClapProject, ClapMediaOrientation, ClapSegmentCategory, updateClap } from '@aitube/clap'
 import Image from 'next/image'
 import { useFilePicker } from 'use-file-picker'
 import { DeviceFrameset } from 'react-device-frameset'
@@ -18,6 +18,8 @@ import { createClap } from './server/aitube/createClap'
 import { editClapEntities } from './server/aitube/editClapEntities'
 import { editClapDialogues } from './server/aitube/editClapDialogues'
 import { editClapStoryboards } from './server/aitube/editClapStoryboards'
+import { editClapMusic } from './server/aitube/editClapMusic'
+import { editClapVideos } from './server/aitube/editClapVideos'
 import { exportClapToVideo } from './server/aitube/exportClapToVideo'

 import { useStore } from './store'
@@ -42,9 +44,11 @@ export function Main() {
   const parseGenerationStatus = useStore(s => s.parseGenerationStatus)
   const storyGenerationStatus = useStore(s => s.storyGenerationStatus)
   const assetGenerationStatus = useStore(s => s.assetGenerationStatus)
+  const musicGenerationStatus = useStore(s => s.musicGenerationStatus)
   const voiceGenerationStatus = useStore(s => s.voiceGenerationStatus)
   const imageGenerationStatus = useStore(s => s.imageGenerationStatus)
   const videoGenerationStatus = useStore(s => s.videoGenerationStatus)
+  const finalGenerationStatus = useStore(s => s.finalGenerationStatus)
   const currentClap = useStore(s => s.currentClap)
   const currentVideo = useStore(s => s.currentVideo)
   const currentVideoOrientation = useStore(s => s.currentVideoOrientation)
@@ -59,9 +63,11 @@ export function Main() {
   const setParseGenerationStatus = useStore(s => s.setParseGenerationStatus)
   const setStoryGenerationStatus = useStore(s => s.setStoryGenerationStatus)
   const setAssetGenerationStatus = useStore(s => s.setAssetGenerationStatus)
+  const setMusicGenerationStatus = useStore(s => s.setMusicGenerationStatus)
   const setVoiceGenerationStatus = useStore(s => s.setVoiceGenerationStatus)
   const setImageGenerationStatus = useStore(s => s.setImageGenerationStatus)
   const setVideoGenerationStatus = useStore(s => s.setVideoGenerationStatus)
+  const setFinalGenerationStatus = useStore(s => s.setFinalGenerationStatus)
   const setCurrentClap = useStore(s => s.setCurrentClap)
   const setCurrentVideo = useStore(s => s.setCurrentVideo)
   const progress = useStore(s => s.progress)
@@ -87,7 +93,7 @@ export function Main() {
       setStatus,
       setProgress,
      setParseGenerationStatus,
-
+      setFinalGenerationStatus,
     } = useStore.getState()

     let clap: ClapProject | undefined = undefined
@@ -113,6 +119,7 @@ export function Main() {

     setParseGenerationStatus("finished")

+    /*
     try {
       setProgress(60)
       setVoiceGenerationStatus("generating")
@@ -135,8 +142,9 @@ export function Main() {
     if (!clap) {
       return
     }
+    */

-
+    setFinalGenerationStatus("generating")

     let assetUrl = ""
     try {
@@ -150,13 +158,13 @@ export function Main() {
     }

     if (!assetUrl) {
-
+      setFinalGenerationStatus("error")
       setStatus("error")
       setProgress(0)
       return
     }

-
+    setFinalGenerationStatus("finished")

     setProgress(80)

@@ -169,41 +177,30 @@ export function Main() {
     fn()
   }, [fileData?.name])

-  const
-        orientation: useStore.getState().orientation,
-        turbo: true,
-      })
-    } catch (err) {
-      setStoryGenerationStatus("error")
-      setStatus("error")
-      setError(`${err}`)
-      return
-    }
-    if (!clap) {
-      return
-    }
+  const generateStory = async (): Promise<ClapProject> => {
+
+    let clap: ClapProject | undefined = undefined
+    try {
+      setProgress(0)
+
+      setStatus("generating")
+      setStoryGenerationStatus("generating")
+      setStoryPrompt(promptDraft.current)
+
+      clap = await createClap({
+        prompt: promptDraft.current,
+        orientation: useStore.getState().orientation,
+
+        turbo: true,
+      })
+
+      if (!clap) { throw new Error(`failed to create the clap`) }
+
+      if (clap.segments.length <= 1) { throw new Error(`failed to generate more than one segments`) }
+
+      console.log(`handleSubmit(): received a clap = `, clap)
+      setCurrentClap(clap)
+      setStoryGenerationStatus("finished")

       console.log("-------- GENERATED STORY --------")
       console.table(clap.segments, [
@@ -213,34 +210,30 @@ export function Main() {
         'category',
         'prompt'
       ])
-    } catch (err) {
-      setAssetGenerationStatus("error")
-      setStatus("error")
-      setError(`${err}`)
-      return
-    }
-    if (!clap) {
-      return
-    }
+      return clap
+    } catch (err) {
+      setStoryGenerationStatus("error")
+      throw err
+    }
+  }
+
+  const generateEntities = async (clap: ClapProject): Promise<ClapProject> => {
+    try {
+      setProgress(20)
+      setAssetGenerationStatus("generating")
+      clap = await editClapEntities({
+        clap,
+
+        // generating entities requires a "smart" LLM
+        turbo: false,
+        // turbo: true,
+      })
+
+      if (!clap) { throw new Error(`failed to edit the entities`) }
+
+      console.log(`handleSubmit(): received a clap with entities = `, clap)
+      setCurrentClap(clap)
+      setAssetGenerationStatus("finished")
       console.log("-------- GENERATED ENTITIES --------")
       console.table(clap.entities, [
         'category',
@@ -248,110 +241,209 @@ export function Main() {
         'imagePrompt',
         'appearance'
       ])
-    /*
-    if (mainCharacterImage) {
-      console.log("handleSubmit(): User specified a main character image")
-      // various strategies here, for instance we can assume that the first character is the main character,
-      // or maybe a more reliable way is to count the number of occurrences.
-      // there is a risk of misgendering, so ideally we should add some kind of UI to do this,
-      // such as a list of characters.
-    }
-    */
-    try {
-      setProgress(40)
-      setImageGenerationStatus("generating")
-      clap = await editClapStoryboards({
-        clap,
-        // the turbo is mandatory here,
-        // since this uses a model with character consistency,
-        // which is not the case for the non-turbo one
-        turbo: true
-      })
-      clap = await editClapDialogues({
-        clap,
-        turbo: true
-      })
-      setCurrentClap(clap)
-      setVoiceGenerationStatus("finished")
-    } catch (err) {
-      setVoiceGenerationStatus("error")
-      setStatus("error")
-      setError(`${err}`)
-      return
-    }
-    if (!clap) {
-      console.log("aborting prematurely")
-      return
-    }
-    let assetUrl = ""
-      clap,
-      console.log(`handleSubmit(): received a video: ${assetUrl.slice(0, 60)}...`)
-      setVideoGenerationStatus("finished")
-      setCurrentVideo(assetUrl)
-      console.error(`
-      setVideoGenerationStatus("error")
-      // setCurrentVideo("")
-      console.log("-------- GENERATED FINAL VIDEO --------")
+      return clap
+    } catch (err) {
+      setAssetGenerationStatus("error")
+      throw err
+    }
+  }
+
+  const generateMusic = async (clap: ClapProject): Promise<ClapProject> => {
+    try {
+      setProgress(30)
+      setMusicGenerationStatus("generating")
+
+      clap = await editClapMusic({
+        clap,
+        turbo: true
+      })
+
+      if (!clap) { throw new Error(`failed to edit the music`) }
+
+      console.log(`handleSubmit(): received a clap with music = `, clap)
+      setCurrentClap(clap)
+      setMusicGenerationStatus("finished")
+      console.log("-------- GENERATED MUSIC --------")
+      console.table(clap.segments.filter(s => s.category === ClapSegmentCategory.MUSIC), [
+        'endTimeInMs',
+        'prompt',
+        'entityId',
+      ])
+      return clap
+    } catch (err) {
+      setMusicGenerationStatus("error")
+      throw err
+    }
+  }
+
+  const generateStoryboards = async (clap: ClapProject): Promise<ClapProject> => {
+    try {
+      setProgress(40)
+      setImageGenerationStatus("generating")
+      clap = await editClapStoryboards({
+        clap,
+        // the turbo is mandatory here,
+        // since this uses a model with character consistency,
+        // which is not the case for the non-turbo one
+        turbo: true
+      })
+
+      if (!clap) { throw new Error(`failed to edit the storyboards`) }
+
+      // const fusion =
+      console.log(`handleSubmit(): received a clap with images = `, clap)
+      setCurrentClap(clap)
+      setImageGenerationStatus("finished")
       console.log("-------- GENERATED STORYBOARDS --------")
       console.table(clap.segments.filter(s => s.category === ClapSegmentCategory.STORYBOARD), [
         'endTimeInMs',
         'prompt',
         'assetUrl'
       ])
+      return clap
+    } catch (err) {
+      setImageGenerationStatus("error")
+      throw err
+    }
+  }
+
+  const generateVideos = async (clap: ClapProject): Promise<ClapProject> => {
+    try {
+      setProgress(50)
+      setVideoGenerationStatus("generating")
+
+      clap = await editClapVideos({
+        clap,
+        turbo: true
+      })
+
+      if (!clap) { throw new Error(`failed to edit the videos`) }
+
+      console.log(`handleSubmit(): received a clap with videos = `, clap)
+      setCurrentClap(clap)
+      setVideoGenerationStatus("finished")
+      console.log("-------- GENERATED VIDEOS --------")
+      console.table(clap.segments.filter(s => s.category === ClapSegmentCategory.VIDEO), [
+        'endTimeInMs',
+        'prompt',
+        'entityId',
+      ])
+      return clap
+    } catch (err) {
+      setVideoGenerationStatus("error")
+      throw err
+    }
+  }
+
+  const generateDialogues = async (clap: ClapProject): Promise<ClapProject> => {
+    try {
+      setProgress(70)
+      setVoiceGenerationStatus("generating")
+      clap = await editClapDialogues({
+        clap,
+        turbo: true
+      })
+
+      if (!clap) { throw new Error(`failed to edit the dialogues`) }
+
+      console.log(`handleSubmit(): received a clap with dialogues = `, clap)
+      setCurrentClap(clap)
+      setVoiceGenerationStatus("finished")
       console.log("-------- GENERATED DIALOGUES --------")
       console.table(clap.segments.filter(s => s.category === ClapSegmentCategory.DIALOGUE), [
         'endTimeInMs',
         'prompt',
         'entityId',
       ])
+      return clap
+    } catch (err) {
+      setVoiceGenerationStatus("error")
+      throw err
+    }
+  }
+
+  const generateFinalVideo = async (clap: ClapProject): Promise<string> => {
+
+    let assetUrl = ""
+    try {
+      setProgress(85)
+      setFinalGenerationStatus("generating")
+      assetUrl = await exportClapToVideo({
+        clap,
+        turbo: true
+      })
+
+      console.log(`handleSubmit(): received a video: ${assetUrl.slice(0, 60)}...`)
+      setFinalGenerationStatus("finished")
+      setCurrentVideo(assetUrl)
+      return assetUrl
+    } catch (err) {
+      setFinalGenerationStatus("error")
+      throw err
+    }
+  }
+
+  const handleSubmit = async () => {
+
+    startTransition(async () => {
+      console.log(`handleSubmit(): generating a clap using prompt = "${promptDraft.current}" `)

       try {
+        let clap = await generateStory()
+
+        const claps = await Promise.all([
+          generateMusic(clap),
+          generateVideos(clap)
+        ])
+
+        for (const newerClap of claps) {
+          console.log("newerClap:", newerClap)
+          clap = await updateClap(clap, newerClap, {
+            overwriteMeta: false,
+            inlineReplace: true,
+          })
+        }
+        console.log("finalClap: ", clap)
+
+        /*
+        clap = await claps.reduce(async (existingClap, newerClap) =>
+          updateClap(existingClap, newerClap, {
+            overwriteMeta: false,
+            inlineReplace: true,
+          })
+        , Promise.resolve(clap)
+        */
+
+        // We can't have consistent characters with video (yet)
+        // clap = await generateEntities(clap)
+
+        /*
+        if (mainCharacterImage) {
+          console.log("handleSubmit(): User specified a main character image")
+          // various strategies here, for instance we can assume that the first character is the main character,
+          // or maybe a more reliable way is to count the number of occurrences.
+          // there is a risk of misgendering, so ideally we should add some kind of UI to do this,
+          // such as a list of characters.
+        }
+        */
+
+        // let's skip storyboards for now
+        // clap = await generateStoryboards(clap)
+
+        // clap = await generateVideos(clap)
+        // clap = await generateDialogues(clap)
+
+        await generateFinalVideo(clap)

         setStatus("finished")
         setError("")
       } catch (err) {
+        console.error(`failed to generate: `, err)
         setStatus("error")
         setError(`${err}`)
       }
     })
   }

@@ -377,18 +469,22 @@ export function Main() {
   const progressDelayInMsPerStage: Record<GenerationStage, number> = {
     story: 2200,
     entities: 2200,
+    music: 3000,
     images: 1000,
     voices: 2000,
-
+    videos: 2000,
+    final: 2500,
     idle: 1000
   }

   const maxProgressPerStage: Record<GenerationStage, number> = {
     story: 19,
-    entities:
+    entities: 29,
+    music: 39,
     images: 49,
-
-
+    videos: 69,
+    voices: 84,
+    final: 99,
     idle: 100
   }

@@ -424,20 +520,20 @@ export function Main() {
         // `bg-gradient-to-br from-amber-700 to-yellow-300`,

         // warm orange, a bit flash but not bad, not bad at all
+        // `bg-gradient-to-br from-orange-700 to-yellow-400`,

+        // nice "AiTube" vibe
+        // `bg-gradient-to-br from-red-700 to-yellow-400`,

+        // pretty cool lime!
+        `bg-gradient-to-br from-lime-700 to-yellow-400`,

+        // new style, pretty "fresh" - maybe too bright?
+        // use a dark logo for this one
+        // `bg-gradient-to-br from-yellow-200 to-yellow-500`,

+        // too pastel
+        // `bg-gradient-to-br from-yellow-200 to-red-300`,

         // `bg-gradient-to-br from-sky-400 to-sky-300/30`,
         `w-screen h-full overflow-y-scroll md:overflow-hidden`,
@@ -755,12 +851,14 @@ export function Main() {
                 <p className="text-2xl font-bold">{progress}%</p>
                 <p className="text-base text-white/70">{isBusy
                   ? (
-                    storyGenerationStatus === "generating" ? "Writing
+                    storyGenerationStatus === "generating" ? "Writing story.."
                     : parseGenerationStatus === "generating" ? "Loading the project.."
                     : assetGenerationStatus === "generating" ? "Casting characters.."
+                    : musicGenerationStatus === "generating" ? "Producing music.."
                     : imageGenerationStatus === "generating" ? "Creating storyboards.."
-                    :
-                    :
+                    : videoGenerationStatus === "generating" ? "Filming shots.."
+                    : voiceGenerationStatus === "generating" ? "Recording dialogues.."
+                    : finalGenerationStatus === "generating" ? "Assembling final cut.."
                     : "Please wait.."
                   )
                   : status === "error"
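Note: stripped of logging and error handling, the reworked handleSubmit() introduced above boils down to the pipeline below. This is a condensed restatement of code already present in the diff (same function names and updateClap options), not additional behavior.

// Condensed view of the new generation pipeline (functions defined in src/app/main.tsx above).
const handleSubmitOutline = async () => {
  let clap = await generateStory()            // write the story and its segments

  const claps = await Promise.all([           // music and videos run in parallel
    generateMusic(clap),
    generateVideos(clap),
  ])

  for (const newerClap of claps) {            // merge each partial result back into the main clap
    clap = await updateClap(clap, newerClap, { overwriteMeta: false, inlineReplace: true })
  }

  await generateFinalVideo(clap)              // assemble and export the final cut
}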
src/app/server/aitube/editClapMusic.ts ADDED
@@ -0,0 +1,23 @@
+"use server"
+
+import { ClapProject } from "@aitube/clap"
+import { editClapMusic as apiEditClapMusic, ClapCompletionMode } from "@aitube/client"
+
+import { getToken } from "./getToken"
+
+export async function editClapMusic({
+  clap,
+  turbo = false,
+}: {
+  clap: ClapProject
+  turbo?: boolean
+}): Promise<ClapProject> {
+  const newClap: ClapProject = await apiEditClapMusic({
+    clap,
+    completionMode: ClapCompletionMode.MERGE,
+    turbo,
+    token: await getToken()
+  })
+
+  return newClap
+}
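Note: this server action is a thin wrapper around the @aitube/client call, and ClapCompletionMode.MERGE presumably asks the API to return the submitted clap with the music segments merged in rather than a bare delta. On the client side it is consumed from generateMusic() in src/app/main.tsx exactly as shown in the diff above:

// Call site taken from the main.tsx diff in this commit:
clap = await editClapMusic({
  clap,
  turbo: true
})

if (!clap) { throw new Error(`failed to edit the music`) }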
src/app/store.ts CHANGED
@@ -24,9 +24,11 @@ export const useStore = create<{
   parseGenerationStatus: TaskStatus
   storyGenerationStatus: TaskStatus
   assetGenerationStatus: TaskStatus
+  musicGenerationStatus: TaskStatus
   voiceGenerationStatus: TaskStatus
   imageGenerationStatus: TaskStatus
   videoGenerationStatus: TaskStatus
+  finalGenerationStatus: TaskStatus
   isBusy: boolean

   currentClap?: ClapProject
@@ -47,9 +49,11 @@ export const useStore = create<{
   setParseGenerationStatus: (parseGenerationStatus: TaskStatus) => void
   setStoryGenerationStatus: (storyGenerationStatus: TaskStatus) => void
   setAssetGenerationStatus: (assetGenerationStatus: TaskStatus) => void
+  setMusicGenerationStatus: (musicGenerationStatus: TaskStatus) => void
   setVoiceGenerationStatus: (voiceGenerationStatus: TaskStatus) => void
   setImageGenerationStatus: (imageGenerationStatus: TaskStatus) => void
   setVideoGenerationStatus: (videoGenerationStatus: TaskStatus) => void
+  setFinalGenerationStatus: (finalGenerationStatus: TaskStatus) => void
   syncStatusAndStageState: () => void
   setCurrentClap: (currentClap?: ClapProject) => void

@@ -71,9 +75,11 @@ export const useStore = create<{
   parseGenerationStatus: "idle",
   storyGenerationStatus: "idle",
   assetGenerationStatus: "idle",
+  musicGenerationStatus: "idle",
   voiceGenerationStatus: "idle",
   imageGenerationStatus: "idle",
   videoGenerationStatus: "idle",
+  finalGenerationStatus: "idle",
   isBusy: false,
   currentClap: undefined,
   currentVideo: "",
@@ -118,6 +124,10 @@ export const useStore = create<{
     set({ assetGenerationStatus })
     get().syncStatusAndStageState()
   },
+  setMusicGenerationStatus: (musicGenerationStatus: TaskStatus) => {
+    set({ musicGenerationStatus })
+    get().syncStatusAndStageState()
+  },
   setVoiceGenerationStatus: (voiceGenerationStatus: TaskStatus) => {
     set({ voiceGenerationStatus })
     get().syncStatusAndStageState()
@@ -130,8 +140,12 @@ export const useStore = create<{
     set({ videoGenerationStatus })
     get().syncStatusAndStageState()
   },
+  setFinalGenerationStatus: (finalGenerationStatus: TaskStatus) => {
+    set({ finalGenerationStatus })
+    get().syncStatusAndStageState()
+  },
   syncStatusAndStageState: () => {
-    const { status, storyGenerationStatus, assetGenerationStatus, voiceGenerationStatus, imageGenerationStatus, videoGenerationStatus } = get()
+    const { status, storyGenerationStatus, assetGenerationStatus, musicGenerationStatus, voiceGenerationStatus, imageGenerationStatus, videoGenerationStatus, finalGenerationStatus } = get()

     // note: we don't really have "stages" since some things run in parallel,
     // and some parallel tasks may finish before the others
@@ -139,9 +153,11 @@ export const useStore = create<{
     let stage: GenerationStage =
       storyGenerationStatus === "generating" ? "story" :
       assetGenerationStatus === "generating" ? "entities" :
+      musicGenerationStatus === "generating" ? "music" :
       voiceGenerationStatus === "generating" ? "voices" :
       imageGenerationStatus === "generating" ? "images" :
-      videoGenerationStatus === "generating" ? "
+      videoGenerationStatus === "generating" ? "videos" :
+      finalGenerationStatus === "generating" ? "final" :
       "idle"

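Note: components read the two new store fields the same way as the existing ones, through individual zustand selectors. The lines below are the exact selectors added to src/app/main.tsx in this commit, shown here only to make the store/UI wiring explicit:

const musicGenerationStatus = useStore(s => s.musicGenerationStatus)
const finalGenerationStatus = useStore(s => s.finalGenerationStatus)
const setMusicGenerationStatus = useStore(s => s.setMusicGenerationStatus)
const setFinalGenerationStatus = useStore(s => s.setFinalGenerationStatus)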
src/types.ts CHANGED
@@ -13,7 +13,9 @@ export type GlobalStatus =
 export type GenerationStage =
   | "story"
   | "entities"
+  | "music"
   | "voices"
   | "images"
-  | "
+  | "videos"
+  | "final"
   | "idle"
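Note: because GenerationStage is used as the key type of Record<GenerationStage, number> maps in main.tsx, adding the "music", "videos" and "final" members forces those maps to be extended too, which is what the main.tsx hunk above does; TypeScript would otherwise reject the Record literals as missing keys. The map below is copied from that hunk as an illustration:

// Every key of the union must be present, or the literal fails to type-check.
const maxProgressPerStage: Record<GenerationStage, number> = {
  story: 19,
  entities: 29,
  music: 39,
  images: 49,
  videos: 69,
  voices: 84,
  final: 99,
  idle: 100
}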