jbilcke-hf HF staff committed on
Commit
b2a6404
1 Parent(s): 3a86e21

adjust timer

Browse files
src/app/config.ts ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ export const defaultPrompt =
2
+ // "Yesterday I was walking in SF when I saw a zebra"
3
+ // "underwater footage of the loch ness"
4
+ // "beautiful underwater footage of a clownfish swimming around coral" // who discovers a secret pirate treasure chest"
5
+ // "beautiful footage of a Caribbean fishing village and bay, sail ships, during golden hour, no captions"
6
+ "videogame gameplay footage, first person, exploring some mysterious ruins, no commentary"
src/app/main.tsx CHANGED
@@ -7,6 +7,7 @@ import { FaCloudDownloadAlt, FaDiscord } from "react-icons/fa"
7
  import { useLocalStorage } from "usehooks-ts"
8
  import { ClapProject, ClapMediaOrientation, ClapSegmentCategory, updateClap } from '@aitube/clap'
9
  import Image from 'next/image'
 
10
  import { useFilePicker } from 'use-file-picker'
11
  import { DeviceFrameset } from 'react-device-frameset'
12
  import 'react-device-frameset/styles/marvel-devices.min.css'
@@ -21,6 +22,7 @@ import { createClap } from './server/aitube/createClap'
21
  import { editClapEntities } from './server/aitube/editClapEntities'
22
  import { editClapDialogues } from './server/aitube/editClapDialogues'
23
  import { editClapStoryboards } from './server/aitube/editClapStoryboards'
 
24
  import { editClapMusic } from './server/aitube/editClapMusic'
25
  import { editClapVideos } from './server/aitube/editClapVideos'
26
  import { exportClapToVideo } from './server/aitube/exportClapToVideo'
@@ -35,12 +37,12 @@ import { GenerationStage } from '@/types'
35
  import { FileContent } from 'use-file-picker/dist/interfaces'
36
  import { generateRandomStory } from '@/lib/utils/generateRandomStory'
37
  import { logImage } from '@/lib/utils/logImage'
 
38
 
39
  export function Main() {
40
  const [storyPromptDraft, setStoryPromptDraft] = useLocalStorage<string>(
41
  "AI_STORIES_FACTORY_STORY_PROMPT_DRAFT",
42
- // "Yesterday I was walking in SF when I saw a zebra"
43
- "underwater footage, coral, fishes"
44
  )
45
  const promptDraftRef = useRef("")
46
  promptDraftRef.current = storyPromptDraft
@@ -51,10 +53,12 @@ export function Main() {
51
  const mainCharacterImage = useStore(s => s.mainCharacterImage)
52
  const mainCharacterVoice = useStore(s => s.mainCharacterVoice)
53
  const orientation = useStore(s => s.orientation)
 
54
  const status = useStore(s => s.status)
55
  const parseGenerationStatus = useStore(s => s.parseGenerationStatus)
56
  const storyGenerationStatus = useStore(s => s.storyGenerationStatus)
57
  const assetGenerationStatus = useStore(s => s.assetGenerationStatus)
 
58
  const musicGenerationStatus = useStore(s => s.musicGenerationStatus)
59
  const voiceGenerationStatus = useStore(s => s.voiceGenerationStatus)
60
  const imageGenerationStatus = useStore(s => s.imageGenerationStatus)
@@ -73,6 +77,7 @@ export function Main() {
73
  const setParseGenerationStatus = useStore(s => s.setParseGenerationStatus)
74
  const setStoryGenerationStatus = useStore(s => s.setStoryGenerationStatus)
75
  const setAssetGenerationStatus = useStore(s => s.setAssetGenerationStatus)
 
76
  const setMusicGenerationStatus = useStore(s => s.setMusicGenerationStatus)
77
  const setVoiceGenerationStatus = useStore(s => s.setVoiceGenerationStatus)
78
  const setImageGenerationStatus = useStore(s => s.setImageGenerationStatus)
@@ -90,6 +95,8 @@ export function Main() {
90
  const canSeeBetaFeatures = false // true // getParam<boolean>("beta", false)
91
 
92
  const isBusy = useStore(s => s.isBusy)
 
 
93
 
94
  const importStory = async (fileData: FileContent<ArrayBuffer>): Promise<ClapProject> => {
95
  if (!fileData?.name) { throw new Error(`invalid file (missing file name)`) }
@@ -191,6 +198,33 @@ export function Main() {
191
  }
192
  }
193
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
194
 
195
  const generateMusic = async (clap: ClapProject): Promise<ClapProject> => {
196
  try {
@@ -226,10 +260,10 @@ export function Main() {
226
  setImageGenerationStatus("generating")
227
  clap = await editClapStoryboards({
228
  clap,
229
- // the turbo is mandatory here,
230
- // since this uses a model with character consistency,
231
- // which is not the case for the non-turbo one
232
- turbo: false
233
  }).then(r => r.promise)
234
 
235
  if (!clap) { throw new Error(`failed to edit the storyboards`) }
@@ -349,15 +383,20 @@ export function Main() {
349
  }
350
 
351
  const handleSubmit = async () => {
 
 
352
 
353
  startTransition(async () => {
 
 
 
354
  console.log(`handleSubmit(): generating a clap using prompt = "${promptDraftRef.current}" `)
355
 
356
  try {
357
  let clap = await generateStory()
358
 
359
  const tasks = [
360
- generateMusic(clap),
361
  generateStoryboardsThenVideos(clap)
362
  ]
363
 
@@ -436,7 +475,7 @@ export function Main() {
436
  let clap = await importStory(fileData)
437
 
438
  const claps = await Promise.all([
439
- generateMusic(clap),
440
  generateVideos(clap)
441
  ])
442
 
@@ -494,7 +533,7 @@ export function Main() {
494
  })
495
 
496
  // timerRef.current = setTimeout(timerFn, progressDelayInMsPerStage[stage])
497
- timerRef.current = setTimeout(timerFn, 1000)
498
  }
499
 
500
  useEffect(() => {
@@ -504,6 +543,39 @@ export function Main() {
504
  timerRef.current = setTimeout(timerFn, 0)
505
  }, [isBusy])
506
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
507
  return (
508
  <div className={cn(
509
  `fixed`,
@@ -669,8 +741,7 @@ export function Main() {
669
  setStoryPromptDraft(e.target.value)
670
  promptDraftRef.current = e.target.value
671
  }}
672
- // placeholder="Yesterday I was at my favorite pizza place and.."
673
- placeholder="underwater footage, coral, fishes"
674
  inputClassName="
675
  transition-all duration-200 ease-in-out
676
  h-32 md:h-56 lg:h-64
@@ -892,11 +963,16 @@ export function Main() {
892
  <p className="text-2xl font-bold">{progress}%</p>
893
  <p className="text-base text-white/70">{isBusy
894
  ? (
 
 
 
 
895
  storyGenerationStatus === "generating" ? "Writing story.."
896
  : parseGenerationStatus === "generating" ? "Loading the project.."
897
  : assetGenerationStatus === "generating" ? "Casting characters.."
898
- : musicGenerationStatus === "generating" ? "Producing music.."
899
  : imageGenerationStatus === "generating" ? "Creating storyboards.."
 
 
900
  : videoGenerationStatus === "generating" ? "Filming shots.."
901
  : voiceGenerationStatus === "generating" ? "Recording dialogues.."
902
  : finalGenerationStatus === "generating" ? "Editing final cut.."
 
7
  import { useLocalStorage } from "usehooks-ts"
8
  import { ClapProject, ClapMediaOrientation, ClapSegmentCategory, updateClap } from '@aitube/clap'
9
  import Image from 'next/image'
10
+ import { useSearchParams } from "next/navigation"
11
  import { useFilePicker } from 'use-file-picker'
12
  import { DeviceFrameset } from 'react-device-frameset'
13
  import 'react-device-frameset/styles/marvel-devices.min.css'
 
22
  import { editClapEntities } from './server/aitube/editClapEntities'
23
  import { editClapDialogues } from './server/aitube/editClapDialogues'
24
  import { editClapStoryboards } from './server/aitube/editClapStoryboards'
25
+ import { editClapSounds } from './server/aitube/editClapSounds'
26
  import { editClapMusic } from './server/aitube/editClapMusic'
27
  import { editClapVideos } from './server/aitube/editClapVideos'
28
  import { exportClapToVideo } from './server/aitube/exportClapToVideo'
 
37
  import { FileContent } from 'use-file-picker/dist/interfaces'
38
  import { generateRandomStory } from '@/lib/utils/generateRandomStory'
39
  import { logImage } from '@/lib/utils/logImage'
40
+ import { defaultPrompt } from './config'
41
 
42
  export function Main() {
43
  const [storyPromptDraft, setStoryPromptDraft] = useLocalStorage<string>(
44
  "AI_STORIES_FACTORY_STORY_PROMPT_DRAFT",
45
+ defaultPrompt
 
46
  )
47
  const promptDraftRef = useRef("")
48
  promptDraftRef.current = storyPromptDraft
 
53
  const mainCharacterImage = useStore(s => s.mainCharacterImage)
54
  const mainCharacterVoice = useStore(s => s.mainCharacterVoice)
55
  const orientation = useStore(s => s.orientation)
56
+ const setOrientation = useStore(s => s.setOrientation)
57
  const status = useStore(s => s.status)
58
  const parseGenerationStatus = useStore(s => s.parseGenerationStatus)
59
  const storyGenerationStatus = useStore(s => s.storyGenerationStatus)
60
  const assetGenerationStatus = useStore(s => s.assetGenerationStatus)
61
+ const soundGenerationStatus = useStore(s => s.soundGenerationStatus)
62
  const musicGenerationStatus = useStore(s => s.musicGenerationStatus)
63
  const voiceGenerationStatus = useStore(s => s.voiceGenerationStatus)
64
  const imageGenerationStatus = useStore(s => s.imageGenerationStatus)
 
77
  const setParseGenerationStatus = useStore(s => s.setParseGenerationStatus)
78
  const setStoryGenerationStatus = useStore(s => s.setStoryGenerationStatus)
79
  const setAssetGenerationStatus = useStore(s => s.setAssetGenerationStatus)
80
+ const setSoundGenerationStatus = useStore(s => s.setSoundGenerationStatus)
81
  const setMusicGenerationStatus = useStore(s => s.setMusicGenerationStatus)
82
  const setVoiceGenerationStatus = useStore(s => s.setVoiceGenerationStatus)
83
  const setImageGenerationStatus = useStore(s => s.setImageGenerationStatus)
 
95
  const canSeeBetaFeatures = false // true // getParam<boolean>("beta", false)
96
 
97
  const isBusy = useStore(s => s.isBusy)
98
+ const busyRef = useRef(isBusy)
99
+ busyRef.current = isBusy
100
 
101
  const importStory = async (fileData: FileContent<ArrayBuffer>): Promise<ClapProject> => {
102
  if (!fileData?.name) { throw new Error(`invalid file (missing file name)`) }
 
198
  }
199
  }
200
 
201
+ const generateSounds = async (clap: ClapProject): Promise<ClapProject> => {
202
+ try {
203
+ // setProgress(30)
204
+ setSoundGenerationStatus("generating")
205
+
206
+ clap = await editClapSounds({
207
+ clap,
208
+ turbo: true
209
+ }).then(r => r.promise)
210
+
211
+ if (!clap) { throw new Error(`failed to edit the sound`) }
212
+
213
+ console.log(`handleSubmit(): received a clap with sound = `, clap)
214
+ setCurrentClap(clap)
215
+ setSoundGenerationStatus("finished")
216
+ console.log("---------------- GENERATED SOUND ----------------")
217
+ console.table(clap.segments.filter(s => s.category === ClapSegmentCategory.SOUND), [
218
+ 'endTimeInMs',
219
+ 'prompt',
220
+ 'entityId',
221
+ ])
222
+ return clap
223
+ } catch (err) {
224
+ setSoundGenerationStatus("error")
225
+ throw err
226
+ }
227
+ }
228
 
229
  const generateMusic = async (clap: ClapProject): Promise<ClapProject> => {
230
  try {
 
260
  setImageGenerationStatus("generating")
261
  clap = await editClapStoryboards({
262
  clap,
263
+ // if we use entities, then we MUST use turbo
264
+ // that's because turbo uses PulID,
265
+ // but SDXL doesn't
266
+ turbo: true,
267
  }).then(r => r.promise)
268
 
269
  if (!clap) { throw new Error(`failed to edit the storyboards`) }
 
383
  }
384
 
385
  const handleSubmit = async () => {
386
+ setStatus("generating")
387
+ busyRef.current = true
388
 
389
  startTransition(async () => {
390
+ setStatus("generating")
391
+ busyRef.current = true
392
+
393
  console.log(`handleSubmit(): generating a clap using prompt = "${promptDraftRef.current}" `)
394
 
395
  try {
396
  let clap = await generateStory()
397
 
398
  const tasks = [
399
+ // generateMusic(clap),
400
  generateStoryboardsThenVideos(clap)
401
  ]
402
 
 
475
  let clap = await importStory(fileData)
476
 
477
  const claps = await Promise.all([
478
+ // generateMusic(clap),
479
  generateVideos(clap)
480
  ])
481
 
 
533
  })
534
 
535
  // timerRef.current = setTimeout(timerFn, progressDelayInMsPerStage[stage])
536
+ timerRef.current = setTimeout(timerFn, 1200)
537
  }
538
 
539
  useEffect(() => {
 
543
  timerRef.current = setTimeout(timerFn, 0)
544
  }, [isBusy])
545
 
546
+ // this is how we support query string parameters
547
+ // ?prompt=hello <- set a default prompt
548
+ // ?prompt=hello&autorun=true <- automatically run the app
549
+ // ?orientation=landscape <- can be "landscape" or "portrait" (default)
550
+ const searchParams = useSearchParams()
551
+ const queryStringPrompt = (searchParams?.get('prompt') as string) || ""
552
+ const queryStringAutorun = (searchParams?.get('autorun') as string) || ""
553
+ const queryStringOrientation = (searchParams?.get('orientation') as string) || ""
554
+ useEffect(() => {
555
+ if (queryStringOrientation?.length > 1) {
556
+ console.log(`orientation = "${queryStringOrientation}"`)
557
+ const orientation =
558
+ queryStringOrientation.trim().toLowerCase() === "landscape"
559
+ ? ClapMediaOrientation.LANDSCAPE
560
+ : ClapMediaOrientation.PORTRAIT
561
+ setOrientation(orientation)
562
+ }
563
+ if (queryStringPrompt?.length > 1) {
564
+ console.log(`prompt = "${queryStringPrompt}"`)
565
+ if (queryStringPrompt !== promptDraftRef.current) {
566
+ setStoryPromptDraft(queryStringPrompt)
567
+ }
568
+ const maybeAutorun = queryStringAutorun.trim().toLowerCase()
569
+ console.log(`autorun = "${maybeAutorun}"`)
570
+
571
+ // note: during development we will be called twice,
572
+ // which is why we have a guard on busyRef.current
573
+ if (maybeAutorun === "true" || maybeAutorun === "1" && !busyRef.current) {
574
+ handleSubmit()
575
+ }
576
+ }
577
+ }, [queryStringPrompt, queryStringAutorun, queryStringOrientation])
578
+
579
  return (
580
  <div className={cn(
581
  `fixed`,
 
741
  setStoryPromptDraft(e.target.value)
742
  promptDraftRef.current = e.target.value
743
  }}
744
+ placeholder={defaultPrompt}
 
745
  inputClassName="
746
  transition-all duration-200 ease-in-out
747
  h-32 md:h-56 lg:h-64
 
963
  <p className="text-2xl font-bold">{progress}%</p>
964
  <p className="text-base text-white/70">{isBusy
965
  ? (
966
+ // note: some of those tasks are running in parallel,
967
+ // and some are super-slow (like music or video)
968
+ // by carefully selecting in which order we set the ternaries,
969
+ // we can create the illusion that we just have a succession of reasonably-sized tasks
970
  storyGenerationStatus === "generating" ? "Writing story.."
971
  : parseGenerationStatus === "generating" ? "Loading the project.."
972
  : assetGenerationStatus === "generating" ? "Casting characters.."
 
973
  : imageGenerationStatus === "generating" ? "Creating storyboards.."
974
+ : soundGenerationStatus === "generating" ? "Recording sounds.."
975
+ : musicGenerationStatus === "generating" ? "Producing music.."
976
  : videoGenerationStatus === "generating" ? "Filming shots.."
977
  : voiceGenerationStatus === "generating" ? "Recording dialogues.."
978
  : finalGenerationStatus === "generating" ? "Editing final cut.."
src/app/server/aitube/editClapSounds.ts ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ "use server"
2
+
3
+ import { ClapProject } from "@aitube/clap"
4
+ import { editClapSounds as apiEditClapSounds, ClapCompletionMode } from "@aitube/client"
5
+
6
+ import { getToken } from "./getToken"
7
+ import { Workaround } from "./types"
8
+
9
+ export async function editClapSounds({
10
+ clap,
11
+ turbo = false,
12
+ }: {
13
+ clap: ClapProject
14
+ turbo?: boolean
15
+ }): Workaround<ClapProject> {
16
+ async function promise() {
17
+ return await apiEditClapSounds({
18
+ clap,
19
+ completionMode: ClapCompletionMode.MERGE,
20
+ turbo,
21
+ token: await getToken()
22
+ })
23
+ }
24
+ return {
25
+ promise: promise()
26
+ }
27
+ }
src/app/store.ts CHANGED
@@ -8,6 +8,7 @@ import { getVideoOrientation } from "@/lib/utils/getVideoOrientation"
8
 
9
  import { RESOLUTION_LONG, RESOLUTION_SHORT } from "./server/aitube/config"
10
  import { putTextInTextAreaElement } from "@/lib/utils/putTextInTextAreaElement"
 
11
 
12
  export const useStore = create<{
13
  mainCharacterImage: string
@@ -24,6 +25,7 @@ export const useStore = create<{
24
  parseGenerationStatus: TaskStatus
25
  storyGenerationStatus: TaskStatus
26
  assetGenerationStatus: TaskStatus
 
27
  musicGenerationStatus: TaskStatus
28
  voiceGenerationStatus: TaskStatus
29
  imageGenerationStatus: TaskStatus
@@ -40,6 +42,7 @@ export const useStore = create<{
40
  progress: number
41
  error: string
42
  toggleOrientation: () => void
 
43
  setCurrentVideoOrientation: (currentVideoOrientation: ClapMediaOrientation) => void
44
  setMainCharacterImage: (mainCharacterImage: string) => void
45
  setMainCharacterVoice: (mainCharacterVoice: string) => void
@@ -48,6 +51,7 @@ export const useStore = create<{
48
  setParseGenerationStatus: (parseGenerationStatus: TaskStatus) => void
49
  setStoryGenerationStatus: (storyGenerationStatus: TaskStatus) => void
50
  setAssetGenerationStatus: (assetGenerationStatus: TaskStatus) => void
 
51
  setMusicGenerationStatus: (musicGenerationStatus: TaskStatus) => void
52
  setVoiceGenerationStatus: (voiceGenerationStatus: TaskStatus) => void
53
  setImageGenerationStatus: (imageGenerationStatus: TaskStatus) => void
@@ -67,8 +71,7 @@ export const useStore = create<{
67
  }>((set, get) => ({
68
  mainCharacterImage: "",
69
  mainCharacterVoice: "",
70
- // storyPromptDraft: "Yesterday I was at my favorite pizza place and..",
71
- storyPromptDraft: "underwater footage, coral, fishes",
72
  storyPrompt: "",
73
  orientation: ClapMediaOrientation.PORTRAIT,
74
  status: "idle",
@@ -76,6 +79,7 @@ export const useStore = create<{
76
  parseGenerationStatus: "idle",
77
  storyGenerationStatus: "idle",
78
  assetGenerationStatus: "idle",
 
79
  musicGenerationStatus: "idle",
80
  voiceGenerationStatus: "idle",
81
  imageGenerationStatus: "idle",
@@ -104,6 +108,19 @@ export const useStore = create<{
104
  : orientation
105
  })
106
  },
 
 
 
 
 
 
 
 
 
 
 
 
 
107
  setCurrentVideoOrientation: (currentVideoOrientation: ClapMediaOrientation) => { set({ currentVideoOrientation }) },
108
  setMainCharacterImage: (mainCharacterImage: string) => { set({ mainCharacterImage }) },
109
  setMainCharacterVoice: (mainCharacterVoice: string) => { set({ mainCharacterVoice }) },
@@ -124,6 +141,10 @@ export const useStore = create<{
124
  set({ assetGenerationStatus })
125
  get().syncStatusAndStageState()
126
  },
 
 
 
 
127
  setMusicGenerationStatus: (musicGenerationStatus: TaskStatus) => {
128
  set({ musicGenerationStatus })
129
  get().syncStatusAndStageState()
@@ -145,7 +166,7 @@ export const useStore = create<{
145
  get().syncStatusAndStageState()
146
  },
147
  syncStatusAndStageState: () => {
148
- const { status, storyGenerationStatus, assetGenerationStatus, musicGenerationStatus, voiceGenerationStatus, imageGenerationStatus, videoGenerationStatus, finalGenerationStatus } = get()
149
 
150
  // note: we don't really have "stages" since some things run in parallel,
151
  // and some parallel tasks may finish before the others
@@ -154,6 +175,7 @@ export const useStore = create<{
154
  storyGenerationStatus === "generating" ? "story" :
155
  assetGenerationStatus === "generating" ? "entities" :
156
  musicGenerationStatus === "generating" ? "music" :
 
157
  voiceGenerationStatus === "generating" ? "voices" :
158
  imageGenerationStatus === "generating" ? "images" :
159
  videoGenerationStatus === "generating" ? "videos" :
 
8
 
9
  import { RESOLUTION_LONG, RESOLUTION_SHORT } from "./server/aitube/config"
10
  import { putTextInTextAreaElement } from "@/lib/utils/putTextInTextAreaElement"
11
+ import { defaultPrompt } from "./config"
12
 
13
  export const useStore = create<{
14
  mainCharacterImage: string
 
25
  parseGenerationStatus: TaskStatus
26
  storyGenerationStatus: TaskStatus
27
  assetGenerationStatus: TaskStatus
28
+ soundGenerationStatus: TaskStatus
29
  musicGenerationStatus: TaskStatus
30
  voiceGenerationStatus: TaskStatus
31
  imageGenerationStatus: TaskStatus
 
42
  progress: number
43
  error: string
44
  toggleOrientation: () => void
45
+ setOrientation: (orientation: ClapMediaOrientation) => void
46
  setCurrentVideoOrientation: (currentVideoOrientation: ClapMediaOrientation) => void
47
  setMainCharacterImage: (mainCharacterImage: string) => void
48
  setMainCharacterVoice: (mainCharacterVoice: string) => void
 
51
  setParseGenerationStatus: (parseGenerationStatus: TaskStatus) => void
52
  setStoryGenerationStatus: (storyGenerationStatus: TaskStatus) => void
53
  setAssetGenerationStatus: (assetGenerationStatus: TaskStatus) => void
54
+ setSoundGenerationStatus: (soundGenerationStatus: TaskStatus) => void
55
  setMusicGenerationStatus: (musicGenerationStatus: TaskStatus) => void
56
  setVoiceGenerationStatus: (voiceGenerationStatus: TaskStatus) => void
57
  setImageGenerationStatus: (imageGenerationStatus: TaskStatus) => void
 
71
  }>((set, get) => ({
72
  mainCharacterImage: "",
73
  mainCharacterVoice: "",
74
+ storyPromptDraft: defaultPrompt,
 
75
  storyPrompt: "",
76
  orientation: ClapMediaOrientation.PORTRAIT,
77
  status: "idle",
 
79
  parseGenerationStatus: "idle",
80
  storyGenerationStatus: "idle",
81
  assetGenerationStatus: "idle",
82
+ soundGenerationStatus: "idle",
83
  musicGenerationStatus: "idle",
84
  voiceGenerationStatus: "idle",
85
  imageGenerationStatus: "idle",
 
108
  : orientation
109
  })
110
  },
111
+ setOrientation: (orientation: ClapMediaOrientation) => {
112
+ const { currentVideoOrientation, currentVideo } = get()
113
+
114
+ set({
115
+ orientation,
116
+
117
+ // we normally don't touch the currentVideoOrientation since it will already contain a video
118
+ currentVideoOrientation:
119
+ currentVideo
120
+ ? currentVideoOrientation
121
+ : orientation
122
+ })
123
+ },
124
  setCurrentVideoOrientation: (currentVideoOrientation: ClapMediaOrientation) => { set({ currentVideoOrientation }) },
125
  setMainCharacterImage: (mainCharacterImage: string) => { set({ mainCharacterImage }) },
126
  setMainCharacterVoice: (mainCharacterVoice: string) => { set({ mainCharacterVoice }) },
 
141
  set({ assetGenerationStatus })
142
  get().syncStatusAndStageState()
143
  },
144
  // Update the sound-generation task status, then recompute the derived
  // global status/stage (same pattern as the sibling set*GenerationStatus setters).
  setSoundGenerationStatus: (soundGenerationStatus: TaskStatus) => {
    set({ soundGenerationStatus })
    get().syncStatusAndStageState()
  },
148
  setMusicGenerationStatus: (musicGenerationStatus: TaskStatus) => {
149
  set({ musicGenerationStatus })
150
  get().syncStatusAndStageState()
 
166
  get().syncStatusAndStageState()
167
  },
168
  syncStatusAndStageState: () => {
169
+ const { status, storyGenerationStatus, assetGenerationStatus, soundGenerationStatus, musicGenerationStatus, voiceGenerationStatus, imageGenerationStatus, videoGenerationStatus, finalGenerationStatus } = get()
170
 
171
  // note: we don't really have "stages" since some things run in parallel,
172
  // and some parallel tasks may finish before the others
 
175
  storyGenerationStatus === "generating" ? "story" :
176
  assetGenerationStatus === "generating" ? "entities" :
177
  musicGenerationStatus === "generating" ? "music" :
178
+ soundGenerationStatus === "generating" ? "sounds" :
179
  voiceGenerationStatus === "generating" ? "voices" :
180
  imageGenerationStatus === "generating" ? "images" :
181
  videoGenerationStatus === "generating" ? "videos" :
src/types.ts CHANGED
@@ -13,6 +13,7 @@ export type GlobalStatus =
13
  export type GenerationStage =
14
  | "story"
15
  | "entities"
 
16
  | "music"
17
  | "voices"
18
  | "images"
 
13
  export type GenerationStage =
14
  | "story"
15
  | "entities"
16
+ | "sounds"
17
  | "music"
18
  | "voices"
19
  | "images"