jbilcke-hf committed
Commit 68ec2c8
1 Parent(s): 96139a3

now we serialize the final video as well

Files changed (2):
  1. src/app/main.tsx +37 -30
  2. src/app/store.ts +53 -5
src/app/main.tsx CHANGED
@@ -100,27 +100,26 @@ export function Main() {
   const busyRef = useRef(isBusy)
   busyRef.current = isBusy
 
-  const importStory = async (fileData: FileContent<ArrayBuffer>): Promise<ClapProject> => {
+  const importStory = async (fileData: FileContent<ArrayBuffer>): Promise<{
+    clap: ClapProject
+    regenerateVideo: boolean
+  }> => {
     if (!fileData?.name) { throw new Error(`invalid file (missing file name)`) }
 
     const {
-      setStatus,
-      setProgress,
       setParseGenerationStatus,
     } = useStore.getState()
 
-    let clap: ClapProject | undefined = undefined
-
     setParseGenerationStatus("generating")
 
     try {
       const blob = new Blob([fileData.content])
-      clap = await loadClap(blob, fileData.name)
+      const res = await loadClap(blob, fileData.name)
+
+      if (!res?.clap) { throw new Error(`failed to load the clap file`) }
 
-      if (!clap) { throw new Error(`failed to load the clap file`) }
       setParseGenerationStatus("finished")
-      setCurrentClap(clap)
-      return clap
+      return res
     } catch (err) {
       console.error("failed to load the Clap file:", err)
       setParseGenerationStatus("error")
@@ -472,29 +471,39 @@ export function Main() {
     setStatus("generating")
 
     try {
-      let clap = await importStory(fileData)
+      let { clap, regenerateVideo } = await importStory(fileData)
 
       // clap = await generateSounds(clap)
 
       // setCurrentClap(clap)
 
       console.log("loadClap(): clap = ", clap)
-      const claps = await Promise.all([
-        generateMusic(clap),
-        generateVideos(clap)
-      ])
 
-      // console.log("finished processing the 2 tasks in parallel")
+      // it is important to skip regeneration if we already have a video
+      if (regenerateVideo) {
+        console.log(`regenerating music and videos..`)
+        const claps = await Promise.all([
+          generateMusic(clap),
+          generateVideos(clap)
+        ])
+
+        // console.log("finished processing the 2 tasks in parallel")
+
+        for (const newerClap of claps) {
+          clap = await updateClap(clap, newerClap, {
+            overwriteMeta: false,
+            inlineReplace: true,
+          })
+        }
 
-      for (const newerClap of claps) {
-        clap = await updateClap(clap, newerClap, {
-          overwriteMeta: false,
-          inlineReplace: true,
-        })
-      }
 
-      setCurrentClap(clap)
-      await generateFinalVideo(clap)
+        setCurrentClap(clap)
+
+        await generateFinalVideo(clap)
+
+      } else {
+        console.log(`skipping music and video regeneration`)
+      }
 
       setStatus("finished")
       setProgress(100)
@@ -805,8 +814,8 @@
 
       {canSeeBetaFeatures &&
         <Tooltip>
-          <TooltipTrigger>
-            <Button
+          <TooltipTrigger asChild><Button
+
             onClick={openFilePicker}
             disabled={isBusy}
             // variant="ghost"
@@ -820,8 +829,7 @@
           >
             <span className="hidden xl:inline mr-1">Load .clap</span>
             <span className="inline xl:hidden mr-1">Load .clap</span>
-          </Button>
-        </TooltipTrigger>
+          </Button></TooltipTrigger>
           <TooltipContent side="top">
             <p className="text-xs font-normal text-stone-100/90 text-center">
               Clap is a new AI format,<br/>check out the academy<br/>to learn more about it.
@@ -833,8 +841,7 @@
 
       {canSeeBetaFeatures &&
         <Tooltip>
-          <TooltipTrigger>
-            <Button
+          <TooltipTrigger asChild><Button
            onClick={() => saveClap()}
            disabled={!currentClap || isBusy}
            // variant="ghost"
@@ -848,7 +855,7 @@
           >
             <span className="hidden xl:inline mr-1">Save .clap</span>
             <span className="inline xl:hidden mr-1">Save .clap</span>
-          </Button> </TooltipTrigger>
+          </Button></TooltipTrigger>
           <TooltipContent side="top">
             <p className="text-xs font-normal text-stone-100/90 text-center">
               Clap is a new AI format,<br/>check out the academy<br/>to learn more about it.
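
The behavioral change in main.tsx hinges on the new regenerateVideo flag: when the imported .clap already embeds a completed, full-length video, music and video generation are skipped entirely. Below is a minimal sketch of the predicate behind that flag, pulled out into a standalone helper; the helper name findEmbeddedFinalVideo is hypothetical (not part of the commit) and simply mirrors the segment filter used in loadClap in store.ts below.

import { ClapProject, ClapSegmentCategory } from "@aitube/clap"

// Returns the asset URL of an already-embedded, full-length final video, if any.
// A segment qualifies when it is a completed VIDEO segment that spans the whole
// project duration and has a non-empty assetUrl.
export function findEmbeddedFinalVideo(clap: ClapProject): string | undefined {
  return clap.segments
    .filter(s =>
      s.category === ClapSegmentCategory.VIDEO &&
      s.status === "completed" &&
      s.startTimeInMs === 0 &&
      s.endTimeInMs === clap.meta.durationInMs &&
      s.assetUrl
    )
    .map(s => s.assetUrl)
    .at(0)
}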
src/app/store.ts CHANGED
@@ -1,6 +1,6 @@
 "use client"
 
-import { ClapProject, parseClap, serializeClap, ClapMediaOrientation, parseMediaOrientation } from "@aitube/clap"
+import { ClapProject, parseClap, serializeClap, ClapMediaOrientation, parseMediaOrientation, ClapSegmentCategory, newSegment, getClapAssetSourceType } from "@aitube/clap"
 import { create } from "zustand"
 
 import { GenerationStage, GlobalStatus, TaskStatus } from "@/types"
@@ -67,7 +67,10 @@ export const useStore = create<{
   setError: (error: string) => void
   saveVideo: () => Promise<void>
   saveClap: () => Promise<void>
-  loadClap: (blob: Blob, fileName?: string) => Promise<ClapProject>
+  loadClap: (blob: Blob, fileName?: string) => Promise<{
+    clap: ClapProject
+    regenerateVideo: boolean
+  }>
 }>((set, get) => ({
   mainCharacterImage: "",
   mainCharacterVoice: "",
@@ -242,12 +245,43 @@ export const useStore = create<{
     document.body.removeChild(anchor)
   },
   saveClap: async (): Promise<void> => {
-    const { currentClap , storyPrompt } = get()
+    const { currentClap , storyPrompt, currentVideo } = get()
 
     if (!currentClap) { throw new Error(`cannot save a clap.. if there is no clap`) }
 
     currentClap.meta.description = storyPrompt
 
+    // make sure we update the total duration
+    for (const s of currentClap.segments) {
+      if (s.endTimeInMs > currentClap.meta.durationInMs) {
+        currentClap.meta.durationInMs = s.endTimeInMs
+      }
+    }
+
+    const alreadyAnEmbeddedFinalVideo = currentClap.segments.filter(s =>
+      s.category === ClapSegmentCategory.VIDEO &&
+      s.status === "completed" &&
+      s.startTimeInMs === 0 &&
+      s.endTimeInMs === currentClap.meta.durationInMs &&
+      s.assetUrl).at(0)
+
+    // inject the final mp4 video file into the .clap
+    if (alreadyAnEmbeddedFinalVideo) {
+      console.log(`editing the clap to update the final video`)
+      alreadyAnEmbeddedFinalVideo.assetUrl = currentVideo
+    } else {
+      console.log(`editing the clap to add a new final video`)
+      currentClap.segments.push(newSegment({
+        category: ClapSegmentCategory.VIDEO,
+        status: "completed",
+        startTimeInMs: 0,
+        endTimeInMs: currentClap.meta.durationInMs,
+        assetUrl: currentVideo,
+        assetDurationInMs: currentClap.meta.durationInMs,
+        assetSourceType: getClapAssetSourceType(currentVideo),
+        outputGain: 1.0,
+      }))
+    }
     const currentClapBlob: Blob = await serializeClap(currentClap)
 
     // Create an object URL for the compressed clap blob
@@ -272,7 +306,10 @@ export const useStore = create<{
     URL.revokeObjectURL(objectUrl)
     document.body.removeChild(anchor)
   },
-  loadClap: async (blob: Blob, fileName: string = "untitled_story.clap"): Promise<ClapProject> => {
+  loadClap: async (blob: Blob, fileName: string = "untitled_story.clap"): Promise<{
+    clap: ClapProject
+    regenerateVideo: boolean
+  }> => {
     if (!blob) {
       throw new Error(`missing blob`)
     }
@@ -293,13 +330,24 @@ export const useStore = create<{
     currentClap.meta.height = orientation === ClapMediaOrientation.LANDSCAPE ? RESOLUTION_SHORT : RESOLUTION_LONG
     currentClap.meta.width = orientation === ClapMediaOrientation.PORTRAIT ? RESOLUTION_SHORT : RESOLUTION_LONG
 
+    const embeddedFinalVideoAssetUrl = currentClap.segments.filter(s =>
+      s.category === ClapSegmentCategory.VIDEO &&
+      s.status === "completed" &&
+      s.startTimeInMs === 0 &&
+      s.endTimeInMs === currentClap.meta.durationInMs &&
+      s.assetUrl).map(s => s.assetUrl).at(0)
+
     set({
       currentClap,
      storyPrompt,
      orientation,
+      currentVideo: embeddedFinalVideoAssetUrl || get().currentVideo,
      currentVideoOrientation: orientation,
    })
 
-    return currentClap
+    return {
+      clap: currentClap,
+      regenerateVideo: !embeddedFinalVideoAssetUrl,
+    }
   },
 }))
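
Taken together, saveClap() now writes the rendered final video into the .clap as a completed, full-length VIDEO segment, and loadClap() restores it into currentVideo and reports whether regeneration is still needed. A rough usage sketch against the store shown above; the import path "@/app/store" is an assumption based on the "@/" alias used elsewhere in the repo, and the file name is arbitrary.

import { useStore } from "@/app/store"

async function importClap(file: Blob) {
  const { loadClap } = useStore.getState()

  // loadClap() also copies the embedded final video (if any) into `currentVideo`
  const { clap, regenerateVideo } = await loadClap(file, "my_story.clap")

  console.log("segments:", clap.segments.length)
  // false when the file already carries a completed, full-length VIDEO segment
  console.log("needs regeneration:", regenerateVideo)
}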