Commit 99c03ec
Parent(s): 0465776

upgrade

Changed files:
- .nvmrc +1 -1
- package-lock.json +0 -0
- package.json +3 -3
- src/app/main.tsx +3 -3
- src/app/server/aitube/createClap.ts +6 -6
- src/app/server/aitube/editClapDialogues.ts +1 -1
- src/app/server/aitube/editClapEntities.ts +1 -1
- src/app/server/aitube/editClapMusic.ts +1 -1
- src/app/server/aitube/editClapSounds.ts +1 -1
- src/app/server/aitube/editClapStory.ts +1 -1
- src/app/server/aitube/editClapStoryboards.ts +1 -1
- src/app/server/aitube/editClapVideos.ts +1 -1
- src/app/server/aitube/exportClapToVideo.ts +1 -1
- src/app/store.ts +42 -42
- src/lib/hooks/useOrientation.ts +12 -12
- src/lib/hooks/useProcessors.ts +8 -8
- src/lib/hooks/useProgressTimer.ts +3 -1
- src/lib/hooks/useQueryStringParams.ts +9 -9
- src/lib/utils/{getVideoOrientation.ts → getImageRatio.ts} +7 -7
.nvmrc
CHANGED
@@ -1 +1 @@
-v20.
+v20.17.0
package-lock.json
CHANGED
The diff for this file is too large to render. See raw diff.
package.json
CHANGED
@@ -9,8 +9,8 @@
 "lint": "next lint"
 },
 "dependencies": {
-"@aitube/clap": "0.
-"@aitube/client": "0.
+"@aitube/clap": "0.2.4",
+"@aitube/client": "0.2.4-3",
 "@huggingface/hub": "^0.15.1",
 "@radix-ui/react-accordion": "^1.1.2",
 "@radix-ui/react-avatar": "^1.0.4",
@@ -46,7 +46,7 @@
 "jimp": "^0.22.12",
 "jose": "^5.2.4",
 "lucide-react": "^0.334.0",
-"next": "^14.2.
+"next": "^14.2.7",
 "next-themes": "^0.2.1",
 "postcss": "8.4.38",
 "qs": "^6.12.1",
src/app/main.tsx
CHANGED
@@ -3,7 +3,7 @@
 import React from "react"
 import { IoMdPhonePortrait } from "react-icons/io"
 import { GiRollingDices } from "react-icons/gi"
-import {
+import { ClapImageRatio } from "@aitube/clap"

 import { Card, CardContent, CardHeader } from "@/components/ui/card"
 import { Button } from "@/components/ui/button"
@@ -28,7 +28,7 @@ import { useStore } from "./store"
 export function Main() {
 const { storyPromptDraft, setStoryPromptDraft, promptDraftRef } = useStoryPromptDraft()
 const { isBusy } = useIsBusy()
-const {
+const { imageRatio, toggleOrientation } = useOrientation()
 const { handleCreateStory, handleExtendStory } = useProcessors()
 useQueryStringParams()

@@ -276,7 +276,7 @@ export function Main()
 >
 <div className={cn(
 `transition-all duration-200 ease-in-out`,
-
+imageRatio === ClapImageRatio.LANDSCAPE ? `rotate-90` : `rotate-0`
 )}>
 <IoMdPhonePortrait size={24} />
 </div>
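For context, a minimal sketch of how the orientation toggle is typically wired on this page after the change. The component and the `cn`/`useOrientation` import paths are assumptions for illustration; the class names and hook values come from the diff above.

```tsx
import { ClapImageRatio } from "@aitube/clap"
import { IoMdPhonePortrait } from "react-icons/io"
import { cn } from "@/lib/utils"                       // assumed: same cn helper used in main.tsx
import { useOrientation } from "@/lib/hooks/useOrientation"

// Hypothetical button: the single portrait-phone icon is reused for both
// orientations by rotating it 90 degrees when the requested ratio is landscape.
function OrientationToggle() {
  const { imageRatio, toggleOrientation } = useOrientation()
  return (
    <button onClick={() => toggleOrientation()}>
      <div className={cn(
        `transition-all duration-200 ease-in-out`,
        imageRatio === ClapImageRatio.LANDSCAPE ? `rotate-90` : `rotate-0`
      )}>
        <IoMdPhonePortrait size={24} />
      </div>
    </button>
  )
}
```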
src/app/server/aitube/createClap.ts
CHANGED
@@ -3,8 +3,8 @@

 import { Ratelimit } from "@upstash/ratelimit"
 import { Redis } from "@upstash/redis"
-import { ClapProject,
-import { createClap as apiCreateClap } from "@aitube/client"
+import { ClapProject, ClapImageRatio } from "@aitube/clap"
+import { createClap as apiCreateClap } from "@aitube/api-client"

 import { getToken } from "./getToken"
 import { RESOLUTION_LONG, RESOLUTION_SHORT, MAX_PROMPT_LENGTH_IN_CHARS } from "../config"
@@ -14,11 +14,11 @@ const rateLimit = getRateLimit()

 export async function createClap({
 prompt = "",
-
+imageRatio = ClapImageRatio.PORTRAIT,
 turbo = false,
 }: {
 prompt: string
-
+imageRatio?: ClapImageRatio
 turbo?: boolean
 }): Promise<ClapProject> {

@@ -42,8 +42,8 @@ export async function createClap({
 const clap: ClapProject = await apiCreateClap({
 prompt: prompt.slice(0, MAX_PROMPT_LENGTH_IN_CHARS),

-height:
-width:
+height: imageRatio === ClapImageRatio.PORTRAIT ? RESOLUTION_LONG : RESOLUTION_SHORT,
+width: imageRatio === ClapImageRatio.PORTRAIT ? RESOLUTION_SHORT : RESOLUTION_LONG,

 turbo,

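The new height/width lines encode a simple mapping from the requested ratio to the configured resolutions. A hedged standalone sketch of that mapping follows; the helper name and the numeric values are illustrative only (the real constants live in src/app/server/config.ts).

```ts
import { ClapImageRatio } from "@aitube/clap"

// Illustrative values; the real ones come from ../config as in the diff above.
const RESOLUTION_LONG = 1024
const RESOLUTION_SHORT = 576

// Hypothetical helper mirroring the ternaries used in createClap():
// portrait renders are tall (long height, short width), everything else is wide.
function resolutionFor(imageRatio: ClapImageRatio): { width: number; height: number } {
  const isPortrait = imageRatio === ClapImageRatio.PORTRAIT
  return {
    height: isPortrait ? RESOLUTION_LONG : RESOLUTION_SHORT,
    width: isPortrait ? RESOLUTION_SHORT : RESOLUTION_LONG,
  }
}
```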
src/app/server/aitube/editClapDialogues.ts
CHANGED
@@ -1,7 +1,7 @@
 "use server"

 import { ClapProject, ClapCompletionMode } from "@aitube/clap"
-import { editClapDialogues as apiEditClapDialogues } from "@aitube/client"
+import { editClapDialogues as apiEditClapDialogues } from "@aitube/api-client"

 import { getToken } from "./getToken"
 import { Workaround } from "./types"
src/app/server/aitube/editClapEntities.ts
CHANGED
@@ -1,7 +1,7 @@
 "use server"

 import { ClapProject, ClapCompletionMode } from "@aitube/clap"
-import { editClapEntities as apiEditClapEntities, ClapEntityPrompt } from "@aitube/client"
+import { editClapEntities as apiEditClapEntities, ClapEntityPrompt } from "@aitube/api-client"

 import { getToken } from "./getToken"
 import { Workaround } from "./types"
src/app/server/aitube/editClapMusic.ts
CHANGED
@@ -1,7 +1,7 @@
 "use server"

 import { ClapProject, ClapCompletionMode } from "@aitube/clap"
-import { editClapMusic as apiEditClapMusic } from "@aitube/client"
+import { editClapMusic as apiEditClapMusic } from "@aitube/api-client"

 import { getToken } from "./getToken"
 import { Workaround } from "./types"
src/app/server/aitube/editClapSounds.ts
CHANGED
@@ -1,7 +1,7 @@
 "use server"

 import { ClapProject, ClapCompletionMode } from "@aitube/clap"
-import { editClapSounds as apiEditClapSounds } from "@aitube/client"
+import { editClapSounds as apiEditClapSounds } from "@aitube/api-client"

 import { getToken } from "./getToken"
 import { Workaround } from "./types"
src/app/server/aitube/editClapStory.ts
CHANGED
@@ -1,7 +1,7 @@
 "use server"

 import { ClapProject, ClapCompletionMode } from "@aitube/clap"
-import { editClapStory as apiEditClapStory } from "@aitube/client"
+import { editClapStory as apiEditClapStory } from "@aitube/api-client"

 import { getToken } from "./getToken"
 import { Workaround } from "./types"
src/app/server/aitube/editClapStoryboards.ts
CHANGED
@@ -1,7 +1,7 @@
 "use server"

 import { ClapProject, ClapCompletionMode } from "@aitube/clap"
-import { editClapStoryboards as apiEditClapStoryboards } from "@aitube/client"
+import { editClapStoryboards as apiEditClapStoryboards } from "@aitube/api-client"

 import { getToken } from "./getToken"
 import { Workaround } from "./types"
src/app/server/aitube/editClapVideos.ts
CHANGED
@@ -1,7 +1,7 @@
 "use server"

 import { ClapProject, ClapCompletionMode } from "@aitube/clap"
-import { editClapVideos as apiEditClapVideos } from "@aitube/client"
+import { editClapVideos as apiEditClapVideos } from "@aitube/api-client"

 import { getToken } from "./getToken"
 import { Workaround } from "./types"
src/app/server/aitube/exportClapToVideo.ts
CHANGED
@@ -1,7 +1,7 @@
 "use server"

 import { ClapProject } from "@aitube/clap"
-import { exportClapToVideo as apiExportClapToVideo } from "@aitube/client"
+import { exportClapToVideo as apiExportClapToVideo } from "@aitube/api-client"

 import { getToken } from "./getToken"
 import { removeFinalVideos } from "@/lib/utils/removeFinalVideos"
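Each of the server actions above changes in the same one-line way: only the module the API call is imported from moves. For reference, a hedged sketch of the shared shape these "use server" wrappers follow, inferred from the imports shown in the diffs; the parameter list and the options passed to the client call are assumptions for illustration, not the repo's exact signatures.

```ts
"use server"

import { ClapProject } from "@aitube/clap"
import { editClapMusic as apiEditClapMusic } from "@aitube/api-client"

import { getToken } from "./getToken"

// Hedged sketch only: resolve a server-side API token, then delegate to the
// corresponding @aitube client call. Option names (clap, token, turbo) are
// assumed here and differ per wrapper in the real code.
export async function editClapMusic(clap: ClapProject, turbo = false): Promise<ClapProject> {
  const token = await getToken()
  return apiEditClapMusic({ clap, token, turbo })
}
```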
src/app/store.ts
CHANGED
@@ -1,10 +1,10 @@
 "use client"

-import { ClapProject, parseClap, serializeClap,
+import { ClapProject, parseClap, serializeClap, ClapImageRatio, parseImageRatio, ClapSegmentCategory, newSegment, getClapAssetSourceType, ClapSegmentStatus } from "@aitube/clap"
 import { create } from "zustand"

 import { GenerationStage, GlobalStatus, TaskStatus } from "@/types"
-import {
+import { getImageRatio } from "@/lib/utils/getImageRatio"

 import { RESOLUTION_LONG, RESOLUTION_SHORT } from "./server/config"
 import { putTextInTextAreaElement } from "@/lib/utils/putTextInTextAreaElement"
@@ -15,9 +15,9 @@ export const useStore = create<{
 mainCharacterVoice: string
 storyPrompt: string

-// the desired
-// but this won't impact the actual
-
+// the desired imageRatio for the next video
+// but this won't impact the actual imageRatio of the fake device container
+imageRatio: ClapImageRatio

 status: GlobalStatus
 stage: GenerationStage
@@ -43,16 +43,16 @@ export const useStore = create<{

 currentVideo: string

-//
-// it will impact the actual
-
+// imageRatio of the currently loaded video (which can be different from `imageRatio`)
+// it will impact the actual imageRatio of the fake device container
+currentImageRatio: ClapImageRatio
 progress: number
 error: string
 showAuthWall: boolean
 setShowAuthWall: (showAuthWall: boolean) => void
 toggleOrientation: () => void
-
-setCurrentVideoOrientation: (
+setImageRatio: (imageRatio: ClapImageRatio) => void
+setCurrentVideoOrientation: (currentImageRatio: ClapImageRatio) => void
 setMainCharacterImage: (mainCharacterImage: string) => void
 setMainCharacterVoice: (mainCharacterVoice: string) => void
 setStoryPrompt: (storyPrompt: string) => void
@@ -70,7 +70,7 @@ export const useStore = create<{
 setSkeletonClap: (fullClap?: ClapProject) => void
 setFullClap: (fullClap?: ClapProject) => void

-// note: this will preload the video, and compute the
+// note: this will preload the video, and compute the imageRatio too
 setCurrentVideo: (currentVideo: string) => Promise<void>

 setProgress: (progress: number) => void
@@ -86,7 +86,7 @@ export const useStore = create<{
 mainCharacterVoice: "",
 storyPromptDraft: defaultPrompt,
 storyPrompt: "",
-
+imageRatio: ClapImageRatio.PORTRAIT,
 status: "idle",
 stage: "idle",
 statusMessage: "",
@@ -103,42 +103,42 @@ export const useStore = create<{
 skeletonClap: undefined,
 fullClap: undefined,
 currentVideo: "",
-
+currentImageRatio: ClapImageRatio.PORTRAIT,
 progress: 0,
 error: "",
 showAuthWall: false,
 setShowAuthWall: (showAuthWall: boolean) => { set({ showAuthWall }) },
 toggleOrientation: () => {
-const {
-const
-previousOrientation ===
-?
-:
+const { imageRatio: previousOrientation, currentImageRatio, currentVideo } = get()
+const imageRatio =
+previousOrientation === ClapImageRatio.LANDSCAPE
+? ClapImageRatio.PORTRAIT
+: ClapImageRatio.LANDSCAPE

 set({
-
+imageRatio,

-// we normally don't touch the
-
+// we normally don't touch the currentImageRatio since it will already contain a video
+currentImageRatio:
 currentVideo
-?
-:
+? currentImageRatio
+: imageRatio
 })
 },
-
-const {
+setImageRatio: (imageRatio: ClapImageRatio) => {
+const { currentImageRatio, currentVideo } = get()

 set({
-
+imageRatio,

-// we normally don't touch the
-
+// we normally don't touch the currentImageRatio since it will already contain a video
+currentImageRatio:
 currentVideo
-?
-:
+? currentImageRatio
+: imageRatio
 })
 },
-setCurrentVideoOrientation: (
+setCurrentVideoOrientation: (currentImageRatio: ClapImageRatio) => { set({ currentImageRatio }) },
 setMainCharacterImage: (mainCharacterImage: string) => { set({ mainCharacterImage }) },
 setMainCharacterVoice: (mainCharacterVoice: string) => { set({ mainCharacterVoice }) },
 setStoryPrompt: (storyPrompt: string) => { set({ storyPrompt }) },
@@ -231,18 +231,18 @@ export const useStore = create<{
 set({
 currentVideo,
 })
-const {
-let
+const { currentImageRatio } = get()
+let imageRatio: ClapImageRatio = currentImageRatio
 try {
-let newOrientation = await
+let newOrientation = await getImageRatio(currentVideo)
 if (newOrientation) {
-
+imageRatio = newOrientation
 }
 } catch (err) {
-console.error(`failed to get the media
+console.error(`failed to get the media imageRatio`)
 }
 set({
-
+currentImageRatio: imageRatio
 })

 // get().syncStatusAndStageState()
@@ -359,10 +359,10 @@ export const useStore = create<{
 storyPrompt
 )

-const
+const imageRatio = parseImageRatio(fullClap.meta.imageRatio)

-fullClap.meta.height =
-fullClap.meta.width =
+fullClap.meta.height = imageRatio === ClapImageRatio.LANDSCAPE ? RESOLUTION_SHORT : RESOLUTION_LONG
+fullClap.meta.width = imageRatio === ClapImageRatio.PORTRAIT ? RESOLUTION_SHORT : RESOLUTION_LONG

 const embeddedFinalVideoAssetUrl = fullClap.segments.filter(s =>
 s.category === ClapSegmentCategory.VIDEO &&
@@ -374,9 +374,9 @@ export const useStore = create<{
 set({
 fullClap,
 storyPrompt,
-
+imageRatio,
 currentVideo: embeddedFinalVideoAssetUrl || get().currentVideo,
-
+currentImageRatio: imageRatio,
 })

 return {
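After this change the store tracks two ratios: `imageRatio` (what the next video should be rendered as) and `currentImageRatio` (what the currently loaded video actually is). A short hedged usage sketch of the new actions from outside a component, using zustand's `getState()`; the flow is illustrative, the names come from the diff above.

```ts
import { ClapImageRatio } from "@aitube/clap"
import { useStore } from "@/app/store"

// Request a landscape render for the *next* video. If a video is already
// loaded, currentImageRatio is left untouched so the fake device frame keeps
// matching what is currently playing.
useStore.getState().setImageRatio(ClapImageRatio.LANDSCAPE)

// Flip the requested ratio back and forth (portrait <-> landscape).
useStore.getState().toggleOrientation()

// Once setCurrentVideo() runs, it probes the file with getImageRatio()
// and updates currentImageRatio to the measured value.
console.log(useStore.getState().currentImageRatio)
```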
src/lib/hooks/useOrientation.ts
CHANGED
@@ -1,21 +1,21 @@
 import { useStore } from "@/app/store"
-import {
+import { ClapImageRatio } from "@aitube/clap"

 export function useOrientation() {
-const
-const
-const
+const imageRatio = useStore(s => s.imageRatio)
+const setImageRatio = useStore(s => s.setImageRatio)
+const currentImageRatio = useStore(s => s.currentImageRatio)
 const toggleOrientation = useStore(s => s.toggleOrientation)
-// note: we are interested in the *current* video
-// not the requested video
-const isLandscape =
-const isPortrait =
-const isSquare =
+// note: we are interested in the *current* video imageRatio,
+// not the requested video imageRatio requested for the next video
+const isLandscape = currentImageRatio === ClapImageRatio.LANDSCAPE
+const isPortrait = currentImageRatio === ClapImageRatio.PORTRAIT
+const isSquare = currentImageRatio === ClapImageRatio.SQUARE

 return {
-
-
-
+imageRatio,
+setImageRatio,
+currentImageRatio,
 toggleOrientation,
 isLandscape,
 isPortrait,
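A short, hedged example of consuming the hook in a component; the component and its Tailwind classes are illustrative, not part of the commit.

```tsx
import type { ReactNode } from "react"
import { useOrientation } from "@/lib/hooks/useOrientation"

// Hypothetical component: picks an aspect-ratio class based on the *currently
// loaded* video, which is what isLandscape/isSquare report (not the requested ratio).
function DeviceFrame({ children }: { children: ReactNode }) {
  const { isLandscape, isSquare } = useOrientation()
  const aspect = isLandscape ? "aspect-video" : isSquare ? "aspect-square" : "aspect-[9/16]"
  return <div className={aspect}>{children}</div>
}
```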
src/lib/hooks/useProcessors.ts
CHANGED
@@ -74,7 +74,7 @@ export function useProcessors() {

 clap = await createClap({
 prompt: promptDraftRef.current,
-
+imageRatio: useStore.getState().imageRatio,

 turbo: false,
 })
@@ -264,22 +264,22 @@ export function useProcessors() {
 if (!clap) { throw new Error(`failed to edit the storyboards`) }

 // const fusion =
-console.log(`generateStoryboards(): received
+console.log(`generateStoryboards(): received storyboard images = `, clap)

 setImageGenerationStatus("finished")
-console.log("---------------- GENERATED
+console.log("---------------- GENERATED STORYBOARD IMAGES ----------------")
 clap.segments
-.filter(s => s.category === ClapSegmentCategory.
+.filter(s => s.category === ClapSegmentCategory.IMAGE)
 .forEach((s, i) => {
 if (s.status === ClapSegmentStatus.COMPLETED && s.assetUrl) {
 // console.log(` [${i}] storyboard: ${s.prompt}`)
 logImage(s.assetUrl, 0.35)
 } else {
-console.log(` [${i}] failed to generate storyboard`)
+console.log(` [${i}] failed to generate storyboard images`)
 }
 // console.log(`------------------`)
 })
-console.table(clap.segments.filter(s => s.category === ClapSegmentCategory.
+console.table(clap.segments.filter(s => s.category === ClapSegmentCategory.IMAGE), [
 'endTimeInMs',
 'prompt',
 'assetUrl'
@@ -376,14 +376,14 @@ export function useProcessors() {
 }

 const injectCharacters = async (clap: ClapProject): Promise<void> => {
-const storyboards = clap.segments.filter(s => s.category === ClapSegmentCategory.
+const storyboards = clap.segments.filter(s => s.category === ClapSegmentCategory.IMAGE)

 let mainCharacter = clap.entities.at(0)

 // let's do something basic for now: we only support 1 entity (character)
 // and we apply it to *all* the storyboards (we can always improve this later)
 if (mainCharacter) {
-console.log(`injectCharacters(): we use the clap's main character's face on all
+console.log(`injectCharacters(): we use the clap's main character's face on all storyboard images`)
 storyboards.forEach(storyboard => { storyboard.entityId = mainCharacter!.id })
 logImage(mainCharacter.imageId, 0.35)
 } else if (mainCharacterImage) {
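The segment filtering above now keys on `ClapSegmentCategory.IMAGE`. A hedged helper mirroring that pattern; the function name is illustrative, the filter itself matches the diff.

```ts
import { ClapProject, ClapSegment, ClapSegmentCategory, ClapSegmentStatus } from "@aitube/clap"

// Illustrative helper: collect the storyboard image segments that actually
// finished rendering, i.e. the same filter useProcessors() applies before logging.
function getCompletedStoryboardImages(clap: ClapProject): ClapSegment[] {
  return clap.segments.filter(s =>
    s.category === ClapSegmentCategory.IMAGE &&
    s.status === ClapSegmentStatus.COMPLETED &&
    !!s.assetUrl
  )
}
```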
src/lib/hooks/useProgressTimer.ts
CHANGED
@@ -1,10 +1,12 @@
+"use client"
+
 import { useStore } from "@/app/store"
 import { useEffect, useRef } from "react"
 import { useIsBusy } from "./useIsBusy"

 export function useProgressTimer() {
 const runningRef = useRef(false)
-const timerRef = useRef<
+const timerRef = useRef<Timer>()

 const progress = useStore(s => s.progress)
 const stage = useStore(s => s.stage)
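The new `useRef<Timer>()` relies on a global `Timer` type (presumably available through Bun's type definitions; this is an assumption). A portable alternative, sketched below, derives the type from `setInterval` itself so it resolves under both DOM and Node typings.

```ts
import { useEffect, useRef } from "react"

// Hedged sketch: ReturnType<typeof setInterval> needs no global Timer declaration.
function useIntervalExample(onTick: () => void, delayInMs = 200) {
  const timerRef = useRef<ReturnType<typeof setInterval>>()
  useEffect(() => {
    timerRef.current = setInterval(onTick, delayInMs)
    return () => { clearInterval(timerRef.current) }
  }, [onTick, delayInMs])
}
```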
src/lib/hooks/useQueryStringParams.ts
CHANGED
@@ -1,6 +1,6 @@
 import { useEffect } from "react"
 import { useSearchParams } from "next/navigation"
-import {
+import { ClapImageRatio } from "@aitube/clap"

 import { useStore } from "@/app/store"

@@ -13,24 +13,24 @@ export function useQueryStringParams() {
 const { busyRef } = useIsBusy()
 const { handleCreateStory } = useProcessors()

-const
+const setImageRatio = useStore(s => s.setImageRatio)
 // this is how we support query string parameters
 // ?prompt=hello <- set a default prompt
 // ?prompt=hello&autorun=true <- automatically run the app
-// ?
+// ?imageRatio=landscape <- can be "landscape" or "portrait" (default)
 const searchParams = useSearchParams()
 const queryStringPrompt = (searchParams?.get('prompt') as string) || ""
 const queryStringAutorun = (searchParams?.get('autorun') as string) || ""
-const queryStringOrientation = (searchParams?.get('
+const queryStringOrientation = (searchParams?.get('imageRatio') as string) || ""

 useEffect(() => {
 if (queryStringOrientation?.length > 1) {
-console.log(`
-const
+console.log(`imageRatio = "${queryStringOrientation}"`)
+const imageRatio =
 queryStringOrientation.trim().toLowerCase() === "landscape"
-?
-:
-
+? ClapImageRatio.LANDSCAPE
+: ClapImageRatio.PORTRAIT
+setImageRatio(imageRatio)
 }
 if (queryStringPrompt?.length > 1) {
 console.log(`prompt = "${queryStringPrompt}"`)
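With this change the page accepts an `imageRatio` query-string parameter. A hedged standalone sketch of the same parsing rule, with example URLs in the comments; the helper name is illustrative.

```ts
import { ClapImageRatio } from "@aitube/clap"

// Example URLs the hook understands after the change:
//   /?prompt=hello                       -> portrait (default)
//   /?prompt=hello&imageRatio=landscape  -> landscape
//   /?prompt=hello&autorun=true          -> portrait, auto-runs the app
//
// Illustrative helper mirroring the hook's parsing: anything other than
// "landscape" falls back to portrait.
function parseOrientationParam(value: string | null): ClapImageRatio {
  return (value || "").trim().toLowerCase() === "landscape"
    ? ClapImageRatio.LANDSCAPE
    : ClapImageRatio.PORTRAIT
}
```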
src/lib/utils/{getVideoOrientation.ts → getImageRatio.ts}
RENAMED
@@ -1,19 +1,19 @@
-import {
+import { ClapImageRatio } from "@aitube/clap"

 /**
- * Determine the video
+ * Determine the video imageRatio from a video URL (data-uri or hosted)
 *
 * @param url
 * @returns
 */
-export async function
-return new Promise<
+export async function getImageRatio(url: string): Promise<ClapImageRatio> {
+return new Promise<ClapImageRatio>(resolve => {
 const video = document.createElement('video')
 video.addEventListener( "loadedmetadata", function () {
 resolve(
-this.videoHeight < this.videoWidth ?
-this.videoHeight > this.videoWidth ?
-
+this.videoHeight < this.videoWidth ? ClapImageRatio.LANDSCAPE :
+this.videoHeight > this.videoWidth ? ClapImageRatio.PORTRAIT :
+ClapImageRatio.SQUARE
 )
 }, false)
 video.src = url
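A short usage sketch of the renamed helper; as in `setCurrentVideo()` above, error handling is left to the caller, and the portrait fallback chosen here is illustrative.

```ts
import { ClapImageRatio } from "@aitube/clap"
import { getImageRatio } from "@/lib/utils/getImageRatio"

// Probe a video URL (data-uri or hosted, browser-only since it creates a
// <video> element) and fall back to portrait if the probe throws.
async function probeRatio(url: string): Promise<ClapImageRatio> {
  try {
    return await getImageRatio(url)
  } catch (err) {
    console.error(`failed to get the media imageRatio`, err)
    return ClapImageRatio.PORTRAIT
  }
}
```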
|