Commit 5443f48
Parent(s): 9164321

add story extension

Files changed:
- package-lock.json +37 -37
- package.json +7 -7
- src/app/main.tsx +8 -4
- src/app/server/aitube/createClap.ts +2 -2
- src/app/server/aitube/editClapStory.ts +2 -1
- src/app/server/aitube/exportClapToVideo.ts +10 -0
- src/app/server/config.ts +2 -0
- src/app/store.ts +39 -29
- src/components/interface/characters.tsx +3 -3
- src/components/interface/load-clap-button.tsx +2 -2
- src/components/interface/save-clap-button.tsx +4 -4
- src/lib/hooks/useImportClap.ts +6 -6
- src/lib/hooks/useProcessors.ts +177 -51
- src/lib/hooks/useQueryStringParams.ts +2 -2
- src/lib/utils/removeFinalVideos.ts +36 -0
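
Taken together, the commit adds an "extend story" path next to the existing "create story" one: the primary button in src/app/main.tsx switches between the two depending on whether a video has already been generated, and the new handleExtendStory() in useProcessors.ts strips the previous final render, asks the API to extend the story, then regenerates music, storyboards and videos before re-rendering. The sketch below is illustrative only; the hook and store names come from this diff, but the Button import path and the standalone component are assumptions, not code from the commit.

// Illustrative sketch (not part of the commit): how the new "extend" path is
// surfaced in the UI, condensed from the src/app/main.tsx changes below.
import { Button } from "@/components/ui/button"  // assumed import path
import { useStore } from "@/app/store"
import { useProcessors } from "@/lib/hooks/useProcessors"

export function CreateOrExtendButton() {
  const currentVideo = useStore(s => s.currentVideo)
  const { handleCreateStory, handleExtendStory } = useProcessors()

  // Once a video exists, the same primary button extends the current story
  // instead of starting a new one (see handleExtendStory further down).
  return (
    <Button onClick={currentVideo ? handleExtendStory : handleCreateStory}>
      {currentVideo ? "Extend" : "Create"}
    </Button>
  )
}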
package-lock.json
CHANGED

@@ -8,8 +8,8 @@
       "name": "@jbilcke-hf/ai-stories-factory",
       "version": "0.0.0",
       "dependencies": {
-        "@aitube/clap": "0.0.
-        "@aitube/client": "0.0.
+        "@aitube/clap": "0.0.22",
+        "@aitube/client": "0.0.31",
         "@huggingface/hub": "^0.15.0",
         "@radix-ui/react-accordion": "^1.1.2",
         "@radix-ui/react-avatar": "^1.0.4",
@@ -54,10 +54,10 @@
         "react-device-frameset": "^1.3.4",
         "react-dom": "18.3.1",
         "react-hook-consent": "^3.5.3",
-        "react-icons": "^5.2.
-        "sharp": "^0.33.
-        "sonner": "^1.4.
-        "tailwind-merge": "^2.
+        "react-icons": "^5.2.1",
+        "sharp": "^0.33.4",
+        "sonner": "^1.4.41",
+        "tailwind-merge": "^2.3.0",
         "tailwindcss": "^3.4.3",
         "tailwindcss-animate": "^1.0.7",
         "ts-node": "^10.9.2",
@@ -65,14 +65,14 @@
         "use-file-picker": "^2.1.2",
         "usehooks-ts": "^2.14.0",
         "uuid": "^9.0.1",
-        "yaml": "^2.4.
+        "yaml": "^2.4.2",
         "zustand": "^4.5.2"
       }
     },
     "node_modules/@aitube/clap": {
-      "version": "0.0.
-      "resolved": "https://registry.npmjs.org/@aitube/clap/-/clap-0.0.
-      "integrity": "sha512-
+      "version": "0.0.22",
+      "resolved": "https://registry.npmjs.org/@aitube/clap/-/clap-0.0.22.tgz",
+      "integrity": "sha512-oN+tfoy0fp95AiwNMVy6pbhkK9k/h/gZ3FTZPpF06bVLig4rZ/Bthyot+wy4E/hr9ERLeyGpfZVW8blRqHCAaw==",
      "dependencies": {
        "pure-uuid": "^1.8.1",
        "yaml": "^2.4.2"
@@ -82,14 +82,14 @@
      }
    },
    "node_modules/@aitube/client": {
-      "version": "0.0.
-      "resolved": "https://registry.npmjs.org/@aitube/client/-/client-0.0.
-      "integrity": "sha512-
+      "version": "0.0.31",
+      "resolved": "https://registry.npmjs.org/@aitube/client/-/client-0.0.31.tgz",
+      "integrity": "sha512-xjdPYRruuEWe8KdmWpMYB1ELylBRF2M6+BYr0K3VAf/fv4qcRqda+F9amitJ27h0t5rvagA7+MLLKFqlF7BT/A==",
      "dependencies": {
        "query-string": "^9.0.0"
      },
      "peerDependencies": {
-        "@aitube/clap": "0.0.
+        "@aitube/clap": "0.0.22"
      }
    },
    "node_modules/@alloc/quick-lru": {
@@ -3059,11 +3059,11 @@
      }
    },
    "node_modules/braces": {
-      "version": "3.0.
-      "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.
-      "integrity": "sha512-
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
+      "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
      "dependencies": {
-        "fill-range": "^7.
+        "fill-range": "^7.1.1"
      },
      "engines": {
        "node": ">=8"
@@ -3177,9 +3177,9 @@
      }
    },
    "node_modules/caniuse-lite": {
-      "version": "1.0.
-      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.
-      "integrity": "sha512
+      "version": "1.0.30001621",
+      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001621.tgz",
+      "integrity": "sha512-+NLXZiviFFKX0fk8Piwv3PfLPGtRqJeq2TiNoUff/qB5KJgwecJTvCXDpmlyP/eCI/GUEmp/h/y5j0yckiiZrA==",
      "funding": [
        {
          "type": "opencollective",
@@ -3801,9 +3801,9 @@
      "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA=="
    },
    "node_modules/electron-to-chromium": {
-      "version": "1.4.
-      "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.
-      "integrity": "sha512-
+      "version": "1.4.777",
+      "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.777.tgz",
+      "integrity": "sha512-n02NCwLJ3wexLfK/yQeqfywCblZqLcXphzmid5e8yVPdtEcida7li0A5WQKghHNG0FeOMCzeFOzEbtAh5riXFw=="
    },
    "node_modules/emoji-regex": {
      "version": "9.2.2",
@@ -4472,9 +4472,9 @@
      }
    },
    "node_modules/fill-range": {
-      "version": "7.
-      "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.
-      "integrity": "sha512-
+      "version": "7.1.1",
+      "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
+      "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
      "dependencies": {
        "to-regex-range": "^5.0.1"
      },
@@ -5487,9 +5487,9 @@
      }
    },
    "node_modules/language-subtag-registry": {
-      "version": "0.3.
-      "resolved": "https://registry.npmjs.org/language-subtag-registry/-/language-subtag-registry-0.3.
-      "integrity": "sha512-
+      "version": "0.3.23",
+      "resolved": "https://registry.npmjs.org/language-subtag-registry/-/language-subtag-registry-0.3.23.tgz",
+      "integrity": "sha512-0K65Lea881pHotoGEa5gDlMxt3pctLi2RplBb7Ezh4rRdLEOtgi7n4EwK9lamnUCkKBqaeKRVebTq6BAxSkpXQ=="
    },
    "node_modules/language-tags": {
      "version": "1.0.9",
@@ -5607,11 +5607,11 @@
      }
    },
    "node_modules/micromatch": {
-      "version": "4.0.
-      "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.
-      "integrity": "sha512-
+      "version": "4.0.7",
+      "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz",
+      "integrity": "sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==",
      "dependencies": {
-        "braces": "^3.0.
+        "braces": "^3.0.3",
        "picomatch": "^2.3.1"
      },
      "engines": {
@@ -6310,9 +6310,9 @@
      }
    },
    "node_modules/postcss-selector-parser": {
-      "version": "6.0
-      "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.
-      "integrity": "sha512-
+      "version": "6.1.0",
+      "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.0.tgz",
+      "integrity": "sha512-UMz42UD0UY0EApS0ZL9o1XnLhSTtvvvLe5Dc2H2O56fvRZi+KulDyf5ctDhhtYJBGKStV2FL1fy6253cmLgqVQ==",
      "dependencies": {
        "cssesc": "^3.0.0",
        "util-deprecate": "^1.0.2"
package.json
CHANGED

@@ -9,8 +9,8 @@
     "lint": "next lint"
   },
   "dependencies": {
-    "@aitube/clap": "0.0.
-    "@aitube/client": "0.0.
+    "@aitube/clap": "0.0.22",
+    "@aitube/client": "0.0.31",
     "@huggingface/hub": "^0.15.0",
     "@radix-ui/react-accordion": "^1.1.2",
     "@radix-ui/react-avatar": "^1.0.4",
@@ -55,10 +55,10 @@
     "react-device-frameset": "^1.3.4",
     "react-dom": "18.3.1",
     "react-hook-consent": "^3.5.3",
-    "react-icons": "^5.2.
-    "sharp": "^0.33.
-    "sonner": "^1.4.
-    "tailwind-merge": "^2.
+    "react-icons": "^5.2.1",
+    "sharp": "^0.33.4",
+    "sonner": "^1.4.41",
+    "tailwind-merge": "^2.3.0",
     "tailwindcss": "^3.4.3",
     "tailwindcss-animate": "^1.0.7",
     "ts-node": "^10.9.2",
@@ -66,7 +66,7 @@
     "use-file-picker": "^2.1.2",
     "usehooks-ts": "^2.14.0",
     "uuid": "^9.0.1",
-    "yaml": "^2.4.
+    "yaml": "^2.4.2",
     "zustand": "^4.5.2"
   }
 }
src/app/main.tsx
CHANGED

@@ -29,9 +29,11 @@ export function Main() {
   const { storyPromptDraft, setStoryPromptDraft, promptDraftRef } = useStoryPromptDraft()
   const { isBusy } = useIsBusy()
   const { orientation, toggleOrientation } = useOrientation()
-  const {
+  const { handleCreateStory, handleExtendStory } = useProcessors()
   useQueryStringParams()
-
+
+  const currentVideo = useStore(s => s.currentVideo)
+
   const showAuthWall = useStore(s => s.showAuthWall)
   const { isLoggedIn, enableOAuthWall } = useOAuth()

@@ -282,7 +284,7 @@ export function Main() {
           </div>
           {/* END OF ORIENTATION SWITCH */}
           <Button
-            onClick={
+            onClick={currentVideo ? handleExtendStory : handleCreateStory}
             disabled={!storyPromptDraft || isBusy || !isLoggedIn}
             // variant="ghost"
             className={cn(

@@ -293,7 +295,9 @@ export function Main() {
               storyPromptDraft ? "opacity-100" : "opacity-80"
             )}
           >
-            <span className="mr-1.5">
+            <span className="mr-1.5">{
+              currentVideo ? 'Extend' : 'Create'
+            }</span><span className="hidden md:inline">👉</span><span className="inline md:hidden">👇</span>
           </Button>
         </div>
src/app/server/aitube/createClap.ts
CHANGED

@@ -7,7 +7,7 @@ import { ClapProject, ClapMediaOrientation } from "@aitube/clap"
 import { createClap as apiCreateClap } from "@aitube/client"

 import { getToken } from "./getToken"
-import { RESOLUTION_LONG, RESOLUTION_SHORT } from "../config"
+import { RESOLUTION_LONG, RESOLUTION_SHORT, MAX_PROMPT_LENGTH_IN_CHARS } from "../config"
 import { getRateLimit } from "../redis/getRateLimit"

 const rateLimit = getRateLimit()

@@ -40,7 +40,7 @@ export async function createClap({
   */

   const clap: ClapProject = await apiCreateClap({
-    prompt: prompt.slice(0,
+    prompt: prompt.slice(0, MAX_PROMPT_LENGTH_IN_CHARS),

     height: orientation === ClapMediaOrientation.PORTRAIT ? RESOLUTION_LONG : RESOLUTION_SHORT,
     width: orientation === ClapMediaOrientation.PORTRAIT ? RESOLUTION_SHORT : RESOLUTION_LONG,
src/app/server/aitube/editClapStory.ts
CHANGED

@@ -5,6 +5,7 @@ import { editClapStory as apiEditClapStory, ClapCompletionMode } from "@aitube/client"

 import { getToken } from "./getToken"
 import { Workaround } from "./types"
+import { MAX_PROMPT_LENGTH_IN_CHARS } from "../config"

 export async function editClapStory({
   clap,

@@ -22,7 +23,7 @@ export async function editClapStory({
   async function promise() {
     return await apiEditClapStory({
       clap,
-      prompt,
+      prompt: `${prompt || ""}`.slice(0, MAX_PROMPT_LENGTH_IN_CHARS),
       startTimeInMs,
       endTimeInMs,
       completionMode: ClapCompletionMode.MERGE,
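
For context, the sketch below shows how this patched wrapper is invoked from the new extendStory() helper in useProcessors.ts further down. Parameter names and the `.then(r => r.promise)` pattern come from this diff; the prompt value and the function wrapper are illustrative assumptions.

// Hedged usage sketch, mirroring extendStory() in src/lib/hooks/useProcessors.ts (this commit).
import { ClapProject } from "@aitube/clap"
import { editClapStory } from "@/app/server/aitube/editClapStory"

async function extendStoryExample(clap: ClapProject): Promise<ClapProject> {
  const extended: ClapProject = await editClapStory({
    clap,
    // illustrative prompt; the server clamps it to MAX_PROMPT_LENGTH_IN_CHARS
    prompt: "continue the story with a plot twist",
    // story extension needs a "smart" LLM, so turbo stays off (per the comment in this commit)
    turbo: false,
  }).then(r => r.promise)
  return extended
}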
src/app/server/aitube/exportClapToVideo.ts
CHANGED

@@ -4,6 +4,7 @@ import { ClapProject } from "@aitube/clap"
 import { exportClapToVideo as apiExportClapToVideo } from "@aitube/client"

 import { getToken } from "./getToken"
+import { removeFinalVideos } from "@/lib/utils/removeFinalVideos"

 export async function exportClapToVideo({
   clap,

@@ -12,6 +13,15 @@ export async function exportClapToVideo({
   clap: ClapProject
   turbo?: boolean
 }): Promise<string> {
+
+  // TODO move this safety into apiExportClapToVideo
+
+  // one last precaution: remove any existing final render beforehand, as it eats up space
+  clap.segments = removeFinalVideos(clap)
+
+  // we have to leave the rest in, however:
+  // our final render needs all the video and audio clips!
+
   // TODO: maybe we should return a blob instead,
   // as this could be big eg. a few megabytes
   // or maybe we should convert it to an object id
src/app/server/config.ts
CHANGED

@@ -9,3 +9,5 @@ export const RESOLUTION_LONG = 896 // 832 // 768
 export const RESOLUTION_SHORT = 512 // 448 // 384

 // ValueError: `height` and `width` have to be divisible by 8 but are 512 and 1.
+
+export const MAX_PROMPT_LENGTH_IN_CHARS = 1024
src/app/store.ts
CHANGED

@@ -33,7 +33,14 @@ export const useStore = create<{
   finalGenerationStatus: TaskStatus
   isBusy: boolean

-
+  // a clap file stripped of all its image, video and audio assets
+  // this lightweight clap (which can still grow large)
+  // is best suited for doing API calls
+  skeletonClap?: ClapProject
+
+  // the full clap file, with all the binary assets
+  fullClap?: ClapProject
+
   currentVideo: string

   // orientation of the currently loaded video (which can be different from `orientation`)

@@ -60,7 +67,8 @@ export const useStore = create<{
   setVideoGenerationStatus: (videoGenerationStatus: TaskStatus) => void
   setFinalGenerationStatus: (finalGenerationStatus: TaskStatus) => void
   syncStatusAndStageState: () => void
-
+  setSkeletonClap: (fullClap?: ClapProject) => void
+  setFullClap: (fullClap?: ClapProject) => void

   // note: this will preload the video, and compute the orientation too
   setCurrentVideo: (currentVideo: string) => Promise<void>

@@ -92,7 +100,8 @@ export const useStore = create<{
   videoGenerationStatus: "idle",
   finalGenerationStatus: "idle",
   isBusy: false,
-
+  skeletonClap: undefined,
+  fullClap: undefined,
   currentVideo: "",
   currentVideoOrientation: ClapMediaOrientation.PORTRAIT,
   progress: 0,

@@ -216,7 +225,8 @@ export const useStore = create<{

     set({ isBusy, stage, statusMessage })
   },
-
+  setSkeletonClap: (skeletonClap?: ClapProject) => { set({ skeletonClap }) },
+  setFullClap: (fullClap?: ClapProject) => { set({ fullClap }) },
   setCurrentVideo: async (currentVideo: string): Promise<void> => {
     set({
       currentVideo,

@@ -244,10 +254,10 @@ export const useStore = create<{

     if (!currentVideo) { throw new Error(`cannot save a video.. if there is no video`) }

-    const
+    const fullClapBlob: Blob = await fetch(currentVideo).then(r => r.blob())

     // Create an object URL for the compressed clap blob
-    const objectUrl = URL.createObjectURL(
+    const objectUrl = URL.createObjectURL(fullClapBlob)

     // Create an anchor element and force browser download
     const anchor = document.createElement("a")

@@ -269,24 +279,24 @@ export const useStore = create<{
     document.body.removeChild(anchor)
   },
   saveClap: async (): Promise<void> => {
-    const {
+    const { fullClap, storyPrompt, currentVideo } = get()

-    if (!
+    if (!fullClap) { throw new Error(`cannot save a clap.. if there is no clap`) }

-
+    fullClap.meta.description = storyPrompt

     // make sure we update the total duration
-    for (const s of
-      if (s.endTimeInMs >
-
+    for (const s of fullClap.segments) {
+      if (s.endTimeInMs > fullClap.meta.durationInMs) {
+        fullClap.meta.durationInMs = s.endTimeInMs
       }
     }

-    const alreadyAnEmbeddedFinalVideo =
+    const alreadyAnEmbeddedFinalVideo = fullClap.segments.filter(s =>
       s.category === ClapSegmentCategory.VIDEO &&
       s.status === "completed" &&
       s.startTimeInMs === 0 &&
-      s.endTimeInMs ===
+      s.endTimeInMs === fullClap.meta.durationInMs &&
       s.assetUrl).at(0)

     // inject the final mp4 video file into the .clap

@@ -295,21 +305,21 @@ export const useStore = create<{
       alreadyAnEmbeddedFinalVideo.assetUrl = currentVideo
     } else {
       console.log(`editing the clap to add a new final video`)
-
+      fullClap.segments.push(newSegment({
         category: ClapSegmentCategory.VIDEO,
         status: "completed",
         startTimeInMs: 0,
-        endTimeInMs:
+        endTimeInMs: fullClap.meta.durationInMs,
         assetUrl: currentVideo,
-        assetDurationInMs:
+        assetDurationInMs: fullClap.meta.durationInMs,
         assetSourceType: getClapAssetSourceType(currentVideo),
         outputGain: 1.0,
       }))
     }
-    const
+    const fullClapBlob: Blob = await serializeClap(fullClap)

     // Create an object URL for the compressed clap blob
-    const objectUrl = URL.createObjectURL(
+    const objectUrl = URL.createObjectURL(fullClapBlob)

     // Create an anchor element and force browser download
     const anchor = document.createElement("a")

@@ -338,31 +348,31 @@ export const useStore = create<{
       throw new Error(`missing blob`)
     }

-    const
+    const fullClap: ClapProject | undefined = await parseClap(blob)

-    if (!
+    if (!fullClap) { throw new Error(`failed to import the clap`) }

-    const storyPrompt =
+    const storyPrompt = fullClap.meta.description.split("||").pop() || ""

     putTextInTextAreaElement(
       document.getElementById("story-prompt-draft") as HTMLTextAreaElement,
       storyPrompt
     )

-    const orientation = parseMediaOrientation(
+    const orientation = parseMediaOrientation(fullClap.meta.orientation)

-
-
+    fullClap.meta.height = orientation === ClapMediaOrientation.LANDSCAPE ? RESOLUTION_SHORT : RESOLUTION_LONG
+    fullClap.meta.width = orientation === ClapMediaOrientation.PORTRAIT ? RESOLUTION_SHORT : RESOLUTION_LONG

-    const embeddedFinalVideoAssetUrl =
+    const embeddedFinalVideoAssetUrl = fullClap.segments.filter(s =>
       s.category === ClapSegmentCategory.VIDEO &&
       s.status === "completed" &&
       s.startTimeInMs === 0 &&
-      s.endTimeInMs ===
+      s.endTimeInMs === fullClap.meta.durationInMs &&
       s.assetUrl).map(s => s.assetUrl).at(0)

     set({
-
+      fullClap,
       storyPrompt,
       orientation,
       currentVideo: embeddedFinalVideoAssetUrl || get().currentVideo,

@@ -370,7 +380,7 @@ export const useStore = create<{
     })

     return {
-      clap:
+      clap: fullClap,
       regenerateVideo: !embeddedFinalVideoAssetUrl,
     }
   },
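
The skeletonClap / fullClap pair added to the store keeps a lightweight, asset-free copy next to the full project. A minimal sketch of how the two setters are meant to be used, based only on the comments in this diff; stripBinaryAssets is a hypothetical helper, not something this commit provides:

// Illustrative sketch, not part of the commit. The store's own comments describe
// skeletonClap as "a clap file stripped of all its image, video and audio assets"
// (best suited for API calls), while fullClap keeps the binary assets and is what
// saveClap() serializes.
import { ClapProject } from "@aitube/clap"
import { useStore } from "@/app/store"

declare const clap: ClapProject                                  // assumed: a project loaded elsewhere
declare function stripBinaryAssets(c: ClapProject): ClapProject  // hypothetical helper

const { setFullClap, setSkeletonClap } = useStore.getState()
setFullClap(clap)                         // full project, with binary assets
setSkeletonClap(stripBinaryAssets(clap))  // lightweight copy for API round-trips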
src/components/interface/characters.tsx
CHANGED

@@ -2,14 +2,14 @@ import { useStore } from "@/app/store"
 import { CharacterButton } from "./character-button"

 export function Characters() {
-  const
+  const fullClap = useStore(s => s.fullClap)


   return (
     <div className="flex flex-row space-x-0">
-      {
+      {fullClap && fullClap.entities?.length > 0
       // now: we only support displaying ONE entity for now
-        ?
+        ? fullClap.entities.slice(0, 1).map(entity =>
           <CharacterButton key={entity.id} entity={entity} />
         )
         : <CharacterButton />
src/components/interface/load-clap-button.tsx
CHANGED

@@ -29,8 +29,8 @@ export function LoadClapButton() {
         storyPromptDraft ? "opacity-100" : "opacity-80"
       )}
     >
-      <span className="hidden xl:inline
-      <span className="inline xl:hidden
+      <span className="hidden xl:inline">Load</span>
+      <span className="inline xl:hidden">Load</span>
     </Button></TooltipTrigger>
     <TooltipContent side="top">
       <p className="text-xs font-normal text-stone-100/90 text-center">
src/components/interface/save-clap-button.tsx
CHANGED

@@ -12,7 +12,7 @@ import { useIsBusy, useStoryPromptDraft } from "@/lib/hooks"

 export function SaveClapButton() {
   const { isBusy } = useIsBusy()
-  const
+  const fullClap = useStore(s => s.fullClap)
   const saveClap = useStore(s => s.saveClap)
   const { storyPromptDraft } = useStoryPromptDraft()

@@ -20,7 +20,7 @@ export function SaveClapButton() {
   <Tooltip>
     <TooltipTrigger asChild><Button
       onClick={() => saveClap()}
-      disabled={!
+      disabled={!fullClap || isBusy}
       // variant="ghost"
       className={cn(
         `text-xs md:text-sm lg:text-base`,

@@ -30,8 +30,8 @@ export function SaveClapButton() {
         storyPromptDraft ? "opacity-100" : "opacity-80"
       )}
     >
-      <span className="hidden xl:inline
-      <span className="inline xl:hidden
+      <span className="hidden xl:inline">Save</span>
+      <span className="inline xl:hidden">Save</span>
     </Button></TooltipTrigger>
     <TooltipContent side="top">
       <p className="text-xs font-normal text-stone-100/90 text-center">
src/lib/hooks/useImportClap.ts
CHANGED

@@ -9,7 +9,7 @@ import { useProcessors } from "./useProcessors"
 export function useImportClap() {

   const setError = useStore(s => s.setError)
-  const
+  const setFullClap = useStore(s => s.setFullClap)
   const loadClap = useStore(s => s.loadClap)

   const {

@@ -65,13 +65,13 @@ export function useImportClap() {

       // clap = await generateSounds(clap)

-      //
+      // setFullClap(clap)

-      console.log("
+      console.log("importClap: clap = ", clap)

       // it is important to skip regeneration if we already have a video
       if (regenerateVideo) {
-        console.log(`regenerating music and videos..`)
+        console.log(`importClap: regenerating music and videos..`)
         const claps = await Promise.all([
           generateMusic(clap),
           generateVideos(clap)

@@ -87,12 +87,12 @@ export function useImportClap() {
       }


-
+      setFullClap(clap)

       await generateFinalVideo(clap)

     } else {
-      console.log(`skipping music and video regeneration`)
+      console.log(`importClap: skipping music and video regeneration`)
     }

     setStatus("finished")
src/lib/hooks/useProcessors.ts
CHANGED

@@ -1,7 +1,7 @@
 "use client"

 import React, { useState, useTransition } from "react"
-import { ClapProject, ClapSegmentCategory, getClapAssetSourceType, newEntity, updateClap } from "@aitube/clap"
+import { ClapProject, ClapSegmentCategory, filterAssets, getClapAssetSourceType, newEntity, parseClap, serializeClap, updateClap } from "@aitube/clap"

 import { logImage } from "@/lib/utils"
 import { useIsBusy, useStoryPromptDraft } from "@/lib/hooks"

@@ -11,6 +11,7 @@ import { useToast } from "@/components/ui/use-toast"
 import { createClap } from "@/app/server/aitube/createClap"
 import { editClapEntities } from "@/app/server/aitube/editClapEntities"
 import { editClapDialogues } from "@/app/server/aitube/editClapDialogues"
+import { editClapStory } from "@/app/server/aitube/editClapStory"
 import { editClapStoryboards } from "@/app/server/aitube/editClapStoryboards"
 import { editClapSounds } from "@/app/server/aitube/editClapSounds"
 import { editClapMusic } from "@/app/server/aitube/editClapMusic"

@@ -19,6 +20,7 @@ import { exportClapToVideo } from "@/app/server/aitube/exportClapToVideo"

 import { useStore } from "../../app/store"
 import { useOAuth } from "../oauth/useOAuth"
+import { removeFinalVideos } from "../utils/removeFinalVideos"

 export function useProcessors() {
   const [isLocked, setLocked] = useState(false)

@@ -30,7 +32,9 @@ export function useProcessors() {
   const mainCharacterImage = useStore(s => s.mainCharacterImage)
   const mainCharacterVoice = useStore(s => s.mainCharacterVoice)

-  const
+  const skeletonClap = useStore(s => s.skeletonClap)
+  const fullClap = useStore(s => s.fullClap)
+  const setSkeletonClap = useStore(s => s.setSkeletonClap)
   const setStoryPrompt = useStore(s => s.setStoryPrompt)
   const setMainCharacterImage = useStore(s => s.setMainCharacterImage)
   const setMainCharacterVoice = useStore(s => s.setMainCharacterVoice)

@@ -48,7 +52,7 @@ export function useProcessors() {
   const setImageGenerationStatus = useStore(s => s.setImageGenerationStatus)
   const setVideoGenerationStatus = useStore(s => s.setVideoGenerationStatus)
   const setFinalGenerationStatus = useStore(s => s.setFinalGenerationStatus)
-  const
+  const setFullClap = useStore(s => s.setFullClap)
   const setCurrentVideo = useStore(s => s.setCurrentVideo)
   const setProgress = useStore(s => s.setProgress)

@@ -84,11 +88,11 @@ export function useProcessors() {
       console.log(`generateStory(): copying over entities from the previous clap`)

       console.log(`generateStory(): later we can add button(s) to clear the project and/or the character(s)`)
-      const {
+      const { fullClap } = useStore.getState()

-      clap.entities = Array.isArray(
+      clap.entities = Array.isArray(fullClap?.entities) ? fullClap.entities : []

-
+      setFullClap(clap)
       setStoryGenerationStatus("finished")

       console.log("---------------- GENERATED STORY ----------------")

@@ -106,6 +110,46 @@ export function useProcessors() {
     }
   }

+  const extendStory = async (clap: ClapProject): Promise<ClapProject> => {
+    try {
+      setStoryGenerationStatus("generating")
+
+      const prompt = promptDraftRef.current.slice(0, 1024)
+
+      clap = await editClapStory({
+        clap,
+        prompt,
+        // startTimeInMs: 0,
+        // endTimeInMs: 0,
+        // generating entities requires a "smart" LLM
+        turbo: false,
+        // turbo: true,
+      }).then(r => r.promise)
+
+      if (!clap) { throw new Error(`failed to create the clap`) }
+
+      if (clap.segments.length <= 1) { throw new Error(`failed to generate more than one segment`) }
+
+      console.log(`extendStory(): received a clap with more shots = `, clap)
+
+      setFullClap(clap)
+      setStoryGenerationStatus("finished")
+
+      console.log("---------------- EXTENDED STORY ----------------")
+      console.table(clap.segments, [
+        // 'startTimeInMs',
+        'endTimeInMs',
+        // 'track',
+        'category',
+        'prompt'
+      ])
+      return clap
+    } catch (err) {
+      setStoryGenerationStatus("error")
+      throw err
+    }
+  }
+
   const generateEntities = async (clap: ClapProject): Promise<ClapProject> => {
     try {
       // setProgress(20)

@@ -163,7 +207,7 @@ export function useProcessors() {

       if (!clap) { throw new Error(`failed to edit the sound`) }

-      console.log(`generateSounds(): received a clap with sound = `, clap)
+      // console.log(`generateSounds(): received a clap with sound = `, clap)
       setSoundGenerationStatus("finished")
       console.log("---------------- GENERATED SOUND ----------------")
       console.table(clap.segments.filter(s => s.category === ClapSegmentCategory.SOUND), [

@@ -190,7 +234,7 @@ export function useProcessors() {

       if (!clap) { throw new Error(`failed to edit the music`) }

-      console.log(`generateMusic(): received a clap with music = `, clap)
+      // console.log(`generateMusic(): received a clap with music = `, clap)
       setMusicGenerationStatus("finished")
       console.log("---------------- GENERATED MUSIC ----------------")
       console.table(clap.segments.filter(s => s.category === ClapSegmentCategory.MUSIC), [

@@ -259,7 +303,7 @@ export function useProcessors() {

       if (!clap) { throw new Error(`failed to edit the videos`) }

-      console.log(`
+      console.log(`handleCreateStory(): received individual video clips = `, clap)
       setVideoGenerationStatus("finished")
       console.log("---------------- GENERATED VIDEOS ----------------")
       console.table(clap.segments.filter(s => s.category === ClapSegmentCategory.VIDEO), [

@@ -331,7 +375,44 @@ export function useProcessors() {
     }
   }

-  const
+  const injectCharacters = async (clap: ClapProject): Promise<void> => {
+    const storyboards = clap.segments.filter(s => s.category === ClapSegmentCategory.STORYBOARD)
+
+    let mainCharacter = clap.entities.at(0)
+
+    // let's do something basic for now: we only support 1 entity (character)
+    // and we apply it to *all* the storyboards (we can always improve this later)
+    if (mainCharacter) {
+      console.log(`injectCharacters(): we use the clap's main character's face on all storyboards`)
+      storyboards.forEach(storyboard => { storyboard.entityId = mainCharacter!.id })
+      logImage(mainCharacter.imageId, 0.35)
+    } else if (mainCharacterImage) {
+      console.log(`injectCharacters(): declaring a new entity for our main character`)
+      const entityName = "person"
+      mainCharacter = newEntity({
+        category: ClapSegmentCategory.CHARACTER,
+        triggerName: entityName,
+        label: entityName,
+        description: entityName,
+        author: "auto",
+        thumbnailUrl: mainCharacterImage,
+
+        imagePrompt: "",
+        imageSourceType: getClapAssetSourceType(mainCharacterImage),
+        imageEngine: "",
+        imageId: mainCharacterImage,
+        audioPrompt: "",
+      })
+
+      clap.entities.push(mainCharacter!)
+      console.log(`injectCharacters(): we use the main character's face on all storyboards`)
+
+      storyboards.forEach(storyboard => { storyboard.entityId = mainCharacter!.id })
+      logImage(mainCharacterImage, 0.35)
+    }
+  }
+
+  const handleCreateStory = async () => {

     if (busyRef.current) { return }

@@ -347,46 +428,13 @@ export function useProcessors() {
     setStatus("generating")
     busyRef.current = true

-    console.log(`
+    console.log(`handleCreateStory(): generating a clap using prompt = "${promptDraftRef.current}" `)

     try {
       let clap = await generateStory()
-
-
-      const storyboards = clap.segments.filter(s => s.category === ClapSegmentCategory.STORYBOARD)
-
-      let mainCharacter = clap.entities.at(0)
-
-      // let's do something basic for now: we only support 1 entity (character)
-      // and we apply it to *all* the storyboards (we can always improve this later)
-      if (mainCharacter) {
-        console.log(`handleSubmit(): we use the clap's main character's face on all storyboards`)
-        storyboards.forEach(storyboard => { storyboard.entityId = mainCharacter!.id })
-        logImage(mainCharacter.imageId, 0.35)
-      } else if (mainCharacterImage) {
-        console.log(`handleSubmit(): declaring a new entity for our main character`)
-        const entityName = "person"
-        mainCharacter = newEntity({
-          category: ClapSegmentCategory.CHARACTER,
-          triggerName: entityName,
-          label: entityName,
-          description: entityName,
-          author: "auto",
-          thumbnailUrl: mainCharacterImage,
-
-          imagePrompt: "",
-          imageSourceType: getClapAssetSourceType(mainCharacterImage),
-          imageEngine: "",
-          imageId: mainCharacterImage,
-          audioPrompt: "",
-        })
-
-        console.log(`handleSubmit(): we use the main character's face on all storyboards`)
-
-        storyboards.forEach(storyboard => { storyboard.entityId = mainCharacter!.id })
-        logImage(mainCharacterImage, 0.35)
-      }
+      setFullClap(clap)
+
+      await injectCharacters(clap)

       const tasks = [
         generateMusic(clap),

@@ -402,7 +450,7 @@ export function useProcessors() {
           overwriteMeta: false,
           inlineReplace: true,
         })
-
+        setFullClap(clap)
       }

       /*

@@ -420,7 +468,7 @@ export function useProcessors() {

       /*
       if (mainCharacterImage) {
-        console.log("
+        console.log("handleCreateStory(): User specified a main character image")
         // various strategies here, for instance we can assume that the first character is the main character,
         // or maybe a more reliable way is to count the number of occurrences.
         // there is a risk of misgendering, so ideally we should add some kind of UI to do this,

@@ -436,8 +484,8 @@ export function useProcessors() {



-      console.log("final clap: ", clap)
-
+      console.log("handleCreateStory(): final clap: ", clap)
+      setFullClap(clap)
       await generateFinalVideo(clap)

       setStatus("finished")

@@ -462,6 +510,83 @@ export function useProcessors() {
       }
     })
   }
+
+  const handleExtendStory = async () => {
+
+    if (busyRef.current) { return }
+
+    if (enableOAuthWall && !isLoggedIn) {
+      setShowAuthWall(true)
+      return
+    }
+
+    setStatus("generating")
+    busyRef.current = true
+
+    startTransition(async () => {
+      setStatus("generating")
+      setProgress(0)
+      busyRef.current = true
+
+      let { fullClap } = useStore.getState()
+
+      if (!fullClap) {
+        setStatus("error")
+        setError(`cannot extend the story if there is no current clap file`)
+        return
+      }
+
+      try {
+        console.log(`handleExtendStory(): we strip the clap from its final video (don't worry, it will be re-generated)`)
+
+        fullClap.segments = removeFinalVideos(fullClap)
+
+        let clap = await extendStory(fullClap)
+
+        if (!clap) {
+          setStatus("error")
+          setError(`failed to extend the story (received an empty clap)`)
+          return
+        }
+
+        await injectCharacters(clap)
+
+        console.log(`handleExtendStory(): new clap with extended story = `, clap)
+
+        const tasks = [
+          generateMusic(clap),
+          generateStoryboardsThenVideos(clap)
+        ]
+
+        const claps = await Promise.all(tasks)
+
+        console.log(`finished processing ${tasks.length} tasks in parallel`)
+
+        for (const newerClap of claps) {
+          clap = await updateClap(clap, newerClap, {
+            overwriteMeta: false,
+            inlineReplace: true,
+          })
+          setFullClap(clap)
+        }
+
+        console.log("handleExtendStory(): calling generateFinalVideo(clap)")
+
+        await generateFinalVideo(clap)
+
+        setFullClap(clap)
+
+        setStatus("finished")
+        setError("")
+      } catch (err) {
+        console.error(`handleExtendStory(): error: ${err}`)
+        setStoryGenerationStatus("error")
+        setStatus("error")
+      } finally {
+        busyRef.current = false
+      }
+    })
+  }

   return {
     generateDialogues,

@@ -473,6 +598,7 @@ export function useProcessors() {
     generateStoryboards,
     generateStoryboardsThenVideos,
     generateVideos,
-
+    handleCreateStory,
+    handleExtendStory,
   }
 }
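
Both handleCreateStory and handleExtendStory now share the same merge pattern: partial claps produced in parallel (music, storyboards, videos) are folded back into the working clap with updateClap, and the store is refreshed after each merge. The condensed sketch below is a simplification of that loop; the updateClap options are taken from this diff, the helper function itself is not part of the commit.

// Condensed sketch of the shared generation/merge loop (simplified from this commit).
import { ClapProject, updateClap } from "@aitube/clap"

async function mergePartials(clap: ClapProject, partials: ClapProject[]): Promise<ClapProject> {
  // in the commit, partials come from
  // await Promise.all([generateMusic(clap), generateStoryboardsThenVideos(clap)])
  for (const partial of partials) {
    clap = await updateClap(clap, partial, {
      // same options as used by handleCreateStory / handleExtendStory in this commit
      overwriteMeta: false,
      inlineReplace: true,
    })
  }
  return clap
}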
src/lib/hooks/useQueryStringParams.ts
CHANGED

@@ -11,7 +11,7 @@ import { useProcessors } from "./useProcessors"
 export function useQueryStringParams() {
   const { storyPromptDraft, setStoryPromptDraft, promptDraftRef } = useStoryPromptDraft()
   const { busyRef } = useIsBusy()
-  const {
+  const { handleCreateStory } = useProcessors()

   const setOrientation = useStore(s => s.setOrientation)
   // this is how we support query string parameters

@@ -43,7 +43,7 @@ export function useQueryStringParams() {
       // note: during development we will be called twice,
       // which is why we have a guard on busyRef.current
       if (maybeAutorun === "true" || maybeAutorun === "1" && !busyRef.current) {
-
+        handleCreateStory()
       }
     }
   }, [queryStringPrompt, queryStringAutorun, queryStringOrientation])
src/lib/utils/removeFinalVideos.ts
ADDED

@@ -0,0 +1,36 @@
+import { ClapProject, ClapSegment, ClapSegmentCategory } from "@aitube/clap"
+
+export function removeFinalVideos(clap: ClapProject): ClapSegment[] {
+  const alreadyAnEmbeddedFinalVideo = clap.segments.filter(s =>
+    s.category === ClapSegmentCategory.VIDEO &&
+    s.status === "completed" &&
+    s.startTimeInMs === 0 &&
+    s.endTimeInMs === clap.meta.durationInMs &&
+    s.assetUrl).at(0)
+
+  let ignoreThisVideoSegmentId = ""
+
+  if (alreadyAnEmbeddedFinalVideo) {
+    ignoreThisVideoSegmentId = alreadyAnEmbeddedFinalVideo?.id || ""
+
+    /*
+    you know what.. let's just ignore it, and re-generate fresh content
+    because most probably the user made an honest mistake
+
+    const outputFilePath = await writeBase64ToFile(
+      alreadyAnEmbeddedFinalVideo.assetUrl,
+      join(outputDir, `existing_final_video`)
+    )
+
+    return {
+      tmpWorkDir: outputDir,
+      outputFilePath
+    }
+    */
+  }
+
+  // we remove the final video from the full list of segments
+  const allCategoriesOfSegments = clap.segments.filter(s => s.id !== ignoreThisVideoSegmentId)
+
+  return allCategoriesOfSegments
+}
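
A short usage sketch for the new helper, mirroring how this commit calls it from exportClapToVideo.ts and handleExtendStory; the clap value is assumed to be in scope and is not part of the commit.

// Usage sketch (mirrors the call sites added in this commit).
import { ClapProject } from "@aitube/clap"
import { removeFinalVideos } from "@/lib/utils/removeFinalVideos"

declare const fullClap: ClapProject  // assumed: a project that may already contain a final render

// Drop only the embedded full-length "final" video segment (if any);
// the individual video and audio clips are kept, because the next render needs them.
fullClap.segments = removeFinalVideos(fullClap)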