"use client"

import { useEffect, useState, useTransition } from "react"

import { cn } from "@/lib/utils"
import { TopMenu } from "./interface/top-menu"
import { fonts } from "@/lib/fonts"
import { useStore } from "./store"
import { Zoom } from "./interface/zoom"
import { getStory } from "./queries/getStory"
import { BottomBar } from "./interface/bottom-bar"
import { Page } from "./interface/page"
import { LLMResponse } from "@/types"

/**
 * Top-level client component.
 *
 * Watches the shared store's `prompt` (and the current `preset`'s label) and,
 * whenever the prompt changes, asks the LLM for a panel-by-panel story via
 * `getStory`, then publishes the resulting image prompts (`setPanels`) and
 * captions (`setCaptions`) back into the store. If the LLM call fails, it
 * degrades gracefully by synthesizing four near-identical panels from the raw
 * prompt.
 *
 * NOTE(review): the JSX return body continues past this chunk of the file.
 */
export default function Main() {
  // The transition keeps the async story generation from blocking urgent UI
  // updates; the pending flag itself is unused here (hence the underscore).
  const [_isPending, startTransition] = useTransition()

  // Global app state (one selector per field to limit re-renders).
  const isGeneratingStory = useStore(state => state.isGeneratingStory)
  const setGeneratingStory = useStore(state => state.setGeneratingStory)
  const font = useStore(state => state.font)
  const preset = useStore(state => state.preset)
  const prompt = useStore(state => state.prompt)
  const setLayouts = useStore(state => state.setLayouts)
  const setPanels = useStore(state => state.setPanels)
  const setCaptions = useStore(state => state.setCaptions)
  const zoomLevel = useStore(state => state.zoomLevel)

  // Local flag used to keep the "generating" UI up a bit longer (see the
  // setTimeout at the end of the effect).
  const [waitABitMore, setWaitABitMore] = useState(false)

  // React to prompt changes: regenerate the whole story.
  useEffect(() => {
    // Nothing to do until the user has entered a prompt.
    if (!prompt) { return }

    startTransition(async () => {
      setWaitABitMore(false)
      setGeneratingStory(true)

      // I don't think we are going to need a rate limiter on the LLM part anymore
      const enableRateLimiter = false // `${process.env.NEXT_PUBLIC_ENABLE_RATE_LIMITER}` === "true"

      // NOTE(review): panel count is hard-coded to 4 here; the degraded-mode
      // message below assumes the same number — keep them in sync.
      const nbPanels = 4

      let llmResponse: LLMResponse = []

      try {
        llmResponse = await getStory({ preset, prompt })
        console.log("LLM responded:", llmResponse)
      } catch (err) {
        // Degraded mode: fabricate nbPanels entries from the raw prompt.
        // The trailing dots make each panel's instructions unique, presumably
        // so downstream image generation doesn't dedupe them — TODO confirm.
        console.log("LLM step failed due to:", err)
        console.log("we are now switching to a degraded mode, using 4 similar panels")
        llmResponse = []
        for (let p = 0; p < nbPanels; p++) {
          llmResponse.push({
            panel: p,
            instructions: `${prompt} ${".".repeat(p)}`,
            caption: "(Sorry, LLM generation failed: using degraded mode)"
          })
        }
        console.error(err)
      }

      // we have to limit the size of the prompt, otherwise the rest of the style won't be followed
      let limitedPrompt = prompt.slice(0, 77)
      if (limitedPrompt.length !== prompt.length) {
        console.log("Sorry folks, the prompt was cut to:", limitedPrompt)
      }

      // Style prefix shared by every panel. NOTE(review): assumes
      // preset.imagePrompt() returns a string[] — verify against the preset type.
      const panelPromptPrefix = preset.imagePrompt(limitedPrompt).join(", ")

      const newPanels: string[] = []
      const newCaptions: string[] = []
      setWaitABitMore(true)
      console.log("Panel prompts for SDXL:")
      for (let p = 0; p < nbPanels; p++) {
        // Fall back to "..." / "" when the LLM returned fewer than nbPanels entries.
        newCaptions.push(llmResponse[p]?.caption || "...")
        const newPanel = [panelPromptPrefix, llmResponse[p]?.instructions || ""].map(chunk => chunk).join(", ")
        newPanels.push(newPanel)
        console.log(newPanel)
      }

      setCaptions(newCaptions)
      setPanels(newPanels)

      // Keep the "generating" state visible for 12s when rate limiting is on;
      // clear it on the next tick otherwise.
      setTimeout(() => {
        setGeneratingStory(false)
        setWaitABitMore(false)
      }, enableRateLimiter ? 12000 : 0)
    })
    // NOTE(review): the effect reads the full `preset` object but only depends
    // on `preset?.label` — this assumes the label uniquely identifies a preset;
    // confirm, or the effect may run against a stale preset.
  }, [prompt, preset?.label]) // important: we need to react to preset changes too

  return (