FrontEnd Realtime Image Gen
'use client';

import React, { useContext, useState, useEffect, useRef } from 'react';
import SidebarLayout from '@/components/layout';
import { LuLock } from 'react-icons/lu';
import { PageContext } from '@/utils/contexts';
import * as fal from '@fal-ai/serverless-client';
import Image from 'next/image';
import * as THREE from 'three';
import { GLTFLoader } from 'three/examples/jsm/loaders/GLTFLoader';
import { OrbitControls } from 'three/examples/jsm/controls/OrbitControls';
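
// Route all fal requests through the app's own proxy route (typically done so the fal API key stays server-side).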
fal.config({
  proxyUrl: '/api/fal/proxy',
});
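
// Arguments shared by every realtime image-to-image request; the fixed random seed keeps output stable between frames.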
const seed = Math.floor(Math.random() * 100000);

const baseArgs = {
  sync_mode: true,
  strength: 0.2,
  num_inference_steps: 10,
  model_name: 'stabilityai/sd-turbo',
  // model_name: 'runwayml/stable-diffusion-v1-5',
  // image_size: 'square',
  // model_name: 'stabilityai/stable-diffusion-xl-base-1.0',
  // image_size: 'square_hd',
  prompt: 'masterpiece, best quality cute little robot',
  seed,
};
const TextureGeneration = () => {
  const { setauthToken, setloginModal, setupgradeModal } = useContext(PageContext);
  const [input, setInput] = useState('');
  const [image, setImage] = useState(null);
  const [localImage, setLocalImage] = useState(null);
  const [isClient, setIsClient] = useState(false);
  const [modelSelectOpen, setModelSelectOpen] = useState(true);
  const [models, setModels] = useState([
    { name: 'Model1.glb', image: '/Example1.png' },
    { name: 'Model2.glb', image: '/Example2.png' },
    { name: 'Model3.glb', image: '/Example3.png' },
    { name: 'Model4.glb', image: '/Example4.png' },
    { name: 'Model5.glb', image: '/Example5.png' },
    { name: 'Model6.glb', image: '/Example6.png' },
    { name: 'Model7.glb', image: '/Example7.png' },
    { name: 'Model8.glb', image: '/Example8.png' },
  ]);
  const [selectedModel, setSelectedModel] = useState(null);

  const canvasRef = useRef(null);
  const rendererRef = useRef(null);
  const sceneRef = useRef(null);
  const cameraRef = useRef(null);
  const controlsRef = useRef(null);
  const modelRef = useRef(null);
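
  // Flag client-side rendering so the Three.js effect below only runs after hydration.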
  useEffect(() => {
    setIsClient(true);
  }, []);
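
  // Set up the Three.js renderer, scene, camera, controls and lights once the canvas exists.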
  useEffect(() => {
    const canvas = canvasRef.current;
    if (!isClient || !canvas) return;

    const renderer = new THREE.WebGLRenderer({ canvas, preserveDrawingBuffer: true });
    rendererRef.current = renderer;

    const scene = new THREE.Scene();
    scene.background = new THREE.Color('white');
    sceneRef.current = scene;

    const camera = new THREE.PerspectiveCamera(45, canvas.clientWidth / canvas.clientHeight, 0.01, 1000);
    camera.position.z = 5;
    cameraRef.current = camera;

    const controls = new OrbitControls(camera, canvas);
    controls.enableDamping = true;
    controls.dampingFactor = 0.05;
    controls.screenSpacePanning = false;
    controlsRef.current = controls;

    // Disable default rotation; hold Alt to rotate the model instead.
    controls.enableRotate = false;
    const handleKeyDown = (event) => {
      if (event.altKey) {
        controls.enableRotate = true;
      }
    };
    const handleKeyUp = (event) => {
      if (!event.altKey) {
        controls.enableRotate = false;
      }
    };
    document.addEventListener('keydown', handleKeyDown);
    document.addEventListener('keyup', handleKeyUp);

    // Lighting: one directional light, soft ambient fill, and two point lights.
    const directionalLight = new THREE.DirectionalLight(0xffffff, 2);
    directionalLight.position.set(1, 1, 1);
    scene.add(directionalLight);

    const ambientLight = new THREE.AmbientLight(0xffffff, 0.8);
    scene.add(ambientLight);

    const pointLight1 = new THREE.PointLight(0xffffff, 1.5);
    pointLight1.position.set(5, 5, 5);
    scene.add(pointLight1);

    const pointLight2 = new THREE.PointLight(0xffffff, 1.5);
    pointLight2.position.set(-5, 5, -5);
    scene.add(pointLight2);

    let animationFrameId;
    const animate = () => {
      animationFrameId = requestAnimationFrame(animate);
      controls.update();
      renderer.render(scene, camera);
    };
    animate();

    return () => {
      cancelAnimationFrame(animationFrameId);
      document.removeEventListener('keydown', handleKeyDown);
      document.removeEventListener('keyup', handleKeyUp);
      controls.dispose();
      renderer.dispose();
    };
  }, [isClient]);
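
  // Load the selected preset .glb and swap it into the scene, replacing any model already loaded.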
  useEffect(() => {
    if (selectedModel) {
      const loader = new GLTFLoader();
      loader.load(selectedModel.name, (gltf) => {
        // Remove the previous model before adding the new one.
        if (modelRef.current) {
          sceneRef.current.remove(modelRef.current);
        }
        const model = gltf.scene;
        sceneRef.current.add(model);
        modelRef.current = model;
      });
    }
  }, [selectedModel]);
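
  // Open a realtime WebSocket connection to fal's fast-turbo-diffusion image-to-image endpoint;
  // each result's image bytes are turned into an object URL for display.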
  const { send } = fal.realtime.connect('fal-ai/fast-turbo-diffusion/image-to-image', {
    connectionKey: 'realtime-3daistudio-app',
    clientOnly: true,
    onResult(result) {
      if (result.error) return;
      const imageBytes = result.images[0].content;
      const blob = new Blob([imageBytes], { type: 'image/png' });
      const imageUrl = URL.createObjectURL(blob);
      setImage(imageUrl);
    },
    onError(error) {
      console.error('WebSocket error:', error);
    },
    onOpen() {
      console.log('WebSocket connection opened');
    },
    onClose() {
      console.log('WebSocket connection closed');
    },
  });
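
  // Render the current view and export it as a 512x512 JPEG data URL to keep the request payload small.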
  async function getDataUrl() {
    const canvas = canvasRef.current;
    if (!canvas) return;
    const renderer = rendererRef.current;
    renderer.render(sceneRef.current, cameraRef.current);
    // Resize the canvas contents to 512x512 before exporting.
    const resizeCanvas = document.createElement('canvas');
    // resizeCanvas.width = 1024;
    // resizeCanvas.height = 1024;
    resizeCanvas.width = 512;
    resizeCanvas.height = 512;
    const resizeCtx = resizeCanvas.getContext('2d');
    // resizeCtx.drawImage(canvas, 0, 0, 1024, 1024);
    resizeCtx.drawImage(canvas, 0, 0, 512, 512);
    return resizeCanvas.toDataURL('image/jpeg', 0.8);
    // return resizeCanvas.toDataURL('image/png');
  }
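
  // After each pointerup on the canvas (i.e. after rotating the model), snapshot the canvas and
  // send it to fal together with the current prompt, debounced by 300 ms.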
  useEffect(() => {
    let timeoutId;
    const handleChange = async () => {
      const dataUrl = await getDataUrl();
      if (!dataUrl) return;
      setLocalImage(dataUrl);
      // Only send when the snapshot actually changed since the last send.
      if (dataUrl !== localImage) {
        const response = await fetch(dataUrl);
        const blob = await response.blob();
        const arrayBuffer = await blob.arrayBuffer();
        const imageBytes = new Uint8Array(arrayBuffer);
        send({
          ...baseArgs,
          image_bytes: imageBytes,
          prompt: input,
        });
      }
    };

    const debouncedHandleChange = () => {
      clearTimeout(timeoutId);
      timeoutId = setTimeout(handleChange, 300);
    };

    const canvas = canvasRef.current;
    if (isClient && canvas) {
      canvas.addEventListener('pointerup', debouncedHandleChange);
    }
    return () => {
      clearTimeout(timeoutId);
      if (canvas) {
        canvas.removeEventListener('pointerup', debouncedHandleChange);
      }
    };
  }, [isClient, input, send, localImage]);
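
  // Parse an uploaded .glb file and swap it into the scene in place of the current model.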
  const handleFileUpload = (event) => {
    const file = event.target.files[0];
    if (!file) return;
    const reader = new FileReader();
    reader.onload = (loadEvent) => {
      const arrayBuffer = loadEvent.target.result;
      const loader = new GLTFLoader();
      const scene = sceneRef.current;
      if (modelRef.current) {
        scene.remove(modelRef.current);
      }
      loader.parse(arrayBuffer, '', (gltf) => {
        const model = gltf.scene;
        scene.add(model);
        modelRef.current = model;
        setModelSelectOpen(false);
      });
    };
    reader.readAsArrayBuffer(file);
  };
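
  // Choosing a preset closes the picker; the effect above then loads the model.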
  const handleModelSelect = (model) => {
    setSelectedModel(model);
    setModelSelectOpen(false);
  };
  return (
    <SidebarLayout>
      <div>
        <div className="relative w-full h-full flex flex-col items-center justify-center bg-[#1f1f1f] text-white">
          <h1 className="text-4xl font-bold mb-8">Realtime Texture Generation</h1>
          <div className="flex space-x-8">
            <div className="flex flex-col items-center">
              <div className="w-[550px] h-[550px] border-2 border-gray-500 mb-4">
                <canvas ref={canvasRef} width={550} height={550} />
              </div>
              <input
                type="file"
                accept=".glb"
                onChange={handleFileUpload}
                className="px-4 py-2 bg-blue-500 text-white rounded-lg cursor-pointer"
              />
            </div>
            <div className="flex flex-col items-center">
              <div className="w-[550px] h-[550px] border-2 border-gray-500 flex items-center justify-center">
                {image ? (
                  <Image src={image} width={550} height={550} alt="fal image" />
                ) : (
                  <div className="animate-spin rounded-full h-20 w-20 border-t-4 border-white"></div>
                )}
              </div>
              <input
                className="px-4 py-2 border border-gray-500 bg-transparent rounded-lg w-full mt-4"
                value={input}
                onChange={(e) => {
                  setInput(e.target.value);
                }}
                placeholder="Enter prompt"
              />
            </div>
          </div>
        </div>
        {modelSelectOpen && (
          <div className="fixed inset-0 flex items-center justify-center z-50 bg-black bg-opacity-50">
            <div className="bg-white p-8 rounded shadow-lg w-[600px]">
              <h2 className="text-2xl font-bold mb-4 text-gray-900">Select Model</h2>
              <div className="grid grid-cols-4 gap-4">
                {models.map((model, index) => (
                  <div
                    key={index}
                    className="cursor-pointer border border-gray-300 p-2 rounded"
                    onClick={() => handleModelSelect(model)}
                  >
                    <Image src={model.image} width={200} height={200} alt={model.name} />
                    <p className="text-center">{model.name}</p>
                  </div>
                ))}
              </div>
              <div className="mt-8 flex justify-between">
                <button
                  className="px-4 py-2 bg-gray-200 text-gray-700 rounded"
                  onClick={() => setModelSelectOpen(false)}
                >
                  Cancel
                </button>
                <input
                  type="file"
                  accept=".glb"
                  onChange={handleFileUpload}
                  className="px-4 py-2 bg-blue-500 text-white rounded cursor-pointer"
                />
              </div>
            </div>
          </div>
        )}
      </div>
    </SidebarLayout>
  );
};

export default TextureGeneration;