Created
March 9, 2025 23:22
-
-
Save pwillia7/3ad3e010384a6c9eb0818402e447c784 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| { | |
| "9": { | |
| "inputs": { | |
| "filename_prefix": "stage2_tile_img2img", | |
| "images": [ | |
| "391", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "SaveImage", | |
| "_meta": { | |
| "title": "Save Image" | |
| } | |
| }, | |
| "29": { | |
| "inputs": { | |
| "control_net_name": "SDXL\\controlnet-tile-sdxl-1.0\\diffusion_pytorch_model.safetensors" | |
| }, | |
| "class_type": "ControlNetLoader", | |
| "_meta": { | |
| "title": "Load ControlNet Model" | |
| } | |
| }, | |
| "45": { | |
| "inputs": { | |
| "strength": 0.72, | |
| "start_percent": 0, | |
| "end_percent": 1, | |
| "positive": [ | |
| "120", | |
| 0 | |
| ], | |
| "negative": [ | |
| "121", | |
| 0 | |
| ], | |
| "control_net": [ | |
| "29", | |
| 0 | |
| ], | |
| "image": [ | |
| "105", | |
| 0 | |
| ], | |
| "vae": [ | |
| "380", | |
| 2 | |
| ] | |
| }, | |
| "class_type": "ControlNetApplyAdvanced", | |
| "_meta": { | |
| "title": "Apply ControlNet" | |
| } | |
| }, | |
| "69": { | |
| "inputs": { | |
| "text": [ | |
| "318", | |
| 0 | |
| ], | |
| "text2": "--ar 3:2 --chaos 30 --q 1 --stylize 800\n\nA young boy in a baseball uniform is being handed a ball by an older man in a suit, set against the backdrop of a large crowd watching a baseball game.\n\nThe image is rendered in a realistic style, with detailed textures and shading. The lighting is soft and even, with a warm glow emanating from the sun overhead. The camera angle is slightly elevated, capturing the scene from above and giving it a sense of grandeur." | |
| }, | |
| "class_type": "ShowText|pysssss", | |
| "_meta": { | |
| "title": "Show Text 🐍" | |
| } | |
| }, | |
| "78": { | |
| "inputs": { | |
| "tile_size": 1024, | |
| "overlap": 64, | |
| "samples": [ | |
| "115", | |
| 0 | |
| ], | |
| "vae": [ | |
| "380", | |
| 2 | |
| ] | |
| }, | |
| "class_type": "VAEDecodeTiled", | |
| "_meta": { | |
| "title": "VAE Decode (Tiled)" | |
| } | |
| }, | |
| "81": { | |
| "inputs": { | |
| "tile_size": 1024, | |
| "fast": false, | |
| "color_fix": false, | |
| "pixels": [ | |
| "360", | |
| 0 | |
| ], | |
| "vae": [ | |
| "380", | |
| 2 | |
| ] | |
| }, | |
| "class_type": "VAEEncodeTiled_TiledDiffusion", | |
| "_meta": { | |
| "title": "Tiled VAE Encode" | |
| } | |
| }, | |
| "95": { | |
| "inputs": { | |
| "rgthree_comparer": { | |
| "images": [ | |
| { | |
| "name": "A", | |
| "selected": true, | |
| "url": "/api/view?filename=rgthree.compare._temp_vkbci_00015_.png&type=temp&subfolder=&rand=0.4188783677910386" | |
| }, | |
| { | |
| "name": "B", | |
| "selected": true, | |
| "url": "/api/view?filename=rgthree.compare._temp_vkbci_00016_.png&type=temp&subfolder=&rand=0.9968284165099663" | |
| } | |
| ] | |
| }, | |
| "image_a": [ | |
| "273", | |
| 0 | |
| ], | |
| "image_b": [ | |
| "78", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "Image Comparer (rgthree)", | |
| "_meta": { | |
| "title": "Image Comparer (rgthree)" | |
| } | |
| }, | |
| "105": { | |
| "inputs": { | |
| "pyrUp_iters": 1, | |
| "resolution": 1024, | |
| "image": [ | |
| "256", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "TilePreprocessor", | |
| "_meta": { | |
| "title": "Tile" | |
| } | |
| }, | |
| "115": { | |
| "inputs": { | |
| "seed": [ | |
| "240", | |
| 0 | |
| ], | |
| "tiling": 1, | |
| "steps": 8, | |
| "cfg": 4.5, | |
| "sampler_name": "euler_ancestral", | |
| "scheduler": "normal", | |
| "denoise": 1, | |
| "model": [ | |
| "248", | |
| 0 | |
| ], | |
| "positive": [ | |
| "45", | |
| 0 | |
| ], | |
| "negative": [ | |
| "45", | |
| 1 | |
| ], | |
| "latent_image": [ | |
| "81", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "Tiled KSampler", | |
| "_meta": { | |
| "title": "Tiled KSampler" | |
| } | |
| }, | |
| "120": { | |
| "inputs": { | |
| "width": [ | |
| "193", | |
| 0 | |
| ], | |
| "height": [ | |
| "193", | |
| 1 | |
| ], | |
| "crop_w": 0, | |
| "crop_h": 0, | |
| "target_width": [ | |
| "193", | |
| 0 | |
| ], | |
| "target_height": [ | |
| "193", | |
| 1 | |
| ], | |
| "text_g": [ | |
| "200", | |
| 0 | |
| ], | |
| "text_l": [ | |
| "200", | |
| 0 | |
| ], | |
| "clip": [ | |
| "380", | |
| 1 | |
| ] | |
| }, | |
| "class_type": "CLIPTextEncodeSDXL", | |
| "_meta": { | |
| "title": "CLIPTextEncodeSDXL" | |
| } | |
| }, | |
| "121": { | |
| "inputs": { | |
| "width": [ | |
| "193", | |
| 0 | |
| ], | |
| "height": [ | |
| "193", | |
| 1 | |
| ], | |
| "crop_w": 0, | |
| "crop_h": 0, | |
| "target_width": [ | |
| "193", | |
| 0 | |
| ], | |
| "target_height": [ | |
| "193", | |
| 1 | |
| ], | |
| "text_g": "bad anatomy, deformed, anime, manga, Blurred, blurry, poorly drawn, (crepuscular rays:1.2), dramatic lighting", | |
| "text_l": "bad anatomy, deformed, anime, manga, Blurred, blurry, poorly drawn, (crepuscular rays:1.2), dramatic lighting", | |
| "clip": [ | |
| "380", | |
| 1 | |
| ] | |
| }, | |
| "class_type": "CLIPTextEncodeSDXL", | |
| "_meta": { | |
| "title": "CLIPTextEncodeSDXL" | |
| } | |
| }, | |
| "140": { | |
| "inputs": { | |
| "image1": [ | |
| "273", | |
| 0 | |
| ], | |
| "image2": [ | |
| "78", | |
| 0 | |
| ], | |
| "image3": [ | |
| "299", | |
| 0 | |
| ], | |
| "image4": [ | |
| "409", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "ImpactMakeImageBatch", | |
| "_meta": { | |
| "title": "Make Image Batch" | |
| } | |
| }, | |
| "141": { | |
| "inputs": { | |
| "gap": 0, | |
| "max_rows": 2, | |
| "images": [ | |
| "140", | |
| 0 | |
| ], | |
| "annotation": [ | |
| "145", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "ImagesGridByRows", | |
| "_meta": { | |
| "title": "ImagesGridByRows" | |
| } | |
| }, | |
| "142": { | |
| "inputs": { | |
| "filename_prefix": "wikipix2024/test5/test5-grid", | |
| "images": [ | |
| "141", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "SaveImage", | |
| "_meta": { | |
| "title": "Save Image" | |
| } | |
| }, | |
| "145": { | |
| "inputs": { | |
| "column_texts": "Original, Stage II; Stage I, Stage III", | |
| "row_texts": "", | |
| "font_size": 65 | |
| }, | |
| "class_type": "GridAnnotation", | |
| "_meta": { | |
| "title": "GridAnnotation" | |
| } | |
| }, | |
| "147": { | |
| "inputs": { | |
| "scale": 0.3, | |
| "model": [ | |
| "380", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "PerturbedAttentionGuidance", | |
| "_meta": { | |
| "title": "PerturbedAttentionGuidance" | |
| } | |
| }, | |
| "159": { | |
| "inputs": { | |
| "model_name": "bbox/face_yolov8n_v2.pt" | |
| }, | |
| "class_type": "UltralyticsDetectorProvider", | |
| "_meta": { | |
| "title": "UltralyticsDetectorProvider" | |
| } | |
| }, | |
| "160": { | |
| "inputs": { | |
| "model_name": "sam_vit_b_01ec64.pth", | |
| "device_mode": "AUTO" | |
| }, | |
| "class_type": "SAMLoader", | |
| "_meta": { | |
| "title": "SAMLoader (Impact)" | |
| } | |
| }, | |
| "161": { | |
| "inputs": { | |
| "strength": 0.5, | |
| "start_percent": 0.005, | |
| "end_percent": 0.98, | |
| "positive": [ | |
| "205", | |
| 0 | |
| ], | |
| "negative": [ | |
| "121", | |
| 0 | |
| ], | |
| "control_net": [ | |
| "163", | |
| 0 | |
| ], | |
| "image": [ | |
| "166", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "ControlNetApplyAdvanced", | |
| "_meta": { | |
| "title": "Apply ControlNet (CANNY)" | |
| } | |
| }, | |
| "163": { | |
| "inputs": { | |
| "control_net_name": "controlnetxlCNXL_xinsirCnUnionPromax.safetensors" | |
| }, | |
| "class_type": "ControlNetLoader", | |
| "_meta": { | |
| "title": "Load ControlNet Model" | |
| } | |
| }, | |
| "166": { | |
| "inputs": { | |
| "enable_threshold": "true", | |
| "threshold_low": 0, | |
| "threshold_high": 0.9500000000000001, | |
| "images": [ | |
| "78", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "Image Canny Filter", | |
| "_meta": { | |
| "title": "Image Canny Filter" | |
| } | |
| }, | |
| "168": { | |
| "inputs": { | |
| "rgthree_comparer": { | |
| "images": [ | |
| { | |
| "name": "A", | |
| "selected": true, | |
| "url": "/api/view?filename=rgthree.compare._temp_pmlyb_00013_.png&type=temp&subfolder=&rand=0.9702024869342374" | |
| }, | |
| { | |
| "name": "B", | |
| "selected": true, | |
| "url": "/api/view?filename=rgthree.compare._temp_pmlyb_00014_.png&type=temp&subfolder=&rand=0.8865873416243557" | |
| } | |
| ] | |
| }, | |
| "image_a": [ | |
| "78", | |
| 0 | |
| ], | |
| "image_b": [ | |
| "252", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "Image Comparer (rgthree)", | |
| "_meta": { | |
| "title": "Image Comparer (rgthree)" | |
| } | |
| }, | |
| "169": { | |
| "inputs": { | |
| "rgthree_comparer": { | |
| "images": [ | |
| { | |
| "name": "A", | |
| "selected": true, | |
| "url": "/api/view?filename=rgthree.compare._temp_qjpzq_00011_.png&type=temp&subfolder=&rand=0.8166529154517443" | |
| }, | |
| { | |
| "name": "B", | |
| "selected": true, | |
| "url": "/api/view?filename=rgthree.compare._temp_qjpzq_00012_.png&type=temp&subfolder=&rand=0.6311301502299411" | |
| } | |
| ] | |
| }, | |
| "image_a": [ | |
| "252", | |
| 0 | |
| ], | |
| "image_b": [ | |
| "287", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "Image Comparer (rgthree)", | |
| "_meta": { | |
| "title": "Image Comparer (rgthree)" | |
| } | |
| }, | |
| "171": { | |
| "inputs": { | |
| "control_net_name": "controlnetxlCNXL_xinsirDepth.safetensors" | |
| }, | |
| "class_type": "ControlNetLoader", | |
| "_meta": { | |
| "title": "Load ControlNet Model" | |
| } | |
| }, | |
| "172": { | |
| "inputs": { | |
| "strength": 0.78, | |
| "start_percent": 0.005, | |
| "end_percent": 0.98, | |
| "positive": [ | |
| "176", | |
| 0 | |
| ], | |
| "negative": [ | |
| "176", | |
| 1 | |
| ], | |
| "control_net": [ | |
| "171", | |
| 0 | |
| ], | |
| "image": [ | |
| "385", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "ControlNetApplyAdvanced", | |
| "_meta": { | |
| "title": "Apply ControlNet (DEPTH)" | |
| } | |
| }, | |
| "176": { | |
| "inputs": { | |
| "strength": 0.7000000000000001, | |
| "start_percent": 0.05, | |
| "end_percent": 0.9500000000000001, | |
| "positive": [ | |
| "120", | |
| 0 | |
| ], | |
| "negative": [ | |
| "121", | |
| 0 | |
| ], | |
| "control_net": [ | |
| "163", | |
| 0 | |
| ], | |
| "image": [ | |
| "166", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "ControlNetApplyAdvanced", | |
| "_meta": { | |
| "title": "Apply ControlNet (CANNY)" | |
| } | |
| }, | |
| "193": { | |
| "inputs": { | |
| "image": [ | |
| "256", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "GetImageSize+", | |
| "_meta": { | |
| "title": "🔧 Get Image Size" | |
| } | |
| }, | |
| "199": { | |
| "inputs": { | |
| "text": [ | |
| "69", | |
| 0 | |
| ], | |
| "truncate_by": "words", | |
| "truncate_from": "beginning", | |
| "truncate_to": 110 | |
| }, | |
| "class_type": "Text String Truncate", | |
| "_meta": { | |
| "title": "Text String Truncate" | |
| } | |
| }, | |
| "200": { | |
| "inputs": { | |
| "text": [ | |
| "199", | |
| 0 | |
| ], | |
| "text2": "--ar 3:2 --chaos 30 --q 1 --stylize 800 A young boy in a baseball uniform is being handed a ball by an older man in a suit, set against the backdrop of a large crowd watching a baseball game. The image is rendered in a realistic style, with detailed textures and shading. The lighting is soft and even, with a warm glow emanating from the sun overhead. The camera angle is slightly elevated, capturing the scene from above and giving it a sense of grandeur." | |
| }, | |
| "class_type": "ShowText|pysssss", | |
| "_meta": { | |
| "title": "Show Text 🐍" | |
| } | |
| }, | |
| "205": { | |
| "inputs": { | |
| "width": [ | |
| "193", | |
| 0 | |
| ], | |
| "height": [ | |
| "193", | |
| 1 | |
| ], | |
| "crop_w": 0, | |
| "crop_h": 0, | |
| "target_width": [ | |
| "193", | |
| 0 | |
| ], | |
| "target_height": [ | |
| "193", | |
| 1 | |
| ], | |
| "text_g": [ | |
| "241", | |
| 0 | |
| ], | |
| "text_l": [ | |
| "241", | |
| 0 | |
| ], | |
| "clip": [ | |
| "380", | |
| 1 | |
| ] | |
| }, | |
| "class_type": "CLIPTextEncodeSDXL", | |
| "_meta": { | |
| "title": "CLIPTextEncodeSDXL" | |
| } | |
| }, | |
| "238": { | |
| "inputs": { | |
| "seed": [ | |
| "240", | |
| 0 | |
| ], | |
| "tiling": 0, | |
| "steps": 8, | |
| "cfg": 4, | |
| "sampler_name": "euler_ancestral", | |
| "scheduler": "normal", | |
| "denoise": 1, | |
| "model": [ | |
| "302", | |
| 0 | |
| ], | |
| "positive": [ | |
| "172", | |
| 0 | |
| ], | |
| "negative": [ | |
| "172", | |
| 1 | |
| ], | |
| "latent_image": [ | |
| "115", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "Tiled KSampler", | |
| "_meta": { | |
| "title": "Tiled KSampler" | |
| } | |
| }, | |
| "240": { | |
| "inputs": { | |
| "seed": 568159718233566 | |
| }, | |
| "class_type": "Seed (rgthree)", | |
| "_meta": { | |
| "title": "Seed (rgthree)" | |
| } | |
| }, | |
| "241": { | |
| "inputs": { | |
| "delimiter": ", ", | |
| "clean_whitespace": "true", | |
| "text_a": [ | |
| "242", | |
| 0 | |
| ], | |
| "text_b": [ | |
| "200", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "Text Concatenate", | |
| "_meta": { | |
| "title": "Text Concatenate" | |
| } | |
| }, | |
| "242": { | |
| "inputs": { | |
| "text": "historical face detail, correctly aged face", | |
| "text_b": "", | |
| "text_c": "", | |
| "text_d": "" | |
| }, | |
| "class_type": "Text String", | |
| "_meta": { | |
| "title": "Text String" | |
| } | |
| }, | |
| "248": { | |
| "inputs": { | |
| "scale": 0.45, | |
| "model": [ | |
| "380", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "PerturbedAttentionGuidance", | |
| "_meta": { | |
| "title": "PerturbedAttentionGuidance" | |
| } | |
| }, | |
| "251": { | |
| "inputs": { | |
| "tile_size": 1024, | |
| "overlap": 64, | |
| "samples": [ | |
| "238", | |
| 0 | |
| ], | |
| "vae": [ | |
| "380", | |
| 2 | |
| ] | |
| }, | |
| "class_type": "VAEDecodeTiled", | |
| "_meta": { | |
| "title": "VAE Decode (Tiled)" | |
| } | |
| }, | |
| "252": { | |
| "inputs": { | |
| "amount": 0.8, | |
| "image": [ | |
| "251", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "ImageCASharpening+", | |
| "_meta": { | |
| "title": "🔧 Image Contrast Adaptive Sharpening" | |
| } | |
| }, | |
| "256": { | |
| "inputs": { | |
| "width": 2048, | |
| "height": 2048, | |
| "upscale_method": "lanczos", | |
| "keep_proportion": true, | |
| "divisible_by": 32, | |
| "crop": "disabled", | |
| "image": [ | |
| "389", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "ImageResizeKJ", | |
| "_meta": { | |
| "title": "Resize Image" | |
| } | |
| }, | |
| "273": { | |
| "inputs": { | |
| "width": 2048, | |
| "height": 2048, | |
| "upscale_method": "nearest-exact", | |
| "keep_proportion": true, | |
| "divisible_by": 0, | |
| "crop": "disabled", | |
| "image": [ | |
| "389", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "ImageResizeKJ", | |
| "_meta": { | |
| "title": "Resize Image" | |
| } | |
| }, | |
| "275": { | |
| "inputs": { | |
| "seed": [ | |
| "297", | |
| 0 | |
| ], | |
| "tiling": 0, | |
| "steps": 8, | |
| "cfg": 6, | |
| "sampler_name": "dpmpp_sde_gpu", | |
| "scheduler": "karras", | |
| "denoise": 1, | |
| "model": [ | |
| "381", | |
| 0 | |
| ], | |
| "positive": [ | |
| "172", | |
| 0 | |
| ], | |
| "negative": [ | |
| "172", | |
| 1 | |
| ], | |
| "latent_image": [ | |
| "115", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "Tiled KSampler", | |
| "_meta": { | |
| "title": "Tiled KSampler" | |
| } | |
| }, | |
| "276": { | |
| "inputs": { | |
| "tile_size": 1024, | |
| "overlap": 64, | |
| "samples": [ | |
| "275", | |
| 0 | |
| ], | |
| "vae": [ | |
| "380", | |
| 2 | |
| ] | |
| }, | |
| "class_type": "VAEDecodeTiled", | |
| "_meta": { | |
| "title": "VAE Decode (Tiled)" | |
| } | |
| }, | |
| "277": { | |
| "inputs": { | |
| "amount": 0.8, | |
| "image": [ | |
| "276", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "ImageCASharpening+", | |
| "_meta": { | |
| "title": "🔧 Image Contrast Adaptive Sharpening" | |
| } | |
| }, | |
| "280": { | |
| "inputs": { | |
| "filename_prefix": "stage3_tile_img2img", | |
| "images": [ | |
| "409", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "SaveImage", | |
| "_meta": { | |
| "title": "Save Image" | |
| } | |
| }, | |
| "282": { | |
| "inputs": { | |
| "filename_prefix": "stage1_tile_img2img", | |
| "images": [ | |
| "78", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "SaveImage", | |
| "_meta": { | |
| "title": "Save Image" | |
| } | |
| }, | |
| "283": { | |
| "inputs": { | |
| "scale": 0.75, | |
| "model": [ | |
| "380", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "PerturbedAttentionGuidance", | |
| "_meta": { | |
| "title": "PerturbedAttentionGuidance" | |
| } | |
| }, | |
| "287": { | |
| "inputs": { | |
| "color_space": "LAB", | |
| "factor": 0.6, | |
| "device": "gpu", | |
| "batch_size": 0, | |
| "image": [ | |
| "391", | |
| 0 | |
| ], | |
| "reference": [ | |
| "256", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "ImageColorMatch+", | |
| "_meta": { | |
| "title": "🔧 Image Color Match" | |
| } | |
| }, | |
| "288": { | |
| "inputs": { | |
| "filename_prefix": "stage2_colorfix_\\tile_img2img", | |
| "images": [ | |
| "299", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "SaveImage", | |
| "_meta": { | |
| "title": "Save Image" | |
| } | |
| }, | |
| "297": { | |
| "inputs": { | |
| "seed": 505923945412630 | |
| }, | |
| "class_type": "Seed (rgthree)", | |
| "_meta": { | |
| "title": "Seed (rgthree)" | |
| } | |
| }, | |
| "299": { | |
| "inputs": { | |
| "factor": 1.5, | |
| "images": [ | |
| "301", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "Saturation", | |
| "_meta": { | |
| "title": "Saturation" | |
| } | |
| }, | |
| "301": { | |
| "inputs": { | |
| "black_level": 0, | |
| "mid_level": 127.5, | |
| "white_level": 255, | |
| "image": [ | |
| "287", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "Image Levels Adjustment", | |
| "_meta": { | |
| "title": "Image Levels Adjustment" | |
| } | |
| }, | |
| "302": { | |
| "inputs": { | |
| "mimic_scale": 3.5, | |
| "threshold_percentile": 0.75, | |
| "model": [ | |
| "147", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "DynamicThresholdingSimple", | |
| "_meta": { | |
| "title": "DynamicThresholdingSimple" | |
| } | |
| }, | |
| "306": { | |
| "inputs": { | |
| "directory": "C:\\Users\\Patrick\\Desktop\\Suleymanname", | |
| "image_load_cap": 1, | |
| "start_index": 23, | |
| "load_always": true | |
| }, | |
| "class_type": "LoadImagesFromDir //Inspire", | |
| "_meta": { | |
| "title": "Load Image Batch From Dir (Inspire)" | |
| } | |
| }, | |
| "309": { | |
| "inputs": { | |
| "image": "pasted/image (174).png", | |
| "upload": "image" | |
| }, | |
| "class_type": "LoadImage", | |
| "_meta": { | |
| "title": "Load Image" | |
| } | |
| }, | |
| "318": { | |
| "inputs": { | |
| "query": "Here is a guide to make prompts for a generative ai stable diffusion models image to image. Respond with only the prompt based on the input image. Do not include headers, sections, titles, style text, or any special characters unless otherwise specified below.\n\n1. **Text Description**: Start with a detailed textual description of the image you want to generate. This description should be as specific as possible to guide the AI in creating the image. The more descriptive your prompt, the better, as anything not specified will be randomly defined by the AI.\n\n2. **Parameters**: After the text description, you can include parameters to further specify how the image should be generated. These parameters are preceded by two hyphens (`--`). Here are some examples:\n- `--ar 3:2`: This sets the aspect ratio of the image to 3:2. Aspect ratios greater than 2:1 are experimental and may produce unpredictable results.\n- `--chaos 30`: This sets the chaos level of the image to 30. The chaos parameter controls the randomness of the image generation process. The range is 0-100.\n- `--q 1`: This sets the quality of the image to 1. The quality parameter controls how much rendering quality time you want to spend. The default value is 1. It only accepts the values: .25, .5, and 1 for the current model. Larger values are rounded down to 1. It only influences the initial image generation.\n- `--stylize 800`: This sets the stylization level of the image to 800. This parameter influences how strongly Midjourney's default aesthetic style is applied to Jobs. The range is 0-1000.\n\n3. **Exclusion**: If you want to exclude certain elements from the image, you can add a `--no {element}` parameter. For example, `--no camera` would instruct the AI not to include a camera in the image. The `--no` parameter accepts multiple words separated with commas: `--no item1, item2, item3, item4`.\nIt's important to note that the AI considers any word within the prompt as something you would like to see generated in the final image. Therefore, using phrases like \"without any fruit\" or \"don't add fruit\" are more likely to produce pictures that include fruits because the relationship between \"without\" or \"don't\" and the \"fruit\" is not interpreted by the AI in the same way a human reader would understand it. To improve your results, focus your prompt on what you do want to see in the image and use the `--no` parameter to specify concepts you don't want to include.\n\n4. **Image Style**: Define the style of your image. You can ask Midjourney to imitate the style of a painting or a cartoon by suggesting artists to base it on. You can also specify the type of camera, lens, and model that the AI should imitate.\n\n5. **Subject**: Describe each subject well. If necessary, list the number of individuals.\n\n6. **Environment**: Put your subjects in an environment to give context to your image.\n\n7. **Lighting**: Specify the time of day to guide the lighting, colors, and contrasts of the image.\n\n8. **Angle of View**: You can specify the viewing angle of the image, such as \"Wide-Angle Shot\", \"Medium-Shot\", or \"Close-Up\".\n\n9. **Final Prompt**: Combine the text description, parameters, and the additional elements (image style, subject, environment, lighting, angle of view) to create the final prompt.\n\n**Additional Tips**:\n\n- Invoke unique artists or combine names for new styles (e.g., \"A temple by Greg Rutkowski and Ross Tran\").\n- Specify composition, camera settings, and lighting to create a visually dramatic image.\n- Use various art styles, mediums, and scene descriptors to guide the MJ model.\n- Combine well-defined concepts in unique ways (e.g., \"cyberpunk shinto priest\").\n- Integrate an artist's name or style into your prompt to influence the generated image.\n- Be ultra-descriptive in your prompts.\nThe more specific and detailed your prompt, the better the AI can generate an image that aligns with your vision.\n- Experiment with different parameters and their values to get the desired output.\n- Use the `--no` parameter effectively to exclude certain elements from your image.\n\nRespond with only the best prompt based on the input image. Do not include headers, sections, titles, or style text.", | |
| "debug": "disable", | |
| "url": "http://ptkwilliams.ddns.net:11434/", | |
| "model": "llama3.2-vision:latest", | |
| "keep_alive": 0, | |
| "format": "text", | |
| "seed": 1694229774, | |
| "images": [ | |
| "336", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "OllamaVision", | |
| "_meta": { | |
| "title": "Ollama Vision" | |
| } | |
| }, | |
| "324": { | |
| "inputs": { | |
| "comparison": "a <= b", | |
| "a": [ | |
| "329", | |
| 0 | |
| ], | |
| "b": [ | |
| "328", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "Compare-🔬", | |
| "_meta": { | |
| "title": "Is Image smaller than 2048?" | |
| } | |
| }, | |
| "327": { | |
| "inputs": { | |
| "image": [ | |
| "389", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "GetImageSize+", | |
| "_meta": { | |
| "title": "🔧 Get Image Size" | |
| } | |
| }, | |
| "328": { | |
| "inputs": { | |
| "value": 2048 | |
| }, | |
| "class_type": "Int-🔬", | |
| "_meta": { | |
| "title": "Int" | |
| } | |
| }, | |
| "329": { | |
| "inputs": { | |
| "mode": true, | |
| "a": [ | |
| "327", | |
| 0 | |
| ], | |
| "b": [ | |
| "327", | |
| 1 | |
| ] | |
| }, | |
| "class_type": "ImpactMinMax", | |
| "_meta": { | |
| "title": "ImpactMinMax" | |
| } | |
| }, | |
| "336": { | |
| "inputs": { | |
| "width": 1024, | |
| "height": 1024, | |
| "upscale_method": "nearest-exact", | |
| "keep_proportion": true, | |
| "divisible_by": 0, | |
| "crop": "disabled", | |
| "image": [ | |
| "389", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "ImageResizeKJ", | |
| "_meta": { | |
| "title": "Resize Image" | |
| } | |
| }, | |
| "360": { | |
| "inputs": { | |
| "cond": [ | |
| "324", | |
| 0 | |
| ], | |
| "tt_value": [ | |
| "404", | |
| 0 | |
| ], | |
| "ff_value": [ | |
| "256", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "ImpactConditionalBranch", | |
| "_meta": { | |
| "title": "ImpactConditionalBranch" | |
| } | |
| }, | |
| "378": { | |
| "inputs": { | |
| "trigger_text": [ | |
| "199", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "OllamaKiller", | |
| "_meta": { | |
| "title": "Ollama Process Killer" | |
| } | |
| }, | |
| "380": { | |
| "inputs": { | |
| "ckpt_name": "sleipnirTLHTurbo_v27TLHFP32Main.safetensors" | |
| }, | |
| "class_type": "CheckpointLoaderSimple", | |
| "_meta": { | |
| "title": "Load Checkpoint" | |
| } | |
| }, | |
| "381": { | |
| "inputs": { | |
| "mimic_scale": 3, | |
| "threshold_percentile": 0.85, | |
| "model": [ | |
| "283", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "DynamicThresholdingSimple", | |
| "_meta": { | |
| "title": "DynamicThresholdingSimple" | |
| } | |
| }, | |
| "385": { | |
| "inputs": { | |
| "da_model": [ | |
| "386", | |
| 0 | |
| ], | |
| "images": [ | |
| "360", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "DepthAnything_V2", | |
| "_meta": { | |
| "title": "Depth Anything V2" | |
| } | |
| }, | |
| "386": { | |
| "inputs": { | |
| "model": "depth_anything_v2_vitl_fp32.safetensors" | |
| }, | |
| "class_type": "DownloadAndLoadDepthAnythingV2Model", | |
| "_meta": { | |
| "title": "DownloadAndLoadDepthAnythingV2Model" | |
| } | |
| }, | |
| "389": { | |
| "inputs": { | |
| "data": "%%B64IMAGE%%" | |
| }, | |
| "class_type": "LoadImageFromBase64", | |
| "_meta": { | |
| "title": "Load Image From Base64" | |
| } | |
| }, | |
| "391": { | |
| "inputs": { | |
| "guide_size": 1024, | |
| "guide_size_for": true, | |
| "max_size": 1024, | |
| "seed": [ | |
| "240", | |
| 0 | |
| ], | |
| "steps": 8, | |
| "cfg": 4, | |
| "sampler_name": "euler_ancestral", | |
| "scheduler": "normal", | |
| "denoise": 0.25, | |
| "feather": 5, | |
| "noise_mask": true, | |
| "force_inpaint": true, | |
| "wildcard": "", | |
| "cycle": 1, | |
| "inpaint_model": false, | |
| "noise_mask_feather": 20, | |
| "image": [ | |
| "252", | |
| 0 | |
| ], | |
| "segs": [ | |
| "405", | |
| 0 | |
| ], | |
| "model": [ | |
| "147", | |
| 0 | |
| ], | |
| "clip": [ | |
| "380", | |
| 1 | |
| ], | |
| "vae": [ | |
| "380", | |
| 2 | |
| ], | |
| "positive": [ | |
| "423", | |
| 0 | |
| ], | |
| "negative": [ | |
| "161", | |
| 1 | |
| ] | |
| }, | |
| "class_type": "DetailerForEach", | |
| "_meta": { | |
| "title": "Detailer (SEGS)" | |
| } | |
| }, | |
| "398": { | |
| "inputs": { | |
| "bbox_threshold": 0.5, | |
| "bbox_dilation": 0, | |
| "crop_factor": 3, | |
| "drop_size": 10, | |
| "sub_threshold": 0.5, | |
| "sub_dilation": 0, | |
| "sub_bbox_expansion": 0, | |
| "sam_mask_hint_threshold": 0.7, | |
| "post_dilation": 0, | |
| "bbox_detector": [ | |
| "159", | |
| 0 | |
| ], | |
| "image": [ | |
| "252", | |
| 0 | |
| ], | |
| "sam_model_opt": [ | |
| "160", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "ImpactSimpleDetectorSEGS", | |
| "_meta": { | |
| "title": "Simple Detector (SEGS)" | |
| } | |
| }, | |
| "402": { | |
| "inputs": { | |
| "upscale_model": [ | |
| "403", | |
| 0 | |
| ], | |
| "image": [ | |
| "389", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "ImageUpscaleWithModel", | |
| "_meta": { | |
| "title": "Upscale Image (using Model)" | |
| } | |
| }, | |
| "403": { | |
| "inputs": { | |
| "model_name": "4xUltrasharp_4xUltrasharpV10.pt" | |
| }, | |
| "class_type": "UpscaleModelLoader", | |
| "_meta": { | |
| "title": "Load Upscale Model" | |
| } | |
| }, | |
| "404": { | |
| "inputs": { | |
| "width": 2048, | |
| "height": 2048, | |
| "interpolation": "lanczos", | |
| "method": "keep proportion", | |
| "condition": "always", | |
| "multiple_of": 8, | |
| "image": [ | |
| "402", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "ImageResize+", | |
| "_meta": { | |
| "title": "🔧 Image Resize" | |
| } | |
| }, | |
| "405": { | |
| "inputs": { | |
| "strength": 1.2, | |
| "start_percent": 0, | |
| "end_percent": 1, | |
| "segs": [ | |
| "398", | |
| 0 | |
| ], | |
| "control_net": [ | |
| "163", | |
| 0 | |
| ], | |
| "segs_preprocessor": [ | |
| "406", | |
| 0 | |
| ], | |
| "control_image": [ | |
| "78", | |
| 0 | |
| ], | |
| "vae": [ | |
| "380", | |
| 2 | |
| ] | |
| }, | |
| "class_type": "ImpactControlNetApplyAdvancedSEGS", | |
| "_meta": { | |
| "title": "ControlNetApply (SEGS)" | |
| } | |
| }, | |
| "406": { | |
| "inputs": { | |
| "low_threshold": 0.2, | |
| "high_threshold": 0.89 | |
| }, | |
| "class_type": "Canny_Preprocessor_Provider_for_SEGS //Inspire", | |
| "_meta": { | |
| "title": "Canny Preprocessor Provider (SEGS)" | |
| } | |
| }, | |
| "409": { | |
| "inputs": { | |
| "guide_size": 1024, | |
| "guide_size_for": true, | |
| "max_size": 1024, | |
| "seed": [ | |
| "240", | |
| 0 | |
| ], | |
| "steps": 8, | |
| "cfg": 4, | |
| "sampler_name": "dpmpp_2m_sde_gpu", | |
| "scheduler": "karras", | |
| "denoise": 0.25, | |
| "feather": 5, | |
| "noise_mask": true, | |
| "force_inpaint": true, | |
| "wildcard": "", | |
| "cycle": 1, | |
| "inpaint_model": false, | |
| "noise_mask_feather": 20, | |
| "image": [ | |
| "277", | |
| 0 | |
| ], | |
| "segs": [ | |
| "411", | |
| 0 | |
| ], | |
| "model": [ | |
| "381", | |
| 0 | |
| ], | |
| "clip": [ | |
| "380", | |
| 1 | |
| ], | |
| "vae": [ | |
| "380", | |
| 2 | |
| ], | |
| "positive": [ | |
| "424", | |
| 0 | |
| ], | |
| "negative": [ | |
| "161", | |
| 1 | |
| ] | |
| }, | |
| "class_type": "DetailerForEach", | |
| "_meta": { | |
| "title": "Detailer (SEGS)" | |
| } | |
| }, | |
| "410": { | |
| "inputs": { | |
| "bbox_threshold": 0.5, | |
| "bbox_dilation": 0, | |
| "crop_factor": 3, | |
| "drop_size": 10, | |
| "sub_threshold": 0.5, | |
| "sub_dilation": 0, | |
| "sub_bbox_expansion": 0, | |
| "sam_mask_hint_threshold": 0.7, | |
| "post_dilation": 0, | |
| "bbox_detector": [ | |
| "159", | |
| 0 | |
| ], | |
| "image": [ | |
| "277", | |
| 0 | |
| ], | |
| "sam_model_opt": [ | |
| "160", | |
| 0 | |
| ] | |
| }, | |
| "class_type": "ImpactSimpleDetectorSEGS", | |
| "_meta": { | |
| "title": "Simple Detector (SEGS)" | |
| } | |
| }, | |
| "411": { | |
| "inputs": { | |
| "strength": 1.2, | |
| "start_percent": 0, | |
| "end_percent": 1, | |
| "segs": [ | |
| "410", | |
| 0 | |
| ], | |
| "control_net": [ | |
| "163", | |
| 0 | |
| ], | |
| "segs_preprocessor": [ | |
| "406", | |
| 0 | |
| ], | |
| "control_image": [ | |
| "78", | |
| 0 | |
| ], | |
| "vae": [ | |
| "380", | |
| 2 | |
| ] | |
| }, | |
| "class_type": "ImpactControlNetApplyAdvancedSEGS", | |
| "_meta": { | |
| "title": "ControlNetApply (SEGS)" | |
| } | |
| }, | |
| "423": { | |
| "inputs": { | |
| "width": 1024, | |
| "height": 1024, | |
| "crop_w": 0, | |
| "crop_h": 0, | |
| "target_width": 1024, | |
| "target_height": 1024, | |
| "text_g": "", | |
| "text_l": "", | |
| "clip": [ | |
| "380", | |
| 1 | |
| ] | |
| }, | |
| "class_type": "CLIPTextEncodeSDXL", | |
| "_meta": { | |
| "title": "CLIPTextEncodeSDXL" | |
| } | |
| }, | |
| "424": { | |
| "inputs": { | |
| "width": 1024, | |
| "height": 1024, | |
| "crop_w": 0, | |
| "crop_h": 0, | |
| "target_width": 1024, | |
| "target_height": 1024, | |
| "text_g": "", | |
| "text_l": "", | |
| "clip": [ | |
| "380", | |
| 1 | |
| ] | |
| }, | |
| "class_type": "CLIPTextEncodeSDXL", | |
| "_meta": { | |
| "title": "CLIPTextEncodeSDXL" | |
| } | |
| } | |
| } |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment