{"icons": [
{"id":"","label":"🎲️","localized":"","reload":"","hint":"Use random seed"},
{"id":"","label":"🔄","localized":"","reload":"","hint":"Reset values"},
{"id":"","label":"⬆️","localized":"","reload":"","hint":"Upload image"},
{"id":"","label":"⬅️","localized":"","reload":"","hint":"Reuse image"},
{"id":"","label":"⇅","localized":"","reload":"","hint":"Swap values"},
{"id":"","label":"⇨","localized":"","reload":"","hint":"Apply preset to Manual Block Merge tab"},
{"id":"","label":"🕮","localized":"","reload":"","hint":"Save parameters from last generated image as style template"},
{"id":"","label":"⇕","localized":"","reload":"","hint":"Sort by: Name asc/desc, Size largest/smallest, Time newest/oldest"},
{"id":"","label":"⟲","localized":"","reload":"","hint":"Refresh"},
{"id":"","label":"✕","localized":"","reload":"","hint":"Close"},
{"id":"","label":"⊜","localized":"","reload":"","hint":"Fill"},
{"id":"","label":"※","localized":"","reload":"","hint":"Load model as refiner model when selected, otherwise load as base model"},
{"id":"","label":"🔎︎","localized":"","reload":"","hint":"Scan CivitAI for missing metadata and previews"},
{"id":"","label":"☲","localized":"","reload":"","hint":"Change view type"},
{"id":"","label":"⊗","localized":"","reload":"","hint":"Reset values"},
{"id":"","label":"📐","localized":"","reload":"","hint":"Measure"},
{"id":"","label":"🔍","localized":"","reload":"","hint":"Search"},
{"id":"","label":"🖌️","localized":"","reload":"","hint":"Remove selected object from image using LaMa"},
{"id":"","label":"🖼️","localized":"","reload":"","hint":"Show preview"},
{"id":"","label":"","localized":"","reload":"","hint":"Interrogate image"},
{"id":"","label":"⁜","localized":"","reload":"","hint":"Cycle image fit method"},
{"id":"","label":"↶","localized":"","reload":"","hint":"Apply selected style to prompt"},
{"id":"","label":"↷","localized":"","reload":"","hint":"Save current prompt to style"},
{"id":"","label":"","localized":"","reload":"","hint":"Sort by name, ascending"},
{"id":"","label":"","localized":"","reload":"","hint":"Sort by name, descending"},
{"id":"","label":"","localized":"","reload":"","hint":"Sort by size, ascending"},
{"id":"","label":"","localized":"","reload":"","hint":"Sort by size, descending"},
{"id":"","label":"","localized":"","reload":"","hint":"Sort by resolution, ascending"},
{"id":"","label":"","localized":"","reload":"","hint":"Sort by resolution, descending"},
{"id":"","label":"","localized":"","reload":"","hint":"Sort by time, ascending"},
{"id":"","label":"","localized":"","reload":"","hint":"Sort by time, descending"}
],
"main": [
{"id":"","label":"Prompt","localized":"","reload":"","hint":"Describe image you want to generate"},
{"id":"","label":"VLM: Prompt","localized":"Prompt","reload":"","hint":"Enter your prompt/question here."},
{"id":"","label":"VLM: Advanced Options","localized":"Advanced Options","reload":"","hint":"Advanced configuration options for the VLM model."},
{"id":"","label":"VLM: Batch Caption","localized":"Batch Caption","reload":"","hint":"Process multiple images in a batch using VLM."},
{"id":"","label":"CLiP: Advanced Options","localized":"Advanced Options","reload":"","hint":"Advanced configuration options for CLiP interrogation."},
{"id":"","label":"CLiP: Batch Interrogate","localized":"Batch Interrogate","reload":"","hint":"Process multiple images in a batch using CLiP."},
{"id":"","label":"Task","localized":"","reload":"","hint":"Changes which task the model will perform. Regular text prompts can be used when the task is set to <b>Use Prompt</b>.<br>When other options are selected, see the hint text inside an empty <b>Prompt</b> field for guidance."},
{"id":"","label":"Prefill text","localized":"","reload":"","hint":"Pre-fills the start of the model's response to guide its output format or content by forcing it to continue the prefill text.<br>Prefill is filtered out and does not appear in the final response.<br><br>Leave empty to let the model generate its own response from scratch."},
{"id":"","label":"Start","localized":"","reload":"","hint":"Start"},
{"id":"","label":"End","localized":"","reload":"","hint":"End"},
{"id":"","label":"Core","localized":"","reload":"","hint":"Core settings"},
{"id":"","label":"System prompt","localized":"","reload":"","hint":"System prompt controls behavior of the LLM. Processed first and persists throughout conversation. Has highest priority weighting and is always appended at the beginning of the sequence.<br><br>Use for: Response formatting rules, role definition, style."},
{"id":"","label":"Negative prompt","localized":"","reload":"","hint":"Describe what you don't want to see in generated image"},
{"id":"","label":"Text","localized":"","reload":"","hint":"Create image from text"},
{"id":"","label":"Image","localized":"","reload":"","hint":"Create image from image"},
{"id":"","label":"Control","localized":"","reload":"","hint":"Create image with full guidance"},
{"id":"","label":"Images","localized":"","reload":"","hint":"Create images<br>Unified interface<br>Supports T2I and I2I<br>With optional control guidance"},
{"id":"","label":"T2I","localized":"","reload":"","hint":"Create image from text<br>Legacy interface that mimics original text-to-image interface and behavior"},
{"id":"","label":"I2I","localized":"","reload":"","hint":"Create image from image<br>Legacy interface that mimics original image-to-image interface and behavior"},
{"id":"","label":"Process","localized":"","reload":"","hint":"Process existing image<br>Can be used to upscale images, remove backgrounds, obfuscate NSFW content, apply various filters and effects"},
{"id":"","label":"Caption","localized":"","reload":"","hint":"Analyze existing images and create text descriptions"},
{"id":"","label":"clip: min length","localized":"Min Length","reload":"","hint":"Minimum number of tokens in the generated caption."},
{"id":"","label":"clip: max length","localized":"Max Length","reload":"","hint":"Maximum number of tokens in the generated caption."},
{"id":"","label":"clip: chunk size","localized":"Chunk Size","reload":"","hint":"Batch size for processing description candidates (flavors). Higher values speed up interrogation but increase VRAM usage."},
{"id":"","label":"clip: min flavors","localized":"Min Flavors","reload":"","hint":"Minimum number of descriptive tags (flavors) to keep in the final prompt."},
{"id":"","label":"clip: max flavors","localized":"Max Flavors","reload":"","hint":"Maximum number of descriptive tags (flavors) to keep in the final prompt."},
{"id":"","label":"clip: intermediates","localized":"Intermediates","reload":"","hint":"Size of the intermediate candidate pool when matching image features to descriptive tags (flavours). From this pool, the final tags are selected based on Min/Max Flavors. Higher values may improve quality but are slower."},
{"id":"","label":"clip: num beams","localized":"CLiP Num Beams","reload":"","hint":"Number of beams for beam search during caption generation. Higher values search more possibilities but are slower."},
{"id":"","label":"Interrogate","localized":"","reload":"","hint":"Run interrogate to get description of your image"},
{"id":"","label":"Models","localized":"","reload":"","hint":"Download, convert or merge your models and manage model metadata"},
{"id":"","label":"Sampler","localized":"","reload":"","hint":"Settings related to sampler and seed selection and configuration. Samplers guide the process of turning noise into an image over multiple steps."},
{"id":"","label":"Agent Scheduler","localized":"","reload":"","hint":"Enqueue your generate requests and run them in the background"},
{"id":"","label":"AgentScheduler","localized":"","reload":"","hint":"Enqueue your generate requests and run them in the background"},
{"id":"","label":"System","localized":"","reload":"","hint":"System settings and information"},
{"id":"","label":"System Info","localized":"","reload":"","hint":"System information"},
{"id":"","label":"Settings","localized":"","reload":"","hint":"Application settings"},
{"id":"","label":"Script","localized":"","reload":"","hint":"Additional scripts to be used"},
{"id":"","label":"Generate","localized":"","reload":"","hint":"Start processing"},
{"id":"","label":"Generate forever","localized":"","reload":"","hint":"Start processing and continue until cancelled"},
{"id":"","label":"Enqueue","localized":"","reload":"","hint":"Add task to background queue in Agent Scheduler"},
{"id":"","label":"Reprocess","localized":"","reload":"","hint":"Reprocess previous generations using different parameters"},
{"id":"","label":"Stop","localized":"","reload":"","hint":"Stop processing"},
{"id":"","label":"Skip","localized":"","reload":"","hint":"Stop processing current job and continue processing"},
{"id":"","label":"Pause","localized":"","reload":"","hint":"Pause processing"},
{"id":"","label":"Restore","localized":"","reload":"","hint":"Restore parameters from current prompt or last known generated image"},
{"id":"","label":"Clear","localized":"","reload":"","hint":"Clear prompts"},
{"id":"","label":"Networks","localized":"","reload":"","hint":"Networks user interface"},
{"id":"","label":"Default strength","localized":"","reload":"","hint":"When adding extra network such as Lora to prompt, use this multiplier for it"},
{"id":"","label":"Upscale","localized":"","reload":"","hint":"Upscale image"},
{"id":"","label":"Model","localized":"","reload":"","hint":"Base model"},
{"id":"","label":"Prompts","localized":"","reload":"","hint":"Image prompt and negative prompt"},
{"id":"","label":"Base","localized":"","reload":"","hint":"Base settings used to run image generation"},
{"id":"","label":"Style","localized":"","reload":"","hint":"Additional styles to be applied on selected generation parameters"},
{"id":"","label":"Styles","localized":"","reload":"","hint":"Additional styles to be applied on selected generation parameters"},
{"id":"","label":"Lora","localized":"","reload":"","hint":"LoRA: Low-Rank Adaptation. Fine-tuned model that is applied on top of a loaded model"},
{"id":"","label":"Embedding","localized":"","reload":"","hint":"Textual inversion embedding is a trained embedding that encodes information about the subject"},
{"id":"","label":"Hypernetwork","localized":"","reload":"","hint":"Small trained neural network that modifies behavior of the loaded model"},
{"id":"","label":"VLM Caption","localized":"","reload":"","hint":"Analyze image using vision language model"},
{"id":"","label":"CLiP Interrogate","localized":"","reload":"","hint":"Analyze image using CLiP model"},
{"id":"","label":"VAE","localized":"","reload":"","hint":"Variational Auto Encoder: model used to run image decode at the end of generate"},
{"id":"","label":"History","localized":"","reload":"","hint":"List of previous generations that can be further reprocessed"},
{"id":"","label":"UI disable variable aspect ratio","localized":"","reload":"","hint":"When disabled, all thumbnails appear as squared images"},
{"id":"","label":"Build info on first access","localized":"","reload":"","hint":"Prevents the server from building the EN page on server startup and instead builds it when requested"},
{"id":"","label":"Show reference styles","localized":"","reload":"","hint":"Show or hide built-in styles"},
{"id":"","label":"LoRA load using Diffusers method","localized":"","reload":"","hint":"Alternative method uses diffusers built-in LoRA capabilities instead of native SD.Next implementation (may reduce LoRA compatibility)"},
{"id":"","label":"LoRA native fuse with model","localized":"","reload":"","hint":"Merge LoRA into the model for lower memory usage.<br><br><b style=\"color: #ef4444\">Warning:</b> After removing or switching a LoRA, you may still see its style in generated images. To get a clean model, reload it from the model selector."},
{"id":"","label":"LoRA memory cache","localized":"","reload":"","hint":"How many LoRA networks to keep cached for future use before they need to be reloaded from storage"},
{"id":"","label":"LoRA force reload always","localized":"","reload":"","hint":"Forces LoRA networks to reload from storage on every generation, even if already cached.<br>Useful for debugging or when LoRA files are being modified externally.<br>Disable for normal use to benefit from caching."},
{"id":"","label":"LoRA diffusers fuse with model","localized":"","reload":"","hint":"Merge LoRA into the model for lower memory usage and torch.compile compatibility.<br><br><b style=\"color: #ef4444\">Warning:</b> After removing or switching a LoRA, you may still see its style in generated images. To get a clean model, reload it from the model selector."},
{"id":"","label":"LoRA precision when quantized","localized":"","reload":"","hint":"When using a BnB 4-bit model, LoRA is applied by decompressing the weights, adding the LoRA, then recompressing. This controls the format used for recompression.<br><br>Only affects BnB 4-bit models. SDNQ models keep their original format."},
{"id":"","label":"Local","localized":"","reload":"","hint":"Models that are downloaded and ready to use"},
{"id":"","label":"Gallery","localized":"","reload":"","hint":"Image gallery"},
{"id":"","label":"Reference","localized":"","reload":"","hint":"List of reference models that can be automatically downloaded on first use"},
{"id":"","label":"Samplers","localized":"","reload":"","hint":"Samplers/schedulers advanced settings"},
{"id":"","label":"Seed","localized":"","reload":"","hint":"Initial seed and variation"},
{"id":"","label":"Advanced","localized":"","reload":"","hint":"Advanced settings used to run image generation"},
{"id":"","label":"Scripts","localized":"","reload":"","hint":"Enable additional features by using selected scripts during generate process"},
{"id":"","label":"Corrections","localized":"","reload":"","hint":"Control image color/sharpness/brightness corrections during generate process"},
{"id":"","label":"Parameters","localized":"","reload":"","hint":"Base parameters used during image generation"},
{"id":"","label":"Refine","localized":"","reload":"","hint":"Refine runs additional processing after the initial pass has completed and can be used to upscale the image and optionally process it again to increase quality and details"},
{"id":"","label":"Detailer","localized":"","reload":"","hint":"Detailer runs an additional generate pass at higher resolution for detected objects"},
{"id":"","label":"Resize","localized":"","reload":"","hint":"Image resizing; can use a fixed resolution or be based on scale"},
{"id":"","label":"Batch","localized":"","reload":"","hint":"Batch processing settings"},
{"id":"","label":"Denoise","localized":"","reload":"","hint":"Denoising settings. Higher denoise means that more of existing image content is allowed to change during generate"},
{"id":"","label":"Mask","localized":"","reload":"","hint":"Image masking and mask options"},
{"id":"","label":"Input","localized":"","reload":"","hint":"Selection of input media"},
{"id":"","label":"Video","localized":"","reload":"","hint":"Create videos using different methods<br>Supports text-to-image, image-to-image first-last-frame, etc."},
{"id":"","label":"Control elements","localized":"","reload":"","hint":"Control elements are advanced models that can guide generation towards desired outcome"},
{"id":"","label":"IP adapter","localized":"","reload":"","hint":"Guide generation towards desired outcome using IP adapters plugin models"},
{"id":"","label":"IP adapters","localized":"","reload":"","hint":"IP adapters are plugin models that can guide generation towards desired outcome"},
{"id":"","label":"Extensions","localized":"","reload":"","hint":"Application extensions"},
{"id":"","label":"XYZ Grid","localized":"","reload":"","hint":"XYZ grid is a powerful module that create image grid based on varying multiple generation parameters"},
{"id":"","label":"Cover","localized":"","reload":"","hint":"cover full area"},
{"id":"","label":"Inline","localized":"","reload":"","hint":"inline with all additional elements (scrollable)"},
{"id":"","label":"Sidebar","localized":"","reload":"","hint":"sidebar on the right side of the screen"},
{"id":"","label":"SD15","localized":"","reload":"","hint":"StableDiffusion 1.5"},
{"id":"","label":"SD21","localized":"","reload":"","hint":"StableDiffusion 2.1"},
{"id":"","label":"SD35","localized":"","reload":"","hint":"StableDiffusion 3.5"},
{"id":"","label":"SDXL","localized":"","reload":"","hint":"StableDiffusion XL"},
{"id":"","label":"SC","localized":"","reload":"","hint":"StableCascade"},
{"id":"","label":"Flux","localized":"","reload":"","hint":"FLUX.1"},
{"id":"","label":"Show","localized":"","reload":"","hint":"Show image location"},
{"id":"","label":"Save","localized":"","reload":"","hint":"Save image"},
{"id":"","label":"Delete","localized":"","reload":"","hint":"Delete image"},
{"id":"","label":"Replace","localized":"","reload":"","hint":"Replace image"},
{"id":"","label":"List","localized":"","reload":"","hint":"List all available models"},
{"id":"","label":"Metadata","localized":"","reload":"","hint":"Update metadata for all available models"},
{"id":"","label":"Loader","localized":"","reload":"","hint":"Allows manually assembling a diffusion model from individual modules"},
{"id":"","label":"➠ Text","localized":"","reload":"","hint":"Transfer image to text interface"},
{"id":"","label":"➠ Image","localized":"","reload":"","hint":"Transfer image to image interface"},
{"id":"","label":"➠ Inpaint","localized":"","reload":"","hint":"Transfer image to inpaint interface"},
{"id":"","label":"➠ Sketch","localized":"","reload":"","hint":"Transfer image to sketch interface"},
{"id":"","label":"➠ Composite","localized":"","reload":"","hint":"Transfer image to inpaint sketch interface"},
{"id":"","label":"➠ Process","localized":"","reload":"","hint":"Transfer image to process interface"},
{"id":"","label":"➠ Control","localized":"","reload":"","hint":"Transfer image to control interface"},
{"id":"","label":"➠ Caption","localized":"","reload":"","hint":"Transfer image to caption interface"}
],
"generate": [
{"id":"","label":"Sampling method","localized":"","reload":"","hint":"Which algorithm to use to produce the image"},
{"id":"","label":"Steps","localized":"","reload":"","hint":"How many times to improve the generated image iteratively; higher values take longer; very low values can produce bad results"},
{"id":"","label":"Tiling","localized":"","reload":"","hint":"Produce an image that can be tiled"},
{"id":"","label":"Full quality","localized":"","reload":"","hint":"Use full quality VAE to decode latent samples"},
{"id":"","label":"HiDiffusion","localized":"","reload":"","hint":"HiDiffusion allows creation of high-resolution images using your standard models without duplicates/distortions and with improved performance"},
{"id":"","label":"HDR Clamp","localized":"","reload":"","hint":"Adjusts the level of nonsensical details by pruning values that deviate significantly from the distribution mean. It is particularly useful for enhancing generation at higher guidance scales, identifying outliers early in the process and applying mathematical adjustments based on the Range (Boundary) and Threshold settings. Think of it as setting the range within which you want your image values to be, and adjusting the threshold determines which values should be brought back into that range"},
{"id":"","label":"HDR Maximize","localized":"","reload":"","hint":"Calculates a 'normalization factor' by dividing the maximum tensor value by the specified range multiplied by 4. This factor is then used to shift the channels within the given boundary, ensuring maximum dynamic range for subsequent processing. The objective is to optimize dynamic range for external applications like Photoshop, particularly for adjusting levels, contrast, and brightness"},
{"id":"","label":"Enable refine pass","localized":"","reload":"","hint":"Use a similar process as image to image to upscale and/or add detail to the final image. Optionally uses refiner model to enhance image details."},
{"id":"","label":"Enable detailer pass","localized":"","reload":"","hint":"Detect target objects such as face and reprocess it at higher resolution"},
{"id":"","label":"Include detections","localized":"","reload":"","hint":"Include original image with detected areas marked"},
{"id":"","label":"Sort detections","localized":"","reload":"","hint":"Sort detected areas from left to right instead of by detection score"},
{"id":"","label":"Denoising strength","localized":"","reload":"","hint":"Determines how little respect the algorithm should have for image's content. At 0, nothing will change, and at 1 you'll get an unrelated image. With values below 1.0, processing will take less steps than the Sampling Steps slider specifies"},
{"id":"","label":"Denoise start","localized":"","reload":"","hint":"Override denoise strength by stating how early base model should finish and when refiner should start. Only applicable to refiner usage. If set to 0 or 1, denoising strength will be used"},
{"id":"","label":"Hires steps","localized":"","reload":"","hint":"Number of sampling steps for upscaled picture. If 0, uses same as for original"},
{"id":"","label":"Strength","localized":"","reload":"","hint":"Denoising strength during image operations controls how much of the original image is allowed to change during generate"},
{"id":"","label":"Upscaler","localized":"","reload":"","hint":"Which pre-trained model to use for the upscaling process."},
{"id":"","label":"Force Hires","localized":"","reload":"","hint":"Hires runs automatically when Latent upscale is selected, but it's skipped when using non-latent upscalers. Enable force hires to run hires with non-latent upscalers"},
{"id":"","label":"Resize width","localized":"","reload":"","hint":"Resizes image to this width. If 0, width is inferred from either of two nearby sliders"},
{"id":"","label":"Resize height","localized":"","reload":"","hint":"Resizes image to this height. If 0, height is inferred from either of two nearby sliders"},
{"id":"","label":"Refine sampler","localized":"","reload":"","hint":"Use specific sampler as fallback sampler if primary is not supported for specific operation"},
{"id":"","label":"Refiner start","localized":"","reload":"","hint":"Refiner pass will start when base model is this much complete (set to larger than 0 and smaller than 1 to run after full base model run)"},
{"id":"","label":"Refiner steps","localized":"","reload":"","hint":"Number of steps to use for refiner pass"},
{"id":"","label":"Refine guidance","localized":"","reload":"","hint":"CFG scale used for refiner pass"},
{"id":"","label":"Input media","localized":"","reload":"","hint":"Add input image to be used for image-to-image, inpaint or control processing"},
{"id":"","label":"Control media","localized":"","reload":"","hint":"Add input image as separate initialization image for control processing"},
{"id":"","label":"Processed preview","localized":"","reload":"","hint":"Display results from pre-processing of input images before actual generate"},
{"id":"","label":"Attention guidance","localized":"","reload":"","hint":"CFG scale used with PAG: Perturbed-Attention Guidance"},
{"id":"","label":"Adaptive scaling","localized":"","reload":"","hint":"Adaptive modifier for attention guidance scale"},
{"id":"","label":"Rescale guidance","localized":"","reload":"","hint":"Rescale CFG generated noise to avoid overexposed images"},
{"id":"","label":"Refine Prompt","localized":"","reload":"","hint":"Prompt used for both second encoder in base model (if it exists) and for refiner pass (if enabled)"},
{"id":"","label":"Refine negative prompt","localized":"","reload":"","hint":"Negative prompt used for both second encoder in base model (if it exists) and for refiner pass (if enabled)"},
{"id":"","label":"Initial","localized":"","reload":"","hint":"Set image resolution before processing"},
{"id":"","label":"Post","localized":"","reload":"","hint":"Resize image after processing"},
{"id":"","label":"Width","localized":"","reload":"","hint":"Image width"},
{"id":"","label":"Height","localized":"","reload":"","hint":"Image height"},
{"id":"","label":"Batch count","localized":"","reload":"","hint":"How many batches of images to create (has no impact on generation performance or VRAM usage)"},
{"id":"","label":"Batch size","localized":"","reload":"","hint":"How many images to create in a single batch (increases generation performance at cost of higher VRAM usage)"},
{"id":"","label":"Guidance scale","localized":"","reload":"","hint":"Classifier Free Guidance scale: how strongly the image should conform to prompt. Lower values produce more creative results, higher values make it follow the prompt more strictly; recommended values between 5-10"},
{"id":"","label":"Guidance rescale","localized":"","reload":"","hint":"Rescale guidance to avoid overexposed images at higher guidance values"},
{"id":"","label":"Guidance End","localized":"","reload":"","hint":"Ends the effect of CFG and PAG early: A value of 1 acts as normal, 0.5 stops guidance at 50% of steps"},
{"id":"","label":"Initial seed","localized":"","reload":"","hint":"A value that determines the output of random number generator - if you create an image with same parameters and seed as another image, you'll get the same result"},
{"id":"","label":"Variation","localized":"","reload":"","hint":"Second seed to be mixed with primary seed"},
{"id":"","label":"Variation strength","localized":"","reload":"","hint":"How strong of a variation to produce. At 0, there will be no effect. At 1, you will get the complete picture with variation seed (except for ancestral samplers, where you will just get something)"},
{"id":"","label":"Resize method","localized":"","reload":"","hint":"Method used to resize the image: can be simple resize, upscaling model, latent resize or asymmetric decode"},
{"id":"","label":"Resize seed from width","localized":"","reload":"","hint":"Make an attempt to produce a picture similar to what would have been produced with same seed at specified resolution"},
{"id":"","label":"Resize seed from height","localized":"","reload":"","hint":"Make an attempt to produce a picture similar to what would have been produced with same seed at specified resolution"},
{"id":"","label":"Fixed","localized":"","reload":"","hint":"Resize image to target resolution. Unless height and width match, you will get incorrect aspect ratio"},
{"id":"","label":"scale","localized":"","reload":"","hint":"Resize image to target scale. If resize fixed width/height are set this option is ignored"},
{"id":"","label":"Crop","localized":"","reload":"","hint":"Resize the image so that entirety of target resolution is filled with the image. Crop parts that stick out"},
{"id":"","label":"Fill","localized":"","reload":"","hint":"Resize the image so that entirety of image is inside target resolution. Fill empty space with image's colors"},
{"id":"","label":"Mask blur","localized":"","reload":"","hint":"How much to blur the mask before processing, in pixels"},
{"id":"","label":"Latent noise","localized":"","reload":"","hint":"fill it with latent space noise"},
{"id":"","label":"Latent nothing","localized":"","reload":"","hint":"fill it with latent space zeroes"},
{"id":"","label":"Adapters","localized":"","reload":"","hint":"Settings related to IP Adapters"},
{"id":"","label":"Inputs","localized":"","reload":"","hint":"Settings related to Input images"},
{"id":"","label":"Control input type","localized":"","reload":"","hint":"Choose which input image is used for control process"},
{"id":"","label":"Video format","localized":"","reload":"","hint":"Format and codec of output video"},
{"id":"","label":"Size & Batch","localized":"","reload":"","hint":"Image size and batch"},
{"id":"","label":"Sigma adjust","localized":"","reload":"","hint":"Adjust sampler sigma value"},
{"id":"","label":"Adjust start","localized":"","reload":"","hint":"Starting step when sigma adjust occurs"},
{"id":"","label":"Adjust end","localized":"","reload":"","hint":"Ending step when sigma adjust occurs"},
{"id":"","label":"Options","localized":"","reload":"","hint":"Options"},
{"id":"","label":"ControlNet","localized":"","reload":"","hint":"ControlNet is an advanced guidance model"},
{"id":"","label":"Processor","localized":"","reload":"","hint":"Processor type to use to preprocess image used for ControlNet"},
{"id":"","label":"Renoise","localized":"","reload":"","hint":"Apply additional noise during detailing"},
{"id":"","label":"Renoise end","localized":"","reload":"","hint":"Final step when renoise is applied"},
{"id":"","label":"Merge detailers","localized":"","reload":"","hint":"Merge results from multiple detailers into single mask before running detailing process"},
{"id":"","label":"Inpaint mode","localized":"","reload":"","hint":"Inpaint mode"},
{"id":"","label":"Inpaint area","localized":"","reload":"","hint":"Inpaint area"},
{"id":"","label":"Texture tiling","localized":"","reload":"","hint":"Apply seamless tiling to generated image so it can be used as a texture"},
{"id":"","label":"Override","localized":"","reload":"","hint":"Override settings that can change server behavior and are typically applied from imported image metadata"},
{"id":"","label":"VAE type","localized":"","reload":"","hint":"Choose if you want to run full VAE, reduced quality VAE or attempt to use remote VAE service"},
{"id":"","label":"Guess Mode","localized":"","reload":"","hint":"Removes the requirement to supply a prompt to a ControlNet. It forces the ControlNet encoder to do its 'best guess' based on the contents of the input control map."},
{"id":"","label":"Control Only","localized":"","reload":"","hint":"This uses only the Control input below as the source for any ControlNet or IP Adapter type tasks based on any of our various options."},
{"id":"","label":"Init Image Same As Control","localized":"","reload":"","hint":"Will additionally treat any image placed into the Control input window as a source for img2img type tasks, an image to modify for example."},
{"id":"","label":"Separate Init Image","localized":"","reload":"","hint":"Creates an additional window next to Control input labeled Init input, so you can have a separate image for both Control operations and an init source."},
{"id":"","label":"Override settings","localized":"","reload":"","hint":"If generation parameters deviate from your system settings, override settings are populated with those values to override your system configuration for this workflow"},
{"id":"","label":"sigma method","localized":"","reload":"","hint":"Controls how noise levels (sigmas) are distributed across diffusion steps. Options:<br>- default: the model default<br>- karras: smoother noise schedule, higher quality with fewer steps<br>- beta: based on beta schedule values<br>- exponential: exponential decay of noise<br>- lambdas: experimental, balances signal-to-noise<br>- flowmatch: tuned for flow-matching models"},
{"id":"","label":"timestep spacing","localized":"","reload":"","hint":"Determines how timesteps are spaced across the diffusion process. Options:<br>- default: the model default<br>- leading: creates evenly spaced steps<br>- linspace: includes the first and last steps and evenly selects the remaining intermediate steps<br>- trailing: only includes the last step and evenly selects the remaining intermediate steps starting from the end"},
{"id":"","label":"beta schedule","localized":"","reload":"","hint":"Defines how beta (noise strength per step) grows. Options:<br>- default: the model default<br>- linear: evenly decays noise per step<br>- scaled: squared version of linear, used only by Stable Diffusion<br>- cosine: smoother decay, often better results with fewer steps<br>- sigmoid: sharp transition, experimental"},
{"id":"","label":"prediction method","localized":"","reload":"","hint":"Defines what the model predicts at each step. Options:<br>- default: the model default<br>- epsilon: noise (most common for Stable Diffusion)<br>- sample: direct denoised image prediction, also called x0 prediction<br>- v_prediction: velocity prediction, used by CosXL and NoobAI VPred models<br>- flow_prediction: used with newer flow-matching models like SD3 and Flux"},
{"id":"","label":"sampler order","localized":"","reload":"","hint":"Order of solver updates in the sampler. Higher order improves stability/accuracy but increases compute cost."},
{"id":"","label":"flow shift","localized":"","reload":"","hint":"Shift value for flowmatching models. Controls the distribution of denoising steps.<br><br>Values:<br>- >1.0: allocate more steps to early denoising (better structure)<br>- <1.0: allocate more steps to late denoising (better fine details)<br>- 1.0: balanced schedule<br><br>Most flowmatching models use the value of 3 as default. Effectively inactive if dynamic shift is enabled."},
{"id":"","label":"dynamic","localized":"","reload":"","hint":"Dynamic shifting automatically adjusts the denoising schedule based on your image resolution.<br><br>The scheduler interpolates between base_shift and max_shift based on actual image resolution.<br><br>Enabling disables static Flow shift."},
{"id":"","label":"base shift","localized":"","reload":"","hint":"Minimum shift value for low resolutions when using dynamic shifting."},
{"id":"","label":"max shift","localized":"","reload":"","hint":"Maximum shift value for high resolutions when using dynamic shifting."},
{"id":"","label":"resize mode","localized":"","reload":"","hint":"Defines how the input is resized or adapted in second-pass refinement:<br>- none: no resizing, keep original resolution<br>- fixed: force resize to target resolution (may distort)<br>- crop: center-crop to fit target while keeping aspect ratio<br>- fill: resize to fit and pad empty space with borders<br>- outpaint: extend canvas beyond image borders<br>- context aware: smart resize that blends or adapts surrounding areas"}
],
"other": [
{"id":"","label":"Install","localized":"","reload":"","hint":"Install"},
{"id":"","label":"Search","localized":"","reload":"","hint":"Search"},
{"id":"","label":"Sort by","localized":"","reload":"","hint":"Sort by"},
{"id":"","label":"Nudenet","localized":"","reload":"","hint":"Flexible extension that can detect and obfuscate nudity in images"},
{"id":"","label":"Prompt enhance","localized":"","reload":"","hint":"Extension that can use different LLMs to rewrite prompt for improved results"},
{"id":"","label":"Enhance now","localized":"","reload":"","hint":"Run prompt enhancement using the selected LLM model"},
{"id":"","label":"Apply to prompt","localized":"","reload":"","hint":"Automatically copy enhanced result to the prompt input box"},
{"id":"","label":"Auto enhance","localized":"","reload":"","hint":"Automatically enhance prompt before every image generation"},
{"id":"","label":"Use vision","localized":"","reload":"","hint":"Include input image when enhancing prompt.<br><br>Only available for vision-capable models, marked with \uf06e icon."},
{"id":"","label":"LLM model","localized":"","reload":"","hint":"Select the language model to use for prompt enhancement.<br><br>Models supporting vision are marked with \uf06e icon.<br>Models supporting thinking mode are marked with \uf0eb icon."},
{"id":"","label":"Model repo","localized":"","reload":"","hint":"HuggingFace repository ID for the model"},
{"id":"","label":"Model gguf","localized":"","reload":"","hint":"Optional GGUF quantized model repository on HuggingFace"},
{"id":"","label":"Model type","localized":"","reload":"","hint":"Optional GGUF model quantization type"},
{"id":"","label":"Model file","localized":"","reload":"","hint":"Optional specific GGUF model file inside the repository"},
{"id":"","label":"Load custom model","localized":"","reload":"","hint":"Load a custom model with the specified configuration"},
{"id":"","label":"NSFW allowed","localized":"","reload":"","hint":"Allow the model to generate adult content in enhanced prompts"},
{"id":"","label":"Prompt prefix","localized":"","reload":"","hint":"Text prepended at the beginning of the enhanced prompt result.<br><br>Useful for adding prompt elements which need to be copied to the image prompt unchanged, like quality tags 'masterpiece, best quality' or artist names, which would otherwise be rewritten by the LLM."},
{"id":"","label":"Prompt suffix","localized":"","reload":"","hint":"Text appended to the end of the enhanced prompt result.<br><br>Useful for adding prompt elements which need to be copied to the image prompt unchanged, which would otherwise be rewritten by the LLM."},
{"id":"","label":"Enhanced prompt","localized":"","reload":"","hint":"The enhanced prompt output from the LLM"},
{"id":"","label":"Set prompt","localized":"","reload":"","hint":"Copy the enhanced prompt to the main prompt input"},
{"id":"","label":"Manage extensions","localized":"","reload":"","hint":"Manage extensions"},
{"id":"","label":"Manual install","localized":"","reload":"","hint":"Manually install extension"},
{"id":"","label":"Extension GIT repository URL","localized":"","reload":"","hint":"Specify extension repository URL on GitHub"},
{"id":"","label":"Specific branch name","localized":"","reload":"","hint":"Specify extension branch name, leave blank for default"},
{"id":"","label":"Local directory name","localized":"","reload":"","hint":"Directory where to install extension, leave blank for default"},
{"id":"","label":"Refresh extension list","localized":"","reload":"","hint":"Refresh list of available extensions"},
{"id":"","label":"Update all installed","localized":"","reload":"","hint":"Update installed extensions to their latest available version"},
{"id":"","label":"Apply changes","localized":"","reload":"","hint":"Apply all changes and restart server"},
{"id":"","label":"Uninstall","localized":"","reload":"","hint":"uninstall this extension"},
{"id":"","label":"User interface","localized":"","reload":"","hint":"Review and set user interface preferences"},
{"id":"","label":"Set UI defaults","localized":"","reload":"","hint":"Set current values as default values for the user interface"},
{"id":"","label":"Benchmark","localized":"","reload":"","hint":"Run benchmarks"},
{"id":"","label":"Models & Networks","localized":"","reload":"","hint":"View lists of all available models and networks"},
{"id":"","label":"Restore UI defaults","localized":"","reload":"","hint":"Restore default user interface values"},
{"id":"","label":"Detailer classes","localized":"","reload":"","hint":"Specify which classes to use if the selected detailer model is a multi-class model"},
{"id":"","label":"Detailer models","localized":"","reload":"","hint":"Select detection models to use for detailing"},
{"id":"","label":"Detailer negative prompt","localized":"","reload":"","hint":"Use separate negative prompt for detailer. If not present, it will use primary negative prompt"},
{"id":"","label":"Detailer prompt","localized":"","reload":"","hint":"Use separate prompt for detailer. If not present, it will use primary prompt"},
{"id":"","label":"Detailer steps","localized":"","reload":"","hint":"Number of steps to run for detailer process"},
{"id":"","label":"Detailer strength","localized":"","reload":"","hint":"Denoising strength of detailer process"},
{"id":"","label":"Detailer use model augment","localized":"","reload":"","hint":"Run detailer detection models at extra precision"},
{"id":"","label":"Max detected","localized":"","reload":"","hint":"Maximum number of detected objects to run detailer on"},
{"id":"","label":"Edge blur","localized":"","reload":"","hint":"Blur edge of masked area by this percentage"},
{"id":"","label":"Edge padding","localized":"","reload":"","hint":"Expand edge of masked area by this percentage"},
{"id":"","label":"Min confidence","localized":"","reload":"","hint":"Minimum confidence in detected item"},
{"id":"","label":"Max overlap","localized":"","reload":"","hint":"Maximum overlap between two detected items before one is discarded"},
{"id":"","label":"Min size","localized":"","reload":"","hint":"Minimum size of detected object as percentage of overall image"},
{"id":"","label":"Max size","localized":"","reload":"","hint":"Maximum size of detected object as percentage of overall image"},
{"id":"","label":"Process Image","localized":"","reload":"","hint":"Process single image"},
{"id":"","label":"Process Batch","localized":"","reload":"","hint":"Process batch of images"},
{"id":"","label":"Process Folder","localized":"","reload":"","hint":"Process all images in a folder"},
{"id":"","label":"Current","localized":"","reload":"","hint":"Analyze modules inside currently loaded model"},
{"id":"","label":"Merge","localized":"","reload":"","hint":"Merge two or more models into a new model"},
{"id":"","label":"Modules","localized":"","reload":"","hint":"Merge and/or replace modules into an existing model"},
{"id":"","label":"Validate","localized":"","reload":"","hint":"Validate all local models"},
{"id":"","label":"CivitAI","localized":"","reload":"","hint":"Search and download models from CivitAI"},
{"id":"","label":"Scale by","localized":"","reload":"","hint":"Use this tab to resize the source image(s) by a chosen factor"},
{"id":"","label":"Scale to","localized":"","reload":"","hint":"Use this tab to resize the source image(s) to a chosen target size"},
{"id":"","label":"Input directory","localized":"","reload":"","hint":"Folder where the images are that you want to process"},
{"id":"","label":"Output directory","localized":"","reload":"","hint":"Folder where the processed images should be saved to"},
{"id":"","label":"Show result images","localized":"","reload":"","hint":"Enable to show the processed images in the image pane"},
{"id":"","label":"Crop to fit","localized":"","reload":"","hint":"If the dimensions of your source image (e.g. 512x510) deviate from your target dimensions (e.g. 1024x768) this function will fit your upscaled image into your target size image. Excess will be cropped"},
{"id":"","label":"Refine Upscaler","localized":"","reload":"","hint":"Select secondary upscaler to run after initial upscaler"},
{"id":"","label":"Upscaler 2 visibility","localized":"","reload":"","hint":"Strength of the secondary upscaler"},
{"id":"","label":"Calculate hash for all models","localized":"","reload":"","hint":"Calculates hash for all available models which may take a very long time"},
{"id":"","label":"Weights Clip","localized":"","reload":"","hint":"Forces merged weights to be no heavier than the original model, preventing burn-in and overly saturated models"},
{"id":"","label":"ReBasin","localized":"","reload":"","hint":"Performs multiple merges with permutations in order to keep more features from both models"},
{"id":"","label":"Number of ReBasin Iterations","localized":"","reload":"","hint":"Number of times to merge and permute the model before saving"},
{"id":"","label":"CPU","localized":"","reload":"","hint":"Uses CPU and RAM only: slowest but least likely to OOM"},
{"id":"","label":"Shuffle","localized":"","reload":"","hint":"Loads full model in RAM and calculates on VRAM: Less speedup, suggested for SDXL merges"},
{"id":"","label":"In Blocks","localized":"","reload":"","hint":"Downsampling Blocks of the UNet (12 values for SD1.5, 9 values for SDXL)"},
{"id":"","label":"Mid Block","localized":"","reload":"","hint":"Central Block of the UNet (1 value)"},
{"id":"","label":"Out Block","localized":"","reload":"","hint":"Upsampling Blocks of the UNet (12 values for SD1.5, 9 values for SDXL)"},
{"id":"","label":"Preset Interpolation Ratio","localized":"","reload":"","hint":"If two presets are selected, interpolate between them"},
{"id":"","label":"Adapter","localized":"","reload":"","hint":"IP adapter model"},
{"id":"","label":"Active ip adapters","localized":"","reload":"","hint":"Number of active IP adapters"},
{"id":"","label":"Unload adapter","localized":"","reload":"","hint":"Unload IP adapter immediately after generate. Otherwise IP adapter will remain loaded for faster use in next generate process"},
{"id":"","label":"Crop to portrait","localized":"","reload":"","hint":"Crop input image to portrait-only before using it as IP adapter input"},
{"id":"","label":"Layer options","localized":"","reload":"","hint":"Manually specify IP adapter advanced layer options"},
{"id":"","label":"X values","localized":"","reload":"","hint":"Separate values for X axis using commas"},
{"id":"","label":"Y values","localized":"","reload":"","hint":"Separate values for Y axis using commas"},
{"id":"","label":"Z values","localized":"","reload":"","hint":"Separate values for Z axis using commas"},
{"id":"","label":"Loops","localized":"","reload":"","hint":"How many times to process an image. Each output is used as the input of the next loop. If set to 1, behavior will be as if this script were not used"},
{"id":"","label":"Final denoising strength","localized":"","reload":"","hint":"The denoising strength for the final loop of each image in the batch"},
{"id":"","label":"Denoising strength curve","localized":"","reload":"","hint":"The denoising curve controls the rate of denoising strength change each loop. Aggressive: Most of the change will happen towards the start of the loops. Linear: Change will be constant through all loops. Lazy: Most of the change will happen towards the end of the loops"},
{"id":"","label":"Tile overlap","localized":"","reload":"","hint":"For SD upscale, how much overlap in pixels should there be between tiles. Tiles overlap so that when they are merged back into one picture, there is no clearly visible seam"},
{"id":"","label":"ACI: Color to Mask","localized":"","reload":"","hint":"Pick the color you want to mask and inpaint. Click on the color in the image to automatically select it.<br> Advised to use images like green screens to get precise results."},
{"id":"","label":"ACI: Color Tolerance","localized":"","reload":"","hint":"Adjust the tolerance to include similar colors in the mask. Lower values = mask only very similar colors. Higher values = mask a wider range of similar colors."},
{"id":"","label":"ACI: Mask Erode","localized":"","reload":"","hint":"Adjust padding to apply an inside offset to the mask. (Recommended value = 2 to remove leftovers at edges)"},
{"id":"","label":"ACI: Mask Blur","localized":"","reload":"","hint":"Adjust blur to apply a smooth transition between image and inpainted area. (Recommended value = 0 for sharpness)"},
{"id":"","label":"ACI: Denoising Strength","localized":"","reload":"","hint":"Change Denoising Strength to achieve desired inpaint amount."}
],
"settings": [
{"id":"","label":"Apply settings","localized":"","reload":"","hint":"Save current settings, server restart is recommended"},
{"id":"","label":"Model Loading","localized":"","reload":"","hint":"Settings related to how model is loaded"},
{"id":"","label":"Model Options","localized":"","reload":"","hint":"Settings related to behavior of specific models"},
{"id":"","label":"Model Offloading","localized":"","reload":"","hint":"Settings related to model offloading and memory management"},
{"id":"","label":"Model Quantization","localized":"","reload":"","hint":"Settings related to model quantization which is used to reduce memory usage"},
{"id":"","label":"Image Metadata","localized":"","reload":"","hint":"Settings related to handling of metadata that is created with generated images"},
{"id":"","label":"Legacy Options","localized":"","reload":"","hint":"Settings related to legacy options - should not be used"},
{"id":"","label":"Restart server","localized":"","reload":"","hint":"Restart server"},
{"id":"","label":"Shutdown server","localized":"","reload":"","hint":"Shutdown server"},
{"id":"","label":"Preview theme","localized":"","reload":"","hint":"Show theme preview"},
{"id":"","label":"Restore defaults","localized":"","reload":"","hint":"Restore default server settings"},
{"id":"","label":"Unload model","localized":"","reload":"","hint":"Unload currently loaded model"},
{"id":"","label":"Reload model","localized":"","reload":"","hint":"Reload currently selected model"},
{"id":"","label":"Models & Loading","localized":"","reload":"","hint":"Settings related to base models, primary backend and model load behavior"},
{"id":"","label":"Variational Auto Encoder","localized":"","reload":"","hint":"Settings related to Variational Auto Encoder and image decoding process during generate"},
{"id":"","label":"Text encoder","localized":"","reload":"","hint":"Settings related to text encoder and prompt encoding processing during generate"},
{"id":"","label":"Compute Settings","localized":"","reload":"","hint":"Settings related to compute precision, cross attention, and optimizations for computing platforms"},
{"id":"","label":"Backend Settings","localized":"","reload":"","hint":"Settings related to compute backends: torch, onnx and olive"},
{"id":"","label":"Pipeline modifiers","localized":"","reload":"","hint":"Additional functionality that can be enabled during generate"},
{"id":"","label":"Model compile","localized":"","reload":"","hint":"Settings related to different model compilation methods"},
{"id":"","label":"System Paths","localized":"","reload":"","hint":"Settings related to location of various model directories"},
{"id":"","label":"Image Options","localized":"","reload":"","hint":"Settings related to image format, metadata, and image grids"},
{"id":"","label":"Image Paths","localized":"","reload":"","hint":"Settings related to image filenames, and output directories"},
{"id":"","label":"Live Previews","localized":"","reload":"","hint":"Settings related to live previews, audio notification"},
{"id":"","label":"Sampler Settings","localized":"","reload":"","hint":"Settings related to sampler selection and configuration, and diffuser specific sampler configuration"},
{"id":"","label":"Postprocessing","localized":"","reload":"","hint":"Settings related to post image generation processing, face restoration, and upscaling"},
{"id":"","label":"Control Options","localized":"","reload":"","hint":"Settings related to the Control tab"},
{"id":"","label":"Huggingface","localized":"","reload":"","hint":"Settings related to Huggingface access"},
{"id":"","label":"Show all pages","localized":"","reload":"","hint":"Show all settings pages"},
{"id":"","label":"Base model","localized":"","reload":"","hint":"Main model used for all operations"},
{"id":"","label":"Refiner model","localized":"","reload":"","hint":"Refiner model used for second-pass operations"},
{"id":"","label":"Cached models","localized":"","reload":"","hint":"The number of models to store in RAM for quick access"},
{"id":"","label":"VAE model","localized":"","reload":"","hint":"VAE helps with fine details in the final image and may also alter colors"},
{"id":"","label":"Model load using streams","localized":"","reload":"","hint":"When loading models, attempt stream loading optimized for slow or network storage"},
{"id":"","label":"xFormers","localized":"","reload":"","hint":"Memory optimization. Non-Deterministic (different results each time)"},
{"id":"","label":"Scaled-Dot-Product","localized":"","reload":"","hint":"Memory optimization. Non-Deterministic unless SDP memory attention is disabled."},
{"id":"","label":"Prompt padding","localized":"","reload":"","hint":"Increase coherency by padding from the last comma within n tokens when using more than 75 tokens"},
{"id":"","label":"Original","localized":"","reload":"","hint":"Original LDM backend"},
{"id":"","label":"Autocast","localized":"","reload":"","hint":"Automatically determine precision during runtime"},
{"id":"","label":"Full","localized":"","reload":"","hint":"Always use full precision"},
{"id":"","label":"FP32","localized":"","reload":"","hint":"Use 32-bit floating point precision for calculations"},
{"id":"","label":"FP16","localized":"","reload":"","hint":"Use 16-bit floating point precision for calculations"},
{"id":"","label":"BF16","localized":"","reload":"","hint":"Use modified 16-bit floating point precision for calculations"},
{"id":"","label":"Full precision (--no-half-vae)","localized":"","reload":"","hint":"Uses FP32 for the VAE. May produce better results while using more VRAM and slower generation"},
{"id":"","label":"Force full precision (--no-half)","localized":"","reload":"","hint":"Uses FP32 for the model. May produce better results while using more VRAM and slower generation"},
{"id":"","label":"Upcast sampling","localized":"","reload":"","hint":"Usually produces similar results to --no-half with better performance while using less memory"},
{"id":"","label":"Attempt VAE roll back for NaN values","localized":"","reload":"","hint":"Requires Torch 2.1 and NaN check enabled"},
{"id":"","label":"Olive use FP16 on optimization","localized":"","reload":"","hint":"Use 16-bit floating point precision for the output model of Olive optimization process. Use 32-bit floating point precision if disabled"},
{"id":"","label":"Olive force FP32 for VAE Encoder","localized":"","reload":"","hint":"Use 32-bit floating point precision for VAE Encoder of the output model. This overrides 'use FP16 on optimization' option. If you are getting NaN or black blank images from Img2Img, enable this option and remove cache"},
{"id":"","label":"Olive use static dimensions","localized":"","reload":"","hint":"Make the inference with Olive optimized models much faster. (OrtTransformersOptimization)"},
{"id":"","label":"Olive cache optimized models","localized":"","reload":"","hint":"Save Olive processed models as a cache. You can manage them in ONNX tab"},
{"id":"","label":"File format","localized":"","reload":"","hint":"Select file format for images"},
{"id":"","label":"Include metadata","localized":"","reload":"","hint":"Save image create parameters as metadata tags inside image file"},
{"id":"","label":"Images filename pattern","localized":"","reload":"","hint":"Use following tags to define how filenames for images are chosen:<br><pre>seq, uuid<br>date, datetime, job_timestamp<br>generation_number, batch_number<br>model, model_shortname<br>model_hash, model_name<br>sampler, seed, steps, cfg<br>clip_skip, denoising<br>hasprompt, prompt, styles<br>prompt_hash, prompt_no_styles<br>prompt_spaces, prompt_words<br>height, width, image_hash<br></pre>"},
{"id":"","label":"Row count","localized":"","reload":"","hint":"Use -1 for autodetect and 0 for it to be same as batch size"},
{"id":"","label":"Directory name pattern","localized":"","reload":"","hint":"Use following tags to define how subdirectories for images and grids are chosen: [steps], [cfg],[prompt_hash], [prompt], [prompt_no_styles], [prompt_spaces], [width], [height], [styles], [sampler], [seed], [model_hash], [model_name], [prompt_words], [date], [datetime], [datetime<Format>], [datetime<Format><Time Zone>], [job_timestamp]; leave empty for default"},
{"id":"","label":"Inpainting conditioning mask strength","localized":"","reload":"","hint":"Determines how strongly to mask off the original image for inpainting and img2img. 1.0 means fully masked (default). 0.0 means a fully unmasked conditioning. Lower values will help preserve the overall composition of the image, but will struggle with large changes"},
{"id":"","label":"Clip skip","localized":"","reload":"","hint":"Early stopping parameter for CLIP model; 1 is stop at last layer as usual, 2 is stop at penultimate layer, etc"},
{"id":"","label":"Images folder","localized":"","reload":"","hint":"If empty, defaults to three directories below"},
{"id":"","label":"Grids folder","localized":"","reload":"","hint":"If empty, defaults to two directories below"},
{"id":"","label":"Quicksettings list","localized":"","reload":"","hint":"List of setting names, separated by commas, for settings that should go to the quick access bar at the top instead the setting tab"},
{"id":"","label":"Live preview display period","localized":"","reload":"","hint":"Request preview image every n steps, set to 0 to disable"},
{"id":"","label":"Approximate","localized":"","reload":"","hint":"Cheap neural network approximation. Very fast compared to VAE, but produces pictures with 4 times smaller horizontal/vertical resolution and lower quality"},
{"id":"","label":"Simple","localized":"","reload":"","hint":"Very cheap approximation. Very fast compared to VAE, but produces pictures with 8 times smaller horizontal/vertical resolution and extremely low quality"},
{"id":"","label":"Progress update period","localized":"","reload":"","hint":"Update period for UI progress bar and preview checks, in milliseconds"},
{"id":"","label":"Euler a","localized":"","reload":"","hint":"Euler Ancestral - very creative, each can get a completely different picture depending on step count, setting steps higher than 30-40 does not help"},
{"id":"","label":"DDIM","localized":"","reload":"","hint":"Denoising Diffusion Implicit Models - best at inpainting"},
{"id":"","label":"UniPC","localized":"","reload":"","hint":"Unified Predictor-Corrector Framework for Fast Sampling of Diffusion Models"},
{"id":"","label":"Sigma negative guidance minimum","localized":"","reload":"","hint":"Skip negative prompt for some steps when the image is almost ready, 0=disable"},
{"id":"","label":"Upscaler tile size","localized":"","reload":"","hint":"0 = no tiling"},
{"id":"","label":"Upscaler tile overlap","localized":"","reload":"","hint":"Low values = visible seam"},
{"id":"","label":"GFPGAN","localized":"","reload":"","hint":"Restore low quality faces using GFPGAN neural network"},
{"id":"","label":"CodeFormer","localized":"","reload":"","hint":"Restore low quality faces using Codeformer neural network"},
{"id":"","label":"CodeFormer weight parameter","localized":"","reload":"","hint":"0 = maximum effect; 1 = minimum effect"},
{"id":"","label":"ToMe token merging ratio","localized":"","reload":"","hint":"Enable redundant token merging via tomesd for speed and memory improvements, 0=disabled"},
{"id":"","label":"Todo token merging ratio","localized":"","reload":"","hint":"Enable redundant token merging via todo for speed and memory improvements, 0=disabled"},
{"id":"","label":"Model pipeline","localized":"","reload":"","hint":"If autodetect does not detect model automatically, select model type before loading a model"},
{"id":"","label":"VAE slicing","localized":"","reload":"","hint":"Decodes batch latents one image at a time with limited VRAM. Small performance boost in VAE decode on multi-image batches"},
{"id":"","label":"VAE tiling","localized":"","reload":"","hint":"Divide large images into overlapping tiles with limited VRAM. Results in a minor increase in processing time"},
{"id":"","label":"Dynamic attention BMM","localized":"","reload":"","hint":"Performs attention computation in steps instead of all at once. Slower inference times, but greatly reduced memory usage"},
{"id":"","label":"ONNX Execution Provider","localized":"","reload":"","hint":"ONNX Execution Provider"},
{"id":"","label":"ONNX allow fallback to CPU","localized":"","reload":"","hint":"Allow fallback to CPU when selected execution provider failed"},
{"id":"","label":"ONNX cache converted models","localized":"","reload":"","hint":"Save the models that are converted to ONNX format as a cache. You can manage them in ONNX tab"},
{"id":"","label":"ONNX unload base model when processing refiner","localized":"","reload":"","hint":"Unload base model when the refiner is being converted/optimized/processed"},
{"id":"","label":"Model compile precompile","localized":"","reload":"","hint":"Run model compile immediately on model load instead of first use"},
{"id":"","label":"Use zeros for prompt padding","localized":"","reload":"","hint":"Force full zero tensor when prompt is empty to remove any residual noise"},
{"id":"","label":"Include invisible watermark","localized":"","reload":"","hint":"Add invisible watermark to image by altering some pixel values"},
{"id":"","label":"invisible watermark string","localized":"","reload":"","hint":"Watermark string to add to image. Keep very short to avoid image corruption."},
{"id":"","label":"show log view","localized":"","reload":"","hint":"Show log view at the bottom of the main window"},
{"id":"","label":"Log view update period","localized":"","reload":"","hint":"Log view update period, in milliseconds"},
{"id":"","label":"PAG layer names","localized":"","reload":"","hint":"Space separated list of layers<br>Available: d[0-5], m[0], u[0-8]<br>Default: m0"},
{"id":"","label":"prompt attention normalization","localized":"","reload":"","hint":"Balances prompt token weights to avoid overly strong/weak influence. Helps stabilize outputs."},
{"id":"","label":"ck flash attention","localized":"","reload":"","hint":"Custom Flash Attention kernel. Very fast, but may be unstable or hardware-dependent."},
{"id":"","label":"flash attention","localized":"","reload":"","hint":"Highly optimized attention algorithm. Greatly reduces VRAM use and speeds up inference, but can be non-deterministic."},
{"id":"","label":"memory attention","localized":"","reload":"","hint":"Uses less VRAM by chunking attention computation. Slower but allows bigger batches or images."},
{"id":"","label":"math attention","localized":"","reload":"","hint":"Fallback pure-math attention implementation. Stable and predictable but very slow."},
{"id":"","label":"dynamic attention","localized":"","reload":"","hint":"Adjusts attention computation dynamically per step. Saves VRAM but slows generation."},
{"id":"","label":"sage attention","localized":"","reload":"","hint":"Experimental attention optimization method. May improve speed but less tested and can cause bugs."},
{"id":"","label":"batch matrix-matrix","localized":"","reload":"","hint":"Standard batched matrix multiplication for attention. Reliable but not VRAM-efficient."},
{"id":"","label":"split attention","localized":"","reload":"","hint":"Splits attention layers into smaller chunks. Helps with very large images at the cost of slower inference."},
{"id":"","label":"deterministic mode","localized":"","reload":"","hint":"Forces deterministic output across runs. Useful for reproducibility, but may disable some optimizations."},
{"id":"","label":"no-grad","localized":"","reload":"","hint":"Disables gradient tracking with torch.no_grad. Reduces memory usage and speeds up inference."},
{"id":"","label":"Inference-mode","localized":"","reload":"","hint":"Like no-grad but stricter. Ensures model runs only in inference mode for safety and speed."},
{"id":"","label":"cudamallocasync","localized":"","reload":"","hint":"Uses CUDA async memory allocator. Improves performance and VRAM fragmentation, but may cause instability on some GPUs."}
],
"missing": [
{"id":"","label":"1st stage","localized":"","reload":"","hint":"1st stage"},
|
|
{"id":"","label":"1st stage backbone","localized":"","reload":"","hint":"1st stage backbone"},
|
|
{"id":"","label":"1st stage skip","localized":"","reload":"","hint":"1st stage skip"},
|
|
{"id":"","label":"2nd restart step","localized":"","reload":"","hint":"2nd restart step"},
|
|
{"id":"","label":"2nd scale","localized":"","reload":"","hint":"2nd scale"},
|
|
{"id":"","label":"2nd stage","localized":"","reload":"","hint":"2nd stage"},
|
|
{"id":"","label":"2nd stage backbone","localized":"","reload":"","hint":"2nd stage backbone"},
|
|
{"id":"","label":"2nd stage skip","localized":"","reload":"","hint":"2nd stage skip"},
|
|
{"id":"","label":"3rd restart step","localized":"","reload":"","hint":"3rd restart step"},
|
|
{"id":"","label":"3rd scale","localized":"","reload":"","hint":"3rd scale"},
|
|
{"id":"","label":"3rd stage","localized":"","reload":"","hint":"3rd stage"},
|
|
{"id":"","label":"4th restart step","localized":"","reload":"","hint":"4th restart step"},
|
|
{"id":"","label":"4th scale","localized":"","reload":"","hint":"4th scale"},
|
|
{"id":"","label":"4th stage","localized":"","reload":"","hint":"4th stage"},
|
|
{"id":"","label":"a1111","localized":"","reload":"","hint":"a1111"},
|
|
{"id":"","label":"accuracy","localized":"","reload":"","hint":"accuracy"},
|
|
{"id":"","label":"aci: mask dilate","localized":"","reload":"","hint":"aci: mask dilate"},
|
|
{"id":"","label":"active","localized":"","reload":"","hint":"active"},
|
|
{"id":"","label":"adain","localized":"","reload":"","hint":"adain"},
|
|
{"id":"","label":"adapter 1","localized":"","reload":"","hint":"adapter 1"},
|
|
{"id":"","label":"adapter 2","localized":"","reload":"","hint":"adapter 2"},
|
|
{"id":"","label":"adapter 3","localized":"","reload":"","hint":"adapter 3"},
|
|
{"id":"","label":"adapter 4","localized":"","reload":"","hint":"adapter 4"},
|
|
{"id":"","label":"adaptive restore","localized":"","reload":"","hint":"adaptive restore"},
|
|
{"id":"","label":"add text info","localized":"","reload":"","hint":"add text info"},
|
|
{"id":"","label":"add time info","localized":"","reload":"","hint":"add time info"},
|
|
{"id":"","label":"additional image browser folders","localized":"","reload":"","hint":"additional image browser folders"},
|
|
{"id":"","label":"additional postprocessing operations","localized":"","reload":"","hint":"additional postprocessing operations"},
|
|
{"id":"","label":"advanced options","localized":"","reload":"","hint":"advanced options"},
|
|
{"id":"","label":"after","localized":"","reload":"","hint":"after"},
|
|
{"id":"","label":"aggressive at step","localized":"","reload":"","hint":"aggressive at step"},
|
|
{"id":"","label":"alias","localized":"","reload":"","hint":"alias"},
|
|
{"id":"","label":"all","localized":"","reload":"","hint":"all"},
|
|
{"id":"","label":"allowed aspect ratios","localized":"","reload":"","hint":"allowed aspect ratios"},
|
|
{"id":"","label":"alpha","localized":"","reload":"","hint":"alpha"},
|
|
{"id":"","label":"alpha block weight preset","localized":"","reload":"","hint":"alpha block weight preset"},
|
|
{"id":"","label":"alpha matting","localized":"","reload":"","hint":"alpha matting"},
|
|
{"id":"","label":"alpha preset","localized":"","reload":"","hint":"alpha preset"},
|
|
{"id":"","label":"alpha ratio","localized":"","reload":"","hint":"alpha ratio"},
|
|
{"id":"","label":"amplify lut","localized":"","reload":"","hint":"amplify lut"},
|
|
{"id":"","label":"analyze","localized":"","reload":"","hint":"analyze"},
|
|
{"id":"","label":"anchor settings","localized":"","reload":"","hint":"anchor settings"},
|
|
{"id":"","label":"animateddiff","localized":"","reload":"","hint":"animateddiff"},
|
|
{"id":"","label":"answer","localized":"","reload":"","hint":"answer"},
|
|
{"id":"","label":"aot_ts_nvfuser","localized":"","reload":"","hint":"aot_ts_nvfuser"},
|
|
{"id":"","label":"appearance","localized":"","reload":"","hint":"appearance"},
|
|
{"id":"","label":"append caption files","localized":"","reload":"","hint":"append caption files"},
|
|
{"id":"","label":"append image info json file","localized":"","reload":"","hint":"append image info json file"},
|
|
{"id":"","label":"append interrogated prompt at each iteration","localized":"","reload":"","hint":"append interrogated prompt at each iteration"},
|
|
{"id":"","label":"apply color correction","localized":"","reload":"","hint":"apply color correction"},
|
|
{"id":"","label":"apply filter","localized":"","reload":"","hint":"apply filter"},
|
|
{"id":"","label":"apply linfusion distillation on load","localized":"","reload":"","hint":"apply linfusion distillation on load"},
|
|
{"id":"","label":"apply mask as overlay","localized":"","reload":"","hint":"apply mask as overlay"},
|
|
{"id":"","label":"apply msw-msa","localized":"","reload":"","hint":"apply msw-msa"},
|
|
{"id":"","label":"apply rau-net","localized":"","reload":"","hint":"apply rau-net"},
|
|
{"id":"","label":"apply to model","localized":"","reload":"","hint":"apply to model"},
|
|
{"id":"","label":"artists","localized":"","reload":"","hint":"artists"},
|
|
{"id":"","label":"atiadlxx (amd only)","localized":"","reload":"","hint":"atiadlxx (amd only)"},
|
|
{"id":"","label":"attention","localized":"","reload":"","hint":"attention"},
|
|
{"id":"","label":"attention adain","localized":"","reload":"","hint":"attention adain"},
|
|
{"id":"","label":"attention cache enabled","localized":"","reload":"","hint":"attention cache enabled"},
|
|
{"id":"","label":"attention chunking threshold","localized":"","reload":"","hint":"attention chunking threshold"},
|
|
{"id":"","label":"attention kv chunk size","localized":"","reload":"","hint":"attention kv chunk size"},
|
|
{"id":"","label":"attention query chunk size","localized":"","reload":"","hint":"attention query chunk size"},
|
|
{"id":"","label":"auto","localized":"","reload":"","hint":"auto"},
|
|
{"id":"","label":"auto apply","localized":"","reload":"","hint":"auto apply"},
|
|
{"id":"","label":"auto-convert sd15 embeddings to sdxl","localized":"","reload":"","hint":"auto-convert sd15 embeddings to sdxl"},
|
|
{"id":"","label":"auto-mask","localized":"","reload":"","hint":"auto-mask"},
|
|
{"id":"","label":"auto-segment","localized":"","reload":"","hint":"auto-segment"},
|
|
{"id":"","label":"autolaunch browser upon startup","localized":"","reload":"","hint":"autolaunch browser upon startup"},
|
|
{"id":"","label":"automatically determine rank","localized":"","reload":"","hint":"automatically determine rank"},
|
|
{"id":"","label":"autorank ratio","localized":"","reload":"","hint":"autorank ratio"},
|
|
{"id":"","label":"available networks","localized":"","reload":"","hint":"available networks"},
|
|
{"id":"","label":"backend","localized":"","reload":"","hint":"backend"},
|
|
{"id":"","label":"backend storage","localized":"","reload":"","hint":"backend storage"},
|
|
{"id":"","label":"background threshold","localized":"","reload":"","hint":"background threshold"},
|
|
{"id":"","label":"balanced","localized":"","reload":"","hint":"balanced"},
|
|
{"id":"","label":"balanced offload cpu high watermark","localized":"","reload":"","hint":"balanced offload cpu high watermark"},
|
|
{"id":"","label":"balanced offload gpu high watermark","localized":"","reload":"","hint":"balanced offload gpu high watermark"},
|
|
{"id":"","label":"balanced offload gpu low watermark","localized":"","reload":"","hint":"balanced offload gpu low watermark"},
|
|
{"id":"","label":"base","localized":"","reload":"","hint":"base"},
|
|
{"id":"","label":"batch caption","localized":"","reload":"","hint":"batch caption"},
|
|
{"id":"","label":"batch input directory","localized":"","reload":"","hint":"batch input directory"},
|
|
{"id":"","label":"batch interogate","localized":"","reload":"","hint":"batch interogate"},
|
|
{"id":"","label":"batch interrogate","localized":"","reload":"","hint":"batch interrogate"},
|
|
{"id":"","label":"batch mask directory","localized":"","reload":"","hint":"batch mask directory"},
|
|
{"id":"","label":"batch mode uses sequential seeds","localized":"","reload":"","hint":"batch mode uses sequential seeds"},
|
|
{"id":"","label":"batch output directory","localized":"","reload":"","hint":"batch output directory"},
|
|
{"id":"","label":"batch uses original name","localized":"","reload":"","hint":"batch uses original name"},
|
|
{"id":"","label":"bdia ddim","localized":"","reload":"","hint":"bdia ddim"},
|
|
{"id":"","label":"before","localized":"","reload":"","hint":"before"},
|
|
{"id":"","label":"benchmark level","localized":"","reload":"","hint":"benchmark level"},
|
|
{"id":"","label":"benchmark steps","localized":"","reload":"","hint":"benchmark steps"},
|
|
{"id":"","label":"beta block weight preset","localized":"","reload":"","hint":"beta block weight preset"},
|
|
{"id":"","label":"beta end","localized":"","reload":"","hint":"beta end"},
|
|
{"id":"","label":"beta ratio","localized":"","reload":"","hint":"beta ratio"},
|
|
{"id":"","label":"beta start","localized":"","reload":"","hint":"beta start"},
|
|
{"id":"","label":"bh1","localized":"","reload":"","hint":"bh1"},
|
|
{"id":"","label":"bh2","localized":"","reload":"","hint":"bh2"},
|
|
{"id":"","label":"block","localized":"","reload":"","hint":"block"},
|
|
{"id":"","label":"block skip range","localized":"","reload":"","hint":"block skip range"},
|
|
{"id":"","label":"blur","localized":"","reload":"","hint":"blur"},
|
|
{"id":"","label":"body","localized":"","reload":"","hint":"body"},
|
|
{"id":"","label":"boost","localized":"","reload":"","hint":"boost"},
|
|
{"id":"","label":"brightness","localized":"","reload":"","hint":"brightness"},
|
|
{"id":"","label":"cache model","localized":"","reload":"","hint":"cache model"},
|
|
{"id":"","label":"cache text encoder results","localized":"","reload":"","hint":"cache text encoder results"},
|
|
{"id":"","label":"canny","localized":"","reload":"","hint":"canny"},
|
|
{"id":"","label":"caption","localized":"","reload":"","hint":"caption"},
|
|
{"id":"","label":"caption model","localized":"","reload":"","hint":"caption model"},
|
|
{"id":"","label":"center","localized":"","reload":"","hint":"center"},
|
|
{"id":"","label":"change log","localized":"","reload":"","hint":"change log"},
|
|
{"id":"","label":"change model","localized":"","reload":"","hint":"change model"},
|
|
{"id":"","label":"change rate","localized":"","reload":"","hint":"change rate"},
|
|
{"id":"","label":"change reference","localized":"","reload":"","hint":"change reference"},
|
|
{"id":"","label":"change refiner","localized":"","reload":"","hint":"change refiner"},
|
|
{"id":"","label":"change vae","localized":"","reload":"","hint":"change vae"},
|
|
{"id":"","label":"channels last","localized":"","reload":"","hint":"channels last"},
|
|
{"id":"","label":"check alternative hash","localized":"","reload":"","hint":"check alternative hash"},
|
|
{"id":"","label":"check for updates","localized":"","reload":"","hint":"check for updates"},
|
|
{"id":"","label":"check status","localized":"","reload":"","hint":"check status"},
|
|
{"id":"","label":"chunk size","localized":"","reload":"","hint":"chunk size"},
|
|
{"id":"","label":"civitai model type","localized":"","reload":"","hint":"civitai model type"},
|
|
{"id":"","label":"civitai token","localized":"","reload":"","hint":"civitai token"},
|
|
{"id":"","label":"ckpt","localized":"","reload":"","hint":"ckpt"},
|
|
{"id":"","label":"cleanup temporary folder on startup","localized":"","reload":"","hint":"cleanup temporary folder on startup"},
|
|
{"id":"","label":"clip model","localized":"","reload":"","hint":"clip model"},
|
|
{"id":"","label":"clip: chunk size","localized":"","reload":"","hint":"clip: chunk size"},
|
|
{"id":"","label":"clip: default captioner","localized":"","reload":"","hint":"clip: default captioner"},
|
|
{"id":"","label":"clip: default mode","localized":"","reload":"","hint":"clip: default mode"},
|
|
{"id":"","label":"clip: default model","localized":"","reload":"","hint":"clip: default model"},
|
|
{"id":"","label":"clip: intermediate flavors","localized":"","reload":"","hint":"clip: intermediate flavors"},
|
|
{"id":"","label":"clip: max flavors","localized":"","reload":"","hint":"clip: max flavors"},
|
|
{"id":"","label":"clip: max length","localized":"","reload":"","hint":"clip: max length"},
|
|
{"id":"","label":"clip: min flavors","localized":"","reload":"","hint":"clip: min flavors"},
|
|
{"id":"","label":"clip: min length","localized":"","reload":"","hint":"clip: min length"},
|
|
{"id":"","label":"clip: num beams","localized":"","reload":"","hint":"clip: num beams"},
|
|
{"id":"","label":"close","localized":"","reload":"","hint":"close"},
|
|
{"id":"","label":"cmsi","localized":"","reload":"","hint":"cmsi"},
|
|
{"id":"","label":"cn end","localized":"","reload":"","hint":"cn end"},
|
|
{"id":"","label":"cn mode","localized":"","reload":"","hint":"cn mode"},
|
|
{"id":"","label":"cn start","localized":"","reload":"","hint":"cn start"},
|
|
{"id":"","label":"cn strength","localized":"","reload":"","hint":"cn strength"},
|
|
{"id":"","label":"cn tiles","localized":"","reload":"","hint":"cn tiles"},
|
|
{"id":"","label":"coarse","localized":"","reload":"","hint":"coarse"},
|
|
{"id":"","label":"color","localized":"","reload":"","hint":"color"},
|
|
{"id":"","label":"color grading","localized":"","reload":"","hint":"color grading"},
|
|
{"id":"","label":"color map","localized":"","reload":"","hint":"color map"},
|
|
{"id":"","label":"color variation","localized":"","reload":"","hint":"color variation"},
|
|
{"id":"","label":"colormap","localized":"","reload":"","hint":"colormap"},
|
|
{"id":"","label":"columns","localized":"","reload":"","hint":"columns"},
|
|
{"id":"","label":"comma","localized":"","reload":"","hint":"comma"},
|
|
{"id":"","label":"comma separated list with optional strength per lora","localized":"","reload":"","hint":"comma separated list with optional strength per lora"},
|
|
{"id":"","label":"compact view","localized":"","reload":"","hint":"compact view"},
|
|
{"id":"","label":"compel","localized":"","reload":"","hint":"compel"},
|
|
{"id":"","label":"composite","localized":"","reload":"","hint":"composite"},
|
|
{"id":"","label":"compress ratio","localized":"","reload":"","hint":"compress ratio"},
|
|
{"id":"","label":"concept tokens","localized":"","reload":"","hint":"concept tokens"},
|
|
{"id":"","label":"context","localized":"","reload":"","hint":"context"},
|
|
{"id":"","label":"context after","localized":"","reload":"","hint":"context after"},
|
|
{"id":"","label":"context before","localized":"","reload":"","hint":"context before"},
|
|
{"id":"","label":"context mask","localized":"","reload":"","hint":"context mask"},
|
|
{"id":"","label":"contrast","localized":"","reload":"","hint":"contrast"},
|
|
{"id":"","label":"control factor","localized":"","reload":"","hint":"control factor"},
|
|
{"id":"","label":"control override denoise strength","localized":"","reload":"","hint":"control override denoise strength"},
|
|
{"id":"","label":"control preprocess input images","localized":"","reload":"","hint":"control preprocess input images"},
|
|
{"id":"","label":"control-lllite unit 1","localized":"","reload":"","hint":"control-lllite unit 1"},
|
|
{"id":"","label":"control-lllite unit 2","localized":"","reload":"","hint":"control-lllite unit 2"},
|
|
{"id":"","label":"control-lllite unit 3","localized":"","reload":"","hint":"control-lllite unit 3"},
|
|
{"id":"","label":"control-lllite unit 4","localized":"","reload":"","hint":"control-lllite unit 4"},
|
|
{"id":"","label":"controlnet","localized":"","reload":"","hint":"controlnet"},
|
|
{"id":"","label":"controlnet unit 1","localized":"","reload":"","hint":"controlnet unit 1"},
|
|
{"id":"","label":"controlnet unit 2","localized":"","reload":"","hint":"controlnet unit 2"},
|
|
{"id":"","label":"controlnet unit 3","localized":"","reload":"","hint":"controlnet unit 3"},
|
|
{"id":"","label":"controlnet unit 4","localized":"","reload":"","hint":"controlnet unit 4"},
|
|
{"id":"","label":"controlnet-xs","localized":"","reload":"","hint":"controlnet-xs"},
|
|
{"id":"","label":"controlnet-xs unit 1","localized":"","reload":"","hint":"controlnet-xs unit 1"},
|
|
{"id":"","label":"controlnet-xs unit 2","localized":"","reload":"","hint":"controlnet-xs unit 2"},
|
|
{"id":"","label":"controlnet-xs unit 3","localized":"","reload":"","hint":"controlnet-xs unit 3"},
|
|
{"id":"","label":"controlnet-xs unit 4","localized":"","reload":"","hint":"controlnet-xs unit 4"},
|
|
{"id":"","label":"correction mode","localized":"","reload":"","hint":"correction mode"},
|
|
{"id":"","label":"cosine background","localized":"","reload":"","hint":"cosine background"},
|
|
{"id":"","label":"cosine scale","localized":"","reload":"","hint":"cosine scale"},
|
|
{"id":"","label":"cosine scale 1","localized":"","reload":"","hint":"cosine scale 1"},
|
|
{"id":"","label":"cosine scale 2","localized":"","reload":"","hint":"cosine scale 2"},
|
|
{"id":"","label":"cosine scale 3","localized":"","reload":"","hint":"cosine scale 3"},
|
|
{"id":"","label":"create image info text file","localized":"","reload":"","hint":"create image info text file"},
|
|
{"id":"","label":"create video","localized":"","reload":"","hint":"create video"},
|
|
{"id":"","label":"create zip archive","localized":"","reload":"","hint":"create zip archive"},
|
|
{"id":"","label":"cross-attention","localized":"","reload":"","hint":"cross-attention"},
|
|
{"id":"","label":"cudagraphs","localized":"","reload":"","hint":"cudagraphs"},
|
|
{"id":"","label":"custom pipeline","localized":"","reload":"","hint":"custom pipeline"},
|
|
{"id":"","label":"dark","localized":"","reload":"","hint":"dark"},
|
|
{"id":"","label":"dc solver","localized":"","reload":"","hint":"dc solver"},
|
|
{"id":"","label":"ddpm","localized":"","reload":"","hint":"ddpm"},
|
|
{"id":"","label":"debug info","localized":"","reload":"","hint":"debug info"},
|
|
{"id":"","label":"decode","localized":"","reload":"","hint":"decode"},
|
|
{"id":"","label":"decode chunks","localized":"","reload":"","hint":"decode chunks"},
|
|
{"id":"","label":"deep-cache","localized":"","reload":"","hint":"deep-cache"},
|
|
{"id":"","label":"deepbooru","localized":"","reload":"","hint":"deepbooru"},
|
|
{"id":"","label":"deepbooru: escape brackets","localized":"","reload":"","hint":"deepbooru: escape brackets"},
|
|
{"id":"","label":"deepbooru: exclude tags","localized":"","reload":"","hint":"deepbooru: exclude tags"},
|
|
{"id":"","label":"deepbooru: include scores in results","localized":"","reload":"","hint":"deepbooru: include scores in results"},
|
|
{"id":"","label":"deepbooru: max tags","localized":"","reload":"","hint":"deepbooru: max tags"},
|
|
{"id":"","label":"deepbooru: score threshold","localized":"","reload":"","hint":"deepbooru: score threshold"},
|
|
{"id":"","label":"deepbooru: sort alphabetically","localized":"","reload":"","hint":"deepbooru: sort alphabetically"},
|
|
{"id":"","label":"deepbooru: use spaces for tags","localized":"","reload":"","hint":"deepbooru: use spaces for tags"},
|
|
{"id":"","label":"deepcache cache interval","localized":"","reload":"","hint":"deepcache cache interval"},
|
|
{"id":"","label":"default","localized":"","reload":"","hint":"default"},
|
|
{"id":"","label":"deis","localized":"","reload":"","hint":"deis"},
|
|
{"id":"","label":"denoising batch size","localized":"","reload":"","hint":"denoising batch size"},
|
|
{"id":"","label":"denoising steps","localized":"","reload":"","hint":"denoising steps"},
|
|
{"id":"","label":"depth and normal","localized":"","reload":"","hint":"depth and normal"},
|
|
{"id":"","label":"depth anything","localized":"","reload":"","hint":"depth anything"},
|
|
{"id":"","label":"depth map","localized":"","reload":"","hint":"depth map"},
|
|
{"id":"","label":"depth threshold","localized":"","reload":"","hint":"depth threshold"},
|
|
{"id":"","label":"description","localized":"","reload":"","hint":"description"},
|
|
{"id":"","label":"details","localized":"","reload":"","hint":"details"},
|
|
{"id":"","label":"device info","localized":"","reload":"","hint":"device info"},
|
|
{"id":"","label":"diffusers","localized":"","reload":"","hint":"diffusers"},
|
|
{"id":"","label":"dilate","localized":"","reload":"","hint":"dilate"},
|
|
{"id":"","label":"dilate tau","localized":"","reload":"","hint":"dilate tau"},
|
|
{"id":"","label":"directml retry ops for nan","localized":"","reload":"","hint":"directml retry ops for nan"},
|
|
{"id":"","label":"directory for temporary images; leave empty for default","localized":"","reload":"","hint":"directory for temporary images; leave empty for default"},
|
|
{"id":"","label":"disable accelerate","localized":"","reload":"","hint":"disable accelerate"},
|
|
{"id":"","label":"disable conditional batching","localized":"","reload":"","hint":"disable conditional batching"},
|
|
{"id":"","label":"disabled","localized":"","reload":"","hint":"disabled"},
|
|
{"id":"","label":"discard penultimate sigma","localized":"","reload":"","hint":"discard penultimate sigma"},
|
|
{"id":"","label":"distance threshold","localized":"","reload":"","hint":"distance threshold"},
|
|
{"id":"","label":"do not change selected model when reading generation parameters","localized":"","reload":"","hint":"do not change selected model when reading generation parameters"},
|
|
{"id":"","label":"do not display video output in ui","localized":"","reload":"","hint":"do not display video output in ui"},
|
|
{"id":"","label":"down","localized":"","reload":"","hint":"down"},
|
|
{"id":"","label":"download","localized":"","reload":"","hint":"download"},
|
|
{"id":"","label":"download model","localized":"","reload":"","hint":"download model"},
|
|
{"id":"","label":"download path","localized":"","reload":"","hint":"download path"},
|
|
{"id":"","label":"download updates","localized":"","reload":"","hint":"download updates"},
|
|
{"id":"","label":"downscale high resolution live previews","localized":"","reload":"","hint":"downscale high resolution live previews"},
|
|
{"id":"","label":"dpm sde","localized":"","reload":"","hint":"dpm sde"},
|
|
{"id":"","label":"dpm++","localized":"","reload":"","hint":"dpm++"},
|
|
{"id":"","label":"dpm++ 1s","localized":"","reload":"","hint":"dpm++ 1s"},
|
|
{"id":"","label":"dpm++ 2m","localized":"","reload":"","hint":"dpm++ 2m"},
|
|
{"id":"","label":"dpm++ 2m edm","localized":"","reload":"","hint":"dpm++ 2m edm"},
|
|
{"id":"","label":"dpm++ 2m inverse","localized":"","reload":"","hint":"dpm++ 2m inverse"},
|
|
{"id":"","label":"dpm++ 2m sde","localized":"","reload":"","hint":"dpm++ 2m sde"},
|
|
{"id":"","label":"dpm++ 3m","localized":"","reload":"","hint":"dpm++ 3m"},
|
|
{"id":"","label":"dpm++ 3m inverse","localized":"","reload":"","hint":"dpm++ 3m inverse"},
|
|
{"id":"","label":"dpm++ cosine","localized":"","reload":"","hint":"dpm++ cosine"},
|
|
{"id":"","label":"dpm++ inverse","localized":"","reload":"","hint":"dpm++ inverse"},
|
|
{"id":"","label":"dpm++ sde","localized":"","reload":"","hint":"dpm++ sde"},
|
|
{"id":"","label":"dpm2 flowmatch","localized":"","reload":"","hint":"dpm2 flowmatch"},
|
|
{"id":"","label":"dpm2++ 2m flowmatch","localized":"","reload":"","hint":"dpm2++ 2m flowmatch"},
|
|
{"id":"","label":"dpm2++ 2m sde flowmatch","localized":"","reload":"","hint":"dpm2++ 2m sde flowmatch"},
|
|
{"id":"","label":"dpm2++ 2s flowmatch","localized":"","reload":"","hint":"dpm2++ 2s flowmatch"},
|
|
{"id":"","label":"dpm2++ 3m sde flowmatch","localized":"","reload":"","hint":"dpm2++ 3m sde flowmatch"},
|
|
{"id":"","label":"dpm2++ sde flowmatch","localized":"","reload":"","hint":"dpm2++ sde flowmatch"},
|
|
{"id":"","label":"dpm2a flowmatch","localized":"","reload":"","hint":"dpm2a flowmatch"},
|
|
{"id":"","label":"draw legend","localized":"","reload":"","hint":"draw legend"},
|
|
{"id":"","label":"dropdown","localized":"","reload":"","hint":"dropdown"},
|
|
{"id":"","label":"duration","localized":"","reload":"","hint":"duration"},
|
|
{"id":"","label":"dwpose","localized":"","reload":"","hint":"dwpose"},
|
|
{"id":"","label":"dynamic attention slicing rate in gb","localized":"","reload":"","hint":"dynamic attention slicing rate in gb"},
|
|
{"id":"","label":"dynamic attention trigger rate in gb","localized":"","reload":"","hint":"dynamic attention trigger rate in gb"},
|
|
{"id":"","label":"edge","localized":"","reload":"","hint":"edge"},
|
|
{"id":"","label":"edit start","localized":"","reload":"","hint":"edit start"},
|
|
{"id":"","label":"edit stop","localized":"","reload":"","hint":"edit stop"},
|
|
{"id":"","label":"embedded metadata","localized":"","reload":"","hint":"embedded metadata"},
|
|
{"id":"","label":"enable embeddings support","localized":"","reload":"","hint":"enable embeddings support"},
|
|
{"id":"","label":"enable file wildcards support","localized":"","reload":"","hint":"enable file wildcards support"},
|
|
{"id":"","label":"enable freeu","localized":"","reload":"","hint":"enable freeu"},
|
|
{"id":"","label":"enable teacache","localized":"","reload":"","hint":"enable teacache"},
|
|
{"id":"","label":"enable tonemap","localized":"","reload":"","hint":"enable tonemap"},
|
|
{"id":"","label":"enable use of reference models","localized":"","reload":"","hint":"enable use of reference models"},
|
|
{"id":"","label":"enabled","localized":"","reload":"","hint":"enabled"},
|
|
{"id":"","label":"encoder","localized":"","reload":"","hint":"encoder"},
|
|
{"id":"","label":"end","localized":"","reload":"","hint":"end"},
|
|
{"id":"","label":"enhance prompt","localized":"","reload":"","hint":"enhance prompt"},
|
|
{"id":"","label":"ensemble size","localized":"","reload":"","hint":"ensemble size"},
|
|
{"id":"","label":"epsilon","localized":"","reload":"","hint":"epsilon"},
|
|
{"id":"","label":"erode","localized":"","reload":"","hint":"erode"},
|
|
{"id":"","label":"erode size","localized":"","reload":"","hint":"erode size"},
|
|
{"id":"","label":"eta","localized":"","reload":"","hint":"eta"},
|
|
{"id":"","label":"euler","localized":"","reload":"","hint":"euler"},
|
|
{"id":"","label":"euler edm","localized":"","reload":"","hint":"euler edm"},
|
|
{"id":"","label":"euler flowmatch","localized":"","reload":"","hint":"euler flowmatch"},
|
|
{"id":"","label":"euler sgm","localized":"","reload":"","hint":"euler sgm"},
|
|
{"id":"","label":"executionprovider.cpu","localized":"","reload":"","hint":"executionprovider.cpu"},
|
|
{"id":"","label":"executionprovider.cuda","localized":"","reload":"","hint":"executionprovider.cuda"},
|
|
{"id":"","label":"executionprovider.directml","localized":"","reload":"","hint":"executionprovider.directml"},
|
|
{"id":"","label":"executionprovider.migraphx","localized":"","reload":"","hint":"executionprovider.migraphx"},
|
|
{"id":"","label":"executionprovider.openvino","localized":"","reload":"","hint":"executionprovider.openvino"},
|
|
{"id":"","label":"executionprovider.rocm","localized":"","reload":"","hint":"executionprovider.rocm"},
|
|
{"id":"","label":"expandable segments","localized":"","reload":"","hint":"expandable segments"},
|
|
{"id":"","label":"exponential","localized":"","reload":"","hint":"exponential"},
|
|
{"id":"","label":"exposure","localized":"","reload":"","hint":"exposure"},
|
|
{"id":"","label":"extra noise multiplier for img2img","localized":"","reload":"","hint":"extra noise multiplier for img2img"},
|
|
{"id":"","label":"extract lora","localized":"","reload":"","hint":"extract lora"},
|
|
{"id":"","label":"face","localized":"","reload":"","hint":"face"},
|
|
{"id":"","label":"face confidence","localized":"","reload":"","hint":"face confidence"},
|
|
{"id":"","label":"faceid model","localized":"","reload":"","hint":"faceid model"},
|
|
{"id":"","label":"fall-off exponent (lower=higher detail)","localized":"","reload":"","hint":"fall-off exponent (lower=higher detail)"},
|
|
{"id":"","label":"false","localized":"","reload":"","hint":"false"},
|
|
{"id":"","label":"fast","localized":"","reload":"","hint":"fast"},
|
|
{"id":"","label":"file or folder with user-defined styles","localized":"","reload":"","hint":"file or folder with user-defined styles"},
|
|
{"id":"","label":"filename","localized":"","reload":"","hint":"filename"},
|
|
{"id":"","label":"first-block cache enabled","localized":"","reload":"","hint":"first-block cache enabled"},
|
|
{"id":"","label":"fixed unet precision","localized":"","reload":"","hint":"fixed unet precision"},
|
|
{"id":"","label":"flavors","localized":"","reload":"","hint":"flavors"},
|
|
{"id":"","label":"folder","localized":"","reload":"","hint":"folder"},
|
|
{"id":"","label":"folder for control generate","localized":"","reload":"","hint":"folder for control generate"},
|
|
{"id":"","label":"folder for control grids","localized":"","reload":"","hint":"folder for control grids"},
|
|
{"id":"","label":"folder for disk offload","localized":"","reload":"","hint":"folder for disk offload"},
|
|
{"id":"","label":"folder for huggingface cache","localized":"","reload":"","hint":"folder for huggingface cache"},
|
|
{"id":"","label":"folder for image generate","localized":"","reload":"","hint":"folder for image generate"},
|
|
{"id":"","label":"folder for img2img grids","localized":"","reload":"","hint":"folder for img2img grids"},
|
|
{"id":"","label":"folder for init images","localized":"","reload":"","hint":"folder for init images"},
|
|
{"id":"","label":"folder for manually saved images","localized":"","reload":"","hint":"folder for manually saved images"},
|
|
{"id":"","label":"folder for onnx cached models","localized":"","reload":"","hint":"folder for onnx cached models"},
|
|
{"id":"","label":"folder for onnx conversion","localized":"","reload":"","hint":"folder for onnx conversion"},
|
|
{"id":"","label":"folder for openvino cache","localized":"","reload":"","hint":"folder for openvino cache"},
|
|
{"id":"","label":"folder for processed images","localized":"","reload":"","hint":"folder for processed images"},
|
|
{"id":"","label":"folder for text generate","localized":"","reload":"","hint":"folder for text generate"},
|
|
{"id":"","label":"folder for tunable ops cache","localized":"","reload":"","hint":"folder for tunable ops cache"},
|
|
{"id":"","label":"folder for txt2img grids","localized":"","reload":"","hint":"folder for txt2img grids"},
|
|
{"id":"","label":"folder for videos","localized":"","reload":"","hint":"folder for videos"},
|
|
{"id":"","label":"folder with bsrgan models","localized":"","reload":"","hint":"folder with bsrgan models"},
|
|
{"id":"","label":"folder with chainner models","localized":"","reload":"","hint":"folder with chainner models"},
|
|
{"id":"","label":"folder with clip models","localized":"","reload":"","hint":"folder with clip models"},
|
|
{"id":"","label":"folder with codeformer models","localized":"","reload":"","hint":"folder with codeformer models"},
|
|
{"id":"","label":"folder with control models","localized":"","reload":"","hint":"folder with control models"},
|
|
{"id":"","label":"folder with esrgan models","localized":"","reload":"","hint":"folder with esrgan models"},
|
|
{"id":"","label":"folder with gfpgan models","localized":"","reload":"","hint":"folder with gfpgan models"},
|
|
{"id":"","label":"folder with huggingface models","localized":"","reload":"","hint":"folder with huggingface models"},
|
|
{"id":"","label":"folder with hypernetwork models","localized":"","reload":"","hint":"folder with hypernetwork models"},
|
|
{"id":"","label":"folder with ldsr models","localized":"","reload":"","hint":"folder with ldsr models"},
|
|
{"id":"","label":"folder with lora network(s)","localized":"","reload":"","hint":"folder with lora network(s)"},
|
|
{"id":"","label":"folder with realesrgan models","localized":"","reload":"","hint":"folder with realesrgan models"},
|
|
{"id":"","label":"folder with scunet models","localized":"","reload":"","hint":"folder with scunet models"},
|
|
{"id":"","label":"folder with stable diffusion models","localized":"","reload":"","hint":"folder with stable diffusion models"},
|
|
{"id":"","label":"folder with swinir models","localized":"","reload":"","hint":"folder with swinir models"},
|
|
{"id":"","label":"folder with text encoder files","localized":"","reload":"","hint":"folder with text encoder files"},
|
|
{"id":"","label":"folder with textual inversion embeddings","localized":"","reload":"","hint":"folder with textual inversion embeddings"},
|
|
{"id":"","label":"folder with unet files","localized":"","reload":"","hint":"folder with unet files"},
|
|
{"id":"","label":"folder with user-defined wildcards","localized":"","reload":"","hint":"folder with user-defined wildcards"},
|
|
{"id":"","label":"folder with vae files","localized":"","reload":"","hint":"folder with vae files"},
|
|
{"id":"","label":"folder with yolo models","localized":"","reload":"","hint":"folder with yolo models"},
|
|
{"id":"","label":"font color","localized":"","reload":"","hint":"font color"},
|
|
{"id":"","label":"font file","localized":"","reload":"","hint":"font file"},
|
|
{"id":"","label":"font size","localized":"","reload":"","hint":"font size"},
|
|
{"id":"","label":"force model eval","localized":"","reload":"","hint":"force model eval"},
|
|
{"id":"","label":"foreground threshold","localized":"","reload":"","hint":"foreground threshold"},
|
|
{"id":"","label":"fp4","localized":"","reload":"","hint":"fp4"},
|
|
{"id":"","label":"frame change sensitivity","localized":"","reload":"","hint":"frame change sensitivity"},
|
|
{"id":"","label":"frames","localized":"","reload":"","hint":"frames"},
|
|
{"id":"","label":"freeinit","localized":"","reload":"","hint":"freeinit"},
|
|
{"id":"","label":"freeu enabled","localized":"","reload":"","hint":"freeu enabled"},
|
|
{"id":"","label":"freeu preset","localized":"","reload":"","hint":"freeu preset"},
|
|
{"id":"","label":"full vae","localized":"","reload":"","hint":"full vae"},
|
|
{"id":"","label":"full-depth cudnn benchmark","localized":"","reload":"","hint":"full-depth cudnn benchmark"},
|
|
{"id":"","label":"fuse strength","localized":"","reload":"","hint":"fuse strength"},
|
|
{"id":"","label":"fused projections","localized":"","reload":"","hint":"fused projections"},
|
|
{"id":"","label":"gamma","localized":"","reload":"","hint":"gamma"},
|
|
{"id":"","label":"gamma corrected","localized":"","reload":"","hint":"gamma corrected"},
|
|
{"id":"","label":"gate step","localized":"","reload":"","hint":"gate step"},
|
|
{"id":"","label":"gc threshold","localized":"","reload":"","hint":"gc threshold"},
|
|
{"id":"","label":"get changelog","localized":"","reload":"","hint":"get changelog"},
|
|
{"id":"","label":"gpu","localized":"","reload":"","hint":"gpu"},
|
|
{"id":"","label":"gradient","localized":"","reload":"","hint":"gradient"},
|
|
{"id":"","label":"grid background color","localized":"","reload":"","hint":"grid background color"},
|
|
{"id":"","label":"grid margins","localized":"","reload":"","hint":"grid margins"},
|
|
{"id":"","label":"grid sections:","localized":"","reload":"","hint":"grid sections:"},
|
|
{"id":"","label":"group size","localized":"","reload":"","hint":"group size"},
|
|
{"id":"","label":"guidance","localized":"","reload":"","hint":"guidance"},
|
|
{"id":"","label":"guidance start","localized":"","reload":"","hint":"guidance start"},
|
|
{"id":"","label":"guidance stop","localized":"","reload":"","hint":"guidance stop"},
|
|
{"id":"","label":"guidance strength","localized":"","reload":"","hint":"guidance strength"},
|
|
{"id":"","label":"hands","localized":"","reload":"","hint":"hands"},
|
|
{"id":"","label":"hdr range","localized":"","reload":"","hint":"hdr range"},
|
|
{"id":"","label":"hed","localized":"","reload":"","hint":"hed"},
|
|
{"id":"","label":"height after","localized":"","reload":"","hint":"height after"},
|
|
{"id":"","label":"height before","localized":"","reload":"","hint":"height before"},
|
|
{"id":"","label":"height mask","localized":"","reload":"","hint":"height mask"},
|
|
{"id":"","label":"heun","localized":"","reload":"","hint":"heun"},
|
|
{"id":"","label":"heun flowmatch","localized":"","reload":"","hint":"heun flowmatch"},
|
|
{"id":"","label":"hidet","localized":"","reload":"","hint":"hidet"},
|
|
{"id":"","label":"high threshold","localized":"","reload":"","hint":"high threshold"},
|
|
{"id":"","label":"hires pass only","localized":"","reload":"","hint":"hires pass only"},
|
|
{"id":"","label":"hq init latents","localized":"","reload":"","hint":"hq init latents"},
|
|
{"id":"","label":"hue","localized":"","reload":"","hint":"hue"},
|
|
{"id":"","label":"huggingface mirror","localized":"","reload":"","hint":"huggingface mirror"},
|
|
{"id":"","label":"huggingface token","localized":"","reload":"","hint":"huggingface token"},
|
|
{"id":"","label":"hunyuan","localized":"","reload":"","hint":"hunyuan"},
|
|
{"id":"","label":"il","localized":"","reload":"","hint":"il"},
|
|
{"id":"","label":"image height","localized":"","reload":"","hint":"image height"},
|
|
{"id":"","label":"image quality","localized":"","reload":"","hint":"image quality"},
|
|
{"id":"","label":"image transparent color fill","localized":"","reload":"","hint":"image transparent color fill"},
|
|
{"id":"","label":"image watermark file","localized":"","reload":"","hint":"image watermark file"},
|
|
{"id":"","label":"image watermark position","localized":"","reload":"","hint":"image watermark position"},
|
|
{"id":"","label":"image width","localized":"","reload":"","hint":"image width"},
|
|
{"id":"","label":"include images","localized":"","reload":"","hint":"include images"},
|
|
{"id":"","label":"include main grid","localized":"","reload":"","hint":"include main grid"},
|
|
{"id":"","label":"include mask in outputs","localized":"","reload":"","hint":"include mask in outputs"},
|
|
{"id":"","label":"include original image","localized":"","reload":"","hint":"include original image"},
|
|
{"id":"","label":"include scores in results when available","localized":"","reload":"","hint":"include scores in results when available"},
|
|
{"id":"","label":"include sub grids","localized":"","reload":"","hint":"include sub grids"},
|
|
{"id":"","label":"inductor","localized":"","reload":"","hint":"inductor"},
|
|
{"id":"","label":"info","localized":"","reload":"","hint":"info"},
|
|
{"id":"","label":"info object","localized":"","reload":"","hint":"info object"},
|
|
{"id":"","label":"inpaint","localized":"","reload":"","hint":"inpaint"},
|
|
{"id":"","label":"inpaint masked only","localized":"","reload":"","hint":"inpaint masked only"},
|
|
{"id":"","label":"inpainting include greyscale mask in results","localized":"","reload":"","hint":"inpainting include greyscale mask in results"},
|
|
{"id":"","label":"inpainting include masked composite in results","localized":"","reload":"","hint":"inpainting include masked composite in results"},
|
|
{"id":"","label":"input model","localized":"","reload":"","hint":"input model"},
|
|
{"id":"","label":"intermediates","localized":"","reload":"","hint":"intermediates"},
|
|
{"id":"","label":"interpolate frames","localized":"","reload":"","hint":"interpolate frames"},
|
|
{"id":"","label":"interpolation method","localized":"","reload":"","hint":"interpolation method"},
|
|
{"id":"","label":"invert","localized":"","reload":"","hint":"invert"},
|
|
{"id":"","label":"invert mask","localized":"","reload":"","hint":"invert mask"},
|
|
{"id":"","label":"iou","localized":"","reload":"","hint":"iou"},
|
|
{"id":"","label":"ipex","localized":"","reload":"","hint":"ipex"},
|
|
{"id":"","label":"ipndm","localized":"","reload":"","hint":"ipndm"},
|
|
{"id":"","label":"item edge blur","localized":"","reload":"","hint":"item edge blur"},
|
|
{"id":"","label":"item padding","localized":"","reload":"","hint":"item padding"},
|
|
{"id":"","label":"iterate seed per line","localized":"","reload":"","hint":"iterate seed per line"},
|
|
{"id":"","label":"iterations","localized":"","reload":"","hint":"iterations"},
|
|
{"id":"","label":"karras","localized":"","reload":"","hint":"karras"},
|
|
{"id":"","label":"kdpm2","localized":"","reload":"","hint":"kdpm2"},
|
|
{"id":"","label":"kdpm2 a","localized":"","reload":"","hint":"kdpm2 a"},
|
|
{"id":"","label":"keep incomplete images","localized":"","reload":"","hint":"keep incomplete images"},
|
|
{"id":"","label":"Keep Thinking Trace","localized":"","reload":"","hint":"Include the model's reasoning process in the final output.<br>Useful for understanding how the model arrived at its answer.<br>Only works with models that support thinking mode."},
|
|
{"id":"","label":"Keep Prefill","localized":"","reload":"","hint":"Include the prefill text at the beginning of the final output.<br>If disabled, the prefill text used to guide the model is removed from the result."},
|
|
{"id":"","label":"large","localized":"","reload":"","hint":"large"},
|
|
{"id":"","label":"latent history size","localized":"","reload":"","hint":"latent history size"},
|
|
{"id":"","label":"latent mode","localized":"","reload":"","hint":"latent mode"},
|
|
{"id":"","label":"layer scales","localized":"","reload":"","hint":"layer scales"},
|
|
{"id":"","label":"layerwise casting storage","localized":"","reload":"","hint":"layerwise casting storage"},
|
|
{"id":"","label":"layerwise non-blocking operations","localized":"","reload":"","hint":"layerwise non-blocking operations"},
|
|
{"id":"","label":"lcm","localized":"","reload":"","hint":"lcm"},
|
|
{"id":"","label":"ldsr processing steps","localized":"","reload":"","hint":"ldsr processing steps"},
|
|
{"id":"","label":"left","localized":"","reload":"","hint":"left"},
|
|
{"id":"","label":"legend","localized":"","reload":"","hint":"legend"},
|
|
{"id":"","label":"length","localized":"","reload":"","hint":"length"},
|
|
{"id":"","label":"leres depth","localized":"","reload":"","hint":"leres depth"},
|
|
{"id":"","label":"level","localized":"","reload":"","hint":"level"},
|
|
{"id":"","label":"libs","localized":"","reload":"","hint":"libs"},
|
|
{"id":"","label":"light","localized":"","reload":"","hint":"light"},
|
|
{"id":"","label":"lineart","localized":"","reload":"","hint":"lineart"},
|
|
{"id":"","label":"list","localized":"","reload":"","hint":"list"},
|
|
{"id":"","label":"list model details","localized":"","reload":"","hint":"list model details"},
|
|
{"id":"","label":"lite","localized":"","reload":"","hint":"lite"},
|
|
{"id":"","label":"live update","localized":"","reload":"","hint":"live update"},
|
|
{"id":"","label":"lmsd","localized":"","reload":"","hint":"lmsd"},
|
|
{"id":"","label":"load custom diffusers pipeline","localized":"","reload":"","hint":"load custom diffusers pipeline"},
|
|
{"id":"","label":"load model directly to gpu","localized":"","reload":"","hint":"load model directly to gpu"},
|
|
{"id":"","label":"loaded lora","localized":"","reload":"","hint":"loaded lora"},
|
|
{"id":"","label":"logsnr","localized":"","reload":"","hint":"logsnr"},
|
|
{"id":"","label":"loop","localized":"","reload":"","hint":"loop"},
|
|
{"id":"","label":"LoRA add hash info to metadata","localized":"","reload":"","hint":"Include LoRA file hashes in generated image metadata.<br>Useful for reproducibility and tracking which exact LoRA versions were used."},
|
|
{"id":"","label":"LoRA auto-apply tags","localized":"","reload":"","hint":"Automatically add trigger words/tags from LoRA metadata to your prompt.<br>Set to the number of tags to auto-apply, e.g., 3 = add top 3 trigger tags.<br>Set to 0 to disable, -1 to add all available tags."},
|
|
{"id":"","label":"lora load using diffusers method for selected models","localized":"","reload":"","hint":"lora load using diffusers method for selected models"},
|
|
{"id":"","label":"lora load using legacy method","localized":"","reload":"","hint":"lora load using legacy method"},
|
|
{"id":"","label":"lora target filename","localized":"","reload":"","hint":"lora target filename"},
|
|
{"id":"","label":"low order","localized":"","reload":"","hint":"low order"},
|
|
{"id":"","label":"low threshold","localized":"","reload":"","hint":"low threshold"},
|
|
{"id":"","label":"ltx model","localized":"","reload":"","hint":"ltx model"},
|
|
{"id":"","label":"lumina: use mask in transformers","localized":"","reload":"","hint":"lumina: use mask in transformers"},
|
|
{"id":"","label":"manual block merge","localized":"","reload":"","hint":"manual block merge"},
|
|
{"id":"","label":"marigold depth","localized":"","reload":"","hint":"marigold depth"},
|
|
{"id":"","label":"mask dropout","localized":"","reload":"","hint":"mask dropout"},
|
|
{"id":"","label":"mask invert","localized":"","reload":"","hint":"mask invert"},
|
|
{"id":"","label":"mask only","localized":"","reload":"","hint":"mask only"},
|
|
{"id":"","label":"mask strength","localized":"","reload":"","hint":"mask strength"},
|
|
{"id":"","label":"masked","localized":"","reload":"","hint":"masked"},
|
|
{"id":"","label":"max faces","localized":"","reload":"","hint":"max faces"},
|
|
{"id":"","label":"max flavors","localized":"","reload":"","hint":"max flavors"},
|
|
{"id":"","label":"max guidance","localized":"","reload":"","hint":"max guidance"},
|
|
{"id":"","label":"max length","localized":"","reload":"","hint":"max length"},
|
|
{"id":"","label":"max object size","localized":"","reload":"","hint":"max object size"},
|
|
{"id":"","label":"max range","localized":"","reload":"","hint":"max range"},
|
|
{"id":"","label":"Max tokens","localized":"","reload":"","hint":"Maximum number of tokens the model can generate in its response.<br>The model is not aware of this limit during generation and it won't make the model try to generate more detailed or more concise responses, it simply sets the hard limit for the length, and will forcefully cut off the response when the limit is reached."},
|
|
{"id":"","label":"max words","localized":"","reload":"","hint":"max words"},
|
|
{"id":"","label":"max-autotune","localized":"","reload":"","hint":"max-autotune"},
|
|
{"id":"","label":"max-autotune-no-cudagraphs","localized":"","reload":"","hint":"max-autotune-no-cudagraphs"},
|
|
{"id":"","label":"maximum image size (mp)","localized":"","reload":"","hint":"maximum image size (mp)"},
|
|
{"id":"","label":"maximum number of units","localized":"","reload":"","hint":"maximum number of units"},
|
|
{"id":"","label":"maximum rank","localized":"","reload":"","hint":"maximum rank"},
|
|
{"id":"","label":"mediapipe face","localized":"","reload":"","hint":"mediapipe face"},
|
|
{"id":"","label":"medium","localized":"","reload":"","hint":"medium"},
|
|
{"id":"","label":"mediums","localized":"","reload":"","hint":"mediums"},
|
|
{"id":"","label":"memory","localized":"","reload":"","hint":"memory"},
|
|
{"id":"","label":"memory limit","localized":"","reload":"","hint":"memory limit"},
|
|
{"id":"","label":"memory optimization","localized":"","reload":"","hint":"memory optimization"},
|
|
{"id":"","label":"merge alpha","localized":"","reload":"","hint":"merge alpha"},
|
|
{"id":"","label":"method","localized":"","reload":"","hint":"method"},
|
|
{"id":"","label":"method after","localized":"","reload":"","hint":"method after"},
|
|
{"id":"","label":"method before","localized":"","reload":"","hint":"method before"},
|
|
{"id":"","label":"method mask","localized":"","reload":"","hint":"method mask"},
|
|
{"id":"","label":"midas depth","localized":"","reload":"","hint":"midas depth"},
|
|
{"id":"","label":"migraphx","localized":"","reload":"","hint":"migraphx"},
|
|
{"id":"","label":"min flavors","localized":"","reload":"","hint":"min flavors"},
|
|
{"id":"","label":"min guidance","localized":"","reload":"","hint":"min guidance"},
|
|
{"id":"","label":"min length","localized":"","reload":"","hint":"min length"},
|
|
{"id":"","label":"min object size","localized":"","reload":"","hint":"min object size"},
|
|
{"id":"","label":"mine","localized":"","reload":"","hint":"mine"},
|
|
{"id":"","label":"mlsd","localized":"","reload":"","hint":"mlsd"},
|
|
{"id":"","label":"mm","localized":"","reload":"","hint":"mm"},
|
|
{"id":"","label":"mode","localized":"","reload":"","hint":"mode"},
|
|
{"id":"","label":"mode after","localized":"","reload":"","hint":"mode after"},
|
|
{"id":"","label":"mode before","localized":"","reload":"","hint":"mode before"},
|
|
{"id":"","label":"mode mask","localized":"","reload":"","hint":"mode mask"},
|
|
{"id":"","label":"mode x-axis","localized":"","reload":"","hint":"mode x-axis"},
|
|
{"id":"","label":"mode y-axis","localized":"","reload":"","hint":"mode y-axis"},
|
|
{"id":"","label":"model auto-download on demand","localized":"","reload":"","hint":"model auto-download on demand"},
|
|
{"id":"","label":"model autoload on start","localized":"","reload":"","hint":"model autoload on start"},
|
|
{"id":"","label":"model compile fullgraph","localized":"","reload":"","hint":"model compile fullgraph"},
|
|
{"id":"","label":"model compile suppress errors","localized":"","reload":"","hint":"model compile suppress errors"},
|
|
{"id":"","label":"model compile verbose mode","localized":"","reload":"","hint":"model compile verbose mode"},
|
|
{"id":"","label":"model info","localized":"","reload":"","hint":"model info"},
|
|
{"id":"","label":"model metadata","localized":"","reload":"","hint":"model metadata"},
|
|
{"id":"","label":"model name","localized":"","reload":"","hint":"model name"},
|
|
{"id":"","label":"model precision","localized":"","reload":"","hint":"model precision"},
|
|
{"id":"","label":"model type","localized":"","reload":"","hint":"model type"},
|
|
{"id":"","label":"model url","localized":"","reload":"","hint":"model url"},
|
|
{"id":"","label":"modern","localized":"","reload":"","hint":"modern"},
|
|
{"id":"","label":"momentum","localized":"","reload":"","hint":"momentum"},
|
|
{"id":"","label":"motion level","localized":"","reload":"","hint":"motion level"},
|
|
{"id":"","label":"mount url subpath","localized":"","reload":"","hint":"mount url subpath"},
|
|
{"id":"","label":"move base model to cpu when using refiner","localized":"","reload":"","hint":"move base model to cpu when using refiner"},
|
|
{"id":"","label":"move base model to cpu when using vae","localized":"","reload":"","hint":"move base model to cpu when using vae"},
|
|
{"id":"","label":"move detailer model to cpu when complete","localized":"","reload":"","hint":"move detailer model to cpu when complete"},
|
|
{"id":"","label":"move refiner model to cpu when not in use","localized":"","reload":"","hint":"move refiner model to cpu when not in use"},
|
|
{"id":"","label":"movements","localized":"","reload":"","hint":"movements"},
|
|
{"id":"","label":"multi decoder","localized":"","reload":"","hint":"multi decoder"},
|
|
{"id":"","label":"multistep restore","localized":"","reload":"","hint":"multistep restore"},
|
|
{"id":"","label":"native","localized":"","reload":"","hint":"native"},
|
|
{"id":"","label":"near threshold","localized":"","reload":"","hint":"near threshold"},
|
|
{"id":"","label":"negative","localized":"","reload":"","hint":"negative"},
|
|
{"id":"","label":"network negative prompt","localized":"","reload":"","hint":"network negative prompt"},
|
|
{"id":"","label":"network parameters","localized":"","reload":"","hint":"network parameters"},
|
|
{"id":"","label":"network prompt","localized":"","reload":"","hint":"network prompt"},
|
|
{"id":"","label":"new model name","localized":"","reload":"","hint":"new model name"},
|
|
{"id":"","label":"nf4","localized":"","reload":"","hint":"nf4"},
|
|
{"id":"","label":"nms","localized":"","reload":"","hint":"nms"},
|
|
{"id":"","label":"noise","localized":"","reload":"","hint":"noise"},
|
|
{"id":"","label":"noise multiplier (eta)","localized":"","reload":"","hint":"noise multiplier (eta)"},
|
|
{"id":"","label":"noise multiplier for image processing","localized":"","reload":"","hint":"noise multiplier for image processing"},
|
|
{"id":"","label":"noise seed delta (eta)","localized":"","reload":"","hint":"noise seed delta (eta)"},
|
|
{"id":"","label":"noise strength","localized":"","reload":"","hint":"noise strength"},
|
|
{"id":"","label":"none","localized":"","reload":"","hint":"none"},
|
|
{"id":"","label":"note","localized":"","reload":"","hint":"note"},
|
|
{"id":"","label":"nothing","localized":"","reload":"","hint":"nothing"},
|
|
{"id":"","label":"num beams","localized":"","reload":"","hint":"Maintains multiple candidate paths simultaneously and selects the overall best sequence.<br>Like exploring several drafts at once to find the best one. More thorough but much slower and less creative than random sampling.<br>Generally not recommended, most modern VLMs perform better with sampling methods.<br>Set to 1 to disable."},
|
|
{"id":"","label":"number","localized":"","reload":"","hint":"number"},
|
|
{"id":"","label":"numbered filenames","localized":"","reload":"","hint":"numbered filenames"},
|
|
{"id":"","label":"offload","localized":"","reload":"","hint":"offload"},
|
|
{"id":"","label":"offload face module","localized":"","reload":"","hint":"offload face module"},
|
|
{"id":"","label":"offload models","localized":"","reload":"","hint":"offload models"},
|
|
{"id":"","label":"olive-ai","localized":"","reload":"","hint":"olive-ai"},
|
|
{"id":"","label":"onediff","localized":"","reload":"","hint":"onediff"},
|
|
{"id":"","label":"onnx","localized":"","reload":"","hint":"onnx"},
|
|
{"id":"","label":"openbody","localized":"","reload":"","hint":"openbody"},
|
|
{"id":"","label":"openclip","localized":"","reload":"","hint":"openclip"},
|
|
{"id":"","label":"openvino disable memory cleanup after compile","localized":"","reload":"","hint":"openvino disable memory cleanup after compile"},
|
|
{"id":"","label":"openvino disable model caching","localized":"","reload":"","hint":"openvino disable model caching"},
|
|
{"id":"","label":"openvino mode","localized":"","reload":"","hint":"openvino mode"},
|
|
{"id":"","label":"openvino_fx","localized":"","reload":"","hint":"openvino_fx"},
|
|
{"id":"","label":"optional image description","localized":"","reload":"","hint":"optional image description"},
|
|
{"id":"","label":"optional init image or video","localized":"","reload":"","hint":"optional init image or video"},
|
|
{"id":"","label":"order","localized":"","reload":"","hint":"order"},
|
|
{"id":"","label":"ortho","localized":"","reload":"","hint":"ortho"},
|
|
{"id":"","label":"outpaint","localized":"","reload":"","hint":"outpaint"},
|
|
{"id":"","label":"output model","localized":"","reload":"","hint":"output model"},
|
|
{"id":"","label":"override resolution","localized":"","reload":"","hint":"override resolution"},
|
|
{"id":"","label":"override sampler","localized":"","reload":"","hint":"override sampler"},
|
|
{"id":"","label":"override scheduler","localized":"","reload":"","hint":"override scheduler"},
|
|
{"id":"","label":"override steps","localized":"","reload":"","hint":"override steps"},
|
|
{"id":"","label":"override t1 ratio","localized":"","reload":"","hint":"override t1 ratio"},
|
|
{"id":"","label":"override t2 ratio","localized":"","reload":"","hint":"override t2 ratio"},
|
|
{"id":"","label":"overwrite existing file","localized":"","reload":"","hint":"overwrite existing file"},
|
|
{"id":"","label":"overwrite model","localized":"","reload":"","hint":"overwrite model"},
|
|
{"id":"","label":"pad frames","localized":"","reload":"","hint":"pad frames"},
|
|
{"id":"","label":"padding","localized":"","reload":"","hint":"padding"},
|
|
{"id":"","label":"parallel process images in batch","localized":"","reload":"","hint":"parallel process images in batch"},
|
|
{"id":"","label":"parameter free","localized":"","reload":"","hint":"parameter free"},
|
|
{"id":"","label":"path to model file","localized":"","reload":"","hint":"path to model file"},
|
|
{"id":"","label":"path to notification sound","localized":"","reload":"","hint":"path to notification sound"},
|
|
{"id":"","label":"peft","localized":"","reload":"","hint":"peft"},
|
|
{"id":"","label":"penalty","localized":"","reload":"","hint":"penalty"},
|
|
{"id":"","label":"perflow","localized":"","reload":"","hint":"perflow"},
|
|
{"id":"","label":"perform injection","localized":"","reload":"","hint":"perform injection"},
|
|
{"id":"","label":"perform sdsa","localized":"","reload":"","hint":"perform sdsa"},
|
|
{"id":"","label":"perform warmup","localized":"","reload":"","hint":"perform warmup"},
|
|
{"id":"","label":"performance","localized":"","reload":"","hint":"performance"},
|
|
{"id":"","label":"photomaker model","localized":"","reload":"","hint":"photomaker model"},
|
|
{"id":"","label":"pidinet","localized":"","reload":"","hint":"pidinet"},
|
|
{"id":"","label":"pipeline","localized":"","reload":"","hint":"pipeline"},
|
|
{"id":"","label":"pixels to expand","localized":"","reload":"","hint":"pixels to expand"},
|
|
{"id":"","label":"platform","localized":"","reload":"","hint":"platform"},
|
|
{"id":"","label":"play","localized":"","reload":"","hint":"play"},
|
|
{"id":"","label":"play a notification upon completion","localized":"","reload":"","hint":"play a notification upon completion"},
|
|
{"id":"","label":"pndm","localized":"","reload":"","hint":"pndm"},
|
|
{"id":"","label":"polyexponential","localized":"","reload":"","hint":"polyexponential"},
|
|
{"id":"","label":"pony","localized":"","reload":"","hint":"pony"},
|
|
{"id":"","label":"pose confidence","localized":"","reload":"","hint":"pose confidence"},
|
|
{"id":"","label":"positive","localized":"","reload":"","hint":"positive"},
|
|
{"id":"","label":"postprocess mask","localized":"","reload":"","hint":"postprocess mask"},
|
|
{"id":"","label":"postprocess upscale","localized":"","reload":"","hint":"postprocess upscale"},
|
|
{"id":"","label":"postprocessing operation order","localized":"","reload":"","hint":"postprocessing operation order"},
|
|
{"id":"","label":"power","localized":"","reload":"","hint":"power"},
|
|
{"id":"","label":"preset","localized":"","reload":"","hint":"preset"},
|
|
{"id":"","label":"preset block merge","localized":"","reload":"","hint":"preset block merge"},
|
|
{"id":"","label":"preview","localized":"","reload":"","hint":"preview"},
|
|
{"id":"","label":"preview end","localized":"","reload":"","hint":"preview end"},
|
|
{"id":"","label":"preview start","localized":"","reload":"","hint":"preview start"},
|
|
{"id":"","label":"primary model","localized":"","reload":"","hint":"primary model"},
|
|
{"id":"","label":"processor move to cpu after use","localized":"","reload":"","hint":"processor move to cpu after use"},
|
|
{"id":"","label":"processor settings","localized":"","reload":"","hint":"processor settings"},
|
|
{"id":"","label":"processor unload after use","localized":"","reload":"","hint":"processor unload after use"},
|
|
{"id":"","label":"prompt ex","localized":"","reload":"","hint":"prompt ex"},
|
|
{"id":"","label":"prompt processor","localized":"","reload":"","hint":"prompt processor"},
|
|
{"id":"","label":"prompt strength","localized":"","reload":"","hint":"prompt strength"},
|
|
{"id":"","label":"prompt thresholds:","localized":"","reload":"","hint":"prompt thresholds:"},
|
|
{"id":"","label":"prompts","localized":"","reload":"","hint":"prompts"},
|
|
{"id":"","label":"provider","localized":"","reload":"","hint":"provider"},
|
|
{"id":"","label":"prune","localized":"","reload":"","hint":"prune"},
|
|
{"id":"","label":"quad","localized":"","reload":"","hint":"quad"},
|
|
{"id":"","label":"quantization activations type","localized":"","reload":"","hint":"quantization activations type"},
|
|
{"id":"","label":"quantization mode","localized":"","reload":"","hint":"quantization mode"},
|
|
{"id":"","label":"quantization type","localized":"","reload":"","hint":"quantization type"},
|
|
{"id":"","label":"quantization weights type","localized":"","reload":"","hint":"quantization weights type"},
|
|
{"id":"","label":"random seeds","localized":"","reload":"","hint":"random seeds"},
|
|
{"id":"","label":"range","localized":"","reload":"","hint":"range"},
|
|
{"id":"","label":"rebase","localized":"","reload":"","hint":"rebase"},
|
|
{"id":"","label":"recursive","localized":"","reload":"","hint":"recursive"},
|
|
{"id":"","label":"reduce-overhead","localized":"","reload":"","hint":"reduce-overhead"},
|
|
{"id":"","label":"redux prompt strength","localized":"","reload":"","hint":"redux prompt strength"},
|
|
{"id":"","label":"reference adain weight","localized":"","reload":"","hint":"reference adain weight"},
|
|
{"id":"","label":"reference query weight","localized":"","reload":"","hint":"reference query weight"},
|
|
{"id":"","label":"reference unit 1","localized":"","reload":"","hint":"reference unit 1"},
|
|
{"id":"","label":"refine foreground","localized":"","reload":"","hint":"refine foreground"},
|
|
{"id":"","label":"refresh bench","localized":"","reload":"","hint":"refresh bench"},
|
|
{"id":"","label":"refresh data","localized":"","reload":"","hint":"refresh data"},
|
|
{"id":"","label":"refresh state","localized":"","reload":"","hint":"refresh state"},
|
|
{"id":"","label":"refresh ui values","localized":"","reload":"","hint":"refresh ui values"},
|
|
{"id":"","label":"reinstall","localized":"","reload":"","hint":"reinstall"},
|
|
{"id":"","label":"remove background","localized":"","reload":"","hint":"remove background"},
|
|
{"id":"","label":"repeat x-axis","localized":"","reload":"","hint":"repeat x-axis"},
|
|
{"id":"","label":"repeat y-axis","localized":"","reload":"","hint":"repeat y-axis"},
|
|
{"id":"","label":"replace vae","localized":"","reload":"","hint":"replace vae"},
|
|
{"id":"","label":"repos","localized":"","reload":"","hint":"repos"},
|
|
{"id":"","label":"reprocess decode","localized":"","reload":"","hint":"reprocess decode"},
|
|
{"id":"","label":"reprocess face","localized":"","reload":"","hint":"reprocess face"},
|
|
{"id":"","label":"reprocess refine","localized":"","reload":"","hint":"reprocess refine"},
|
|
{"id":"","label":"request browser notifications","localized":"","reload":"","hint":"request browser notifications"},
|
|
{"id":"","label":"rescale","localized":"","reload":"","hint":"rescale betas with zero terminal snr"},
|
|
{"id":"","label":"rescale betas with zero terminal snr","localized":"","reload":"","hint":"rescale betas with zero terminal snr"},
|
|
{"id":"","label":"reset anchors","localized":"","reload":"","hint":"reset anchors"},
|
|
{"id":"","label":"residual diff threshold","localized":"","reload":"","hint":"residual diff threshold"},
|
|
{"id":"","label":"resize background color","localized":"","reload":"","hint":"resize background color"},
|
|
{"id":"","label":"resize method","localized":"","reload":"","hint":"resize method"},
|
|
{"id":"","label":"resize scale","localized":"","reload":"","hint":"resize scale"},
|
|
{"id":"","label":"restart step","localized":"","reload":"","hint":"restart step"},
|
|
{"id":"","label":"restore faces: codeformer","localized":"","reload":"","hint":"restore faces: codeformer"},
|
|
{"id":"","label":"restore faces: gfpgan","localized":"","reload":"","hint":"restore faces: gfpgan"},
|
|
{"id":"","label":"restore pipe on end","localized":"","reload":"","hint":"restore pipe on end"},
|
|
{"id":"","label":"restore unparsed prompt","localized":"","reload":"","hint":"restore unparsed prompt"},
|
|
{"id":"","label":"reswapper model","localized":"","reload":"","hint":"reswapper model"},
|
|
{"id":"","label":"return original images","localized":"","reload":"","hint":"return original images"},
|
|
{"id":"","label":"right","localized":"","reload":"","hint":"right"},
|
|
{"id":"","label":"root model folder","localized":"","reload":"","hint":"root model folder"},
|
|
{"id":"","label":"rows","localized":"","reload":"","hint":"rows"},
|
|
{"id":"","label":"run","localized":"","reload":"","hint":"run"},
|
|
{"id":"","label":"run benchmark","localized":"","reload":"","hint":"run benchmark"},
|
|
{"id":"","label":"sa solver","localized":"","reload":"","hint":"sa solver"},
|
|
{"id":"","label":"safetensors","localized":"","reload":"","hint":"safetensors"},
|
|
{"id":"","label":"same as primary","localized":"","reload":"","hint":"same as primary"},
|
|
{"id":"","label":"same latent","localized":"","reload":"","hint":"same latent"},
|
|
{"id":"","label":"sample","localized":"","reload":"","hint":"sample"},
|
|
{"id":"","label":"sampler","localized":"","reload":"","hint":"sampler"},
|
|
{"id":"","label":"sampler shift","localized":"","reload":"","hint":"sampler shift"},
|
|
{"id":"","label":"sana: use complex human instructions","localized":"","reload":"","hint":"sana: use complex human instructions"},
|
|
{"id":"","label":"saturation","localized":"","reload":"","hint":"saturation"},
|
|
{"id":"","label":"save all generated image grids","localized":"","reload":"","hint":"save all generated image grids"},
|
|
{"id":"","label":"save all generated images","localized":"","reload":"","hint":"save all generated images"},
|
|
{"id":"","label":"save caption files","localized":"","reload":"","hint":"save caption files"},
|
|
{"id":"","label":"save diffusers","localized":"","reload":"","hint":"save diffusers"},
|
|
{"id":"","label":"save hdr image","localized":"","reload":"","hint":"save hdr image"},
|
|
{"id":"","label":"save image before color correction","localized":"","reload":"","hint":"save image before color correction"},
|
|
{"id":"","label":"save image before detailer","localized":"","reload":"","hint":"save image before detailer"},
|
|
{"id":"","label":"save image before hires","localized":"","reload":"","hint":"save image before hires"},
|
|
{"id":"","label":"save image before refiner","localized":"","reload":"","hint":"save image before refiner"},
|
|
{"id":"","label":"save images to a subdirectory","localized":"","reload":"","hint":"save images to a subdirectory"},
|
|
{"id":"","label":"save init images","localized":"","reload":"","hint":"save init images"},
|
|
{"id":"","label":"save inpainting mask","localized":"","reload":"","hint":"save inpainting mask"},
|
|
{"id":"","label":"save inpainting masked composite","localized":"","reload":"","hint":"save inpainting masked composite"},
|
|
{"id":"","label":"save metadata","localized":"","reload":"","hint":"save metadata"},
|
|
{"id":"","label":"save only saves selected image","localized":"","reload":"","hint":"save only saves selected image"},
|
|
{"id":"","label":"save output","localized":"","reload":"","hint":"save output"},
|
|
{"id":"","label":"save safetensors","localized":"","reload":"","hint":"save safetensors"},
|
|
{"id":"","label":"save unparsed prompt","localized":"","reload":"","hint":"save unparsed prompt"},
|
|
{"id":"","label":"scale after","localized":"","reload":"","hint":"scale after"},
|
|
{"id":"","label":"scale before","localized":"","reload":"","hint":"scale before"},
|
|
{"id":"","label":"scale mask","localized":"","reload":"","hint":"scale mask"},
|
|
{"id":"","label":"scale factor","localized":"","reload":"","hint":"scale factor"},
|
|
{"id":"","label":"score","localized":"","reload":"","hint":"score"},
|
|
{"id":"","label":"score threshold","localized":"","reload":"","hint":"score threshold"},
|
|
{"id":"","label":"scribble","localized":"","reload":"","hint":"scribble"},
|
|
{"id":"","label":"sd15-attire","localized":"","reload":"","hint":"sd15-attire"},
|
|
{"id":"","label":"sd15-likeness","localized":"","reload":"","hint":"sd15-likeness"},
|
|
{"id":"","label":"sd15-navimixu","localized":"","reload":"","hint":"sd15-navimixu"},
|
|
{"id":"","label":"sd15-sexy","localized":"","reload":"","hint":"sd15-sexy"},
|
|
{"id":"","label":"sdxl-artstyle","localized":"","reload":"","hint":"sdxl-artstyle"},
|
|
{"id":"","label":"sdxl-negative","localized":"","reload":"","hint":"sdxl-negative"},
|
|
{"id":"","label":"sdxl-sexy","localized":"","reload":"","hint":"sdxl-sexy"},
|
|
{"id":"","label":"sdxl-sliders","localized":"","reload":"","hint":"sdxl-sliders"},
|
|
{"id":"","label":"sdxl-toon","localized":"","reload":"","hint":"sdxl-toon"},
|
|
{"id":"","label":"sdxl: use weighted pooled embeds","localized":"","reload":"","hint":"sdxl: use weighted pooled embeds"},
|
|
{"id":"","label":"search changelog","localized":"","reload":"","hint":"search changelog"},
|
|
{"id":"","label":"search models","localized":"","reload":"","hint":"search models"},
|
|
{"id":"","label":"search wiki pages","localized":"","reload":"","hint":"search wiki pages"},
|
|
{"id":"","label":"secondary model","localized":"","reload":"","hint":"secondary model"},
|
|
{"id":"","label":"segmentanything","localized":"","reload":"","hint":"segmentanything"},
|
|
{"id":"","label":"select","localized":"","reload":"","hint":"select"},
|
|
{"id":"","label":"select model","localized":"","reload":"","hint":"select model"},
|
|
{"id":"","label":"send interrupt","localized":"","reload":"","hint":"send interrupt"},
|
|
{"id":"","label":"send seed when sending prompt or image to other interface","localized":"","reload":"","hint":"send seed when sending prompt or image to other interface"},
|
|
{"id":"","label":"send size when sending prompt or image to another interface","localized":"","reload":"","hint":"send size when sending prompt or image to another interface"},
|
|
{"id":"","label":"sequential","localized":"","reload":"","hint":"sequential"},
|
|
{"id":"","label":"server start time","localized":"","reload":"","hint":"server start time"},
|
|
{"id":"","label":"set at prompt start","localized":"","reload":"","hint":"set at prompt start"},
|
|
{"id":"","label":"set ui menu states","localized":"","reload":"","hint":"set ui menu states"},
|
|
{"id":"","label":"share queries","localized":"","reload":"","hint":"share queries"},
|
|
{"id":"","label":"shared options","localized":"","reload":"","hint":"shared options"},
|
|
{"id":"","label":"sharpen","localized":"","reload":"","hint":"sharpen"},
|
|
{"id":"","label":"shift","localized":"","reload":"","hint":"shift"},
|
|
{"id":"","label":"show grid in results","localized":"","reload":"","hint":"show grid in results"},
|
|
{"id":"","label":"show input","localized":"","reload":"","hint":"show input"},
|
|
{"id":"","label":"show metadata in full screen image browser","localized":"","reload":"","hint":"show metadata in full screen image browser"},
|
|
{"id":"","label":"show motd","localized":"","reload":"","hint":"show motd"},
|
|
{"id":"","label":"show preview","localized":"","reload":"","hint":"show preview"},
|
|
{"id":"","label":"shuffle weights","localized":"","reload":"","hint":"shuffle weights"},
|
|
{"id":"","label":"sigma","localized":"","reload":"","hint":"sigma"},
|
|
{"id":"","label":"sigma churn","localized":"","reload":"","hint":"sigma churn"},
|
|
{"id":"","label":"sigma max","localized":"","reload":"","hint":"sigma max"},
|
|
{"id":"","label":"sigma min","localized":"","reload":"","hint":"sigma min"},
|
|
{"id":"","label":"sigma noise","localized":"","reload":"","hint":"sigma noise"},
|
|
{"id":"","label":"sigma tmin","localized":"","reload":"","hint":"sigma tmin"},
|
|
{"id":"","label":"simple merge","localized":"","reload":"","hint":"simple merge"},
|
|
{"id":"","label":"size","localized":"","reload":"","hint":"size"},
|
|
{"id":"","label":"sketch","localized":"","reload":"","hint":"sketch"},
|
|
{"id":"","label":"skip generation if nan found in latents","localized":"","reload":"","hint":"skip generation if nan found in latents"},
|
|
{"id":"","label":"skip guidance layers","localized":"","reload":"","hint":"skip guidance layers"},
|
|
{"id":"","label":"skip input frames","localized":"","reload":"","hint":"skip input frames"},
|
|
{"id":"","label":"slider","localized":"","reload":"","hint":"slider"},
|
|
{"id":"","label":"smooth mask","localized":"","reload":"","hint":"smooth mask"},
|
|
{"id":"","label":"solver order (where","localized":"","reload":"","hint":"solver order (where"},
|
|
{"id":"","label":"sort order","localized":"","reload":"","hint":"sort order"},
|
|
{"id":"","label":"source subject","localized":"","reload":"","hint":"source subject"},
|
|
{"id":"","label":"space","localized":"","reload":"","hint":"space"},
|
|
{"id":"","label":"spatial frequency","localized":"","reload":"","hint":"spatial frequency"},
|
|
{"id":"","label":"specify model revision","localized":"","reload":"","hint":"specify model revision"},
|
|
{"id":"","label":"specify model variant","localized":"","reload":"","hint":"specify model variant"},
|
|
{"id":"","label":"stable-fast","localized":"","reload":"","hint":"stable-fast"},
|
|
{"id":"","label":"standard","localized":"","reload":"","hint":"standard"},
|
|
{"id":"","label":"start","localized":"","reload":"","hint":"start"},
|
|
{"id":"","label":"start profiling","localized":"","reload":"","hint":"start profiling"},
|
|
{"id":"","label":"state","localized":"","reload":"","hint":"state"},
|
|
{"id":"","label":"stride","localized":"","reload":"","hint":"stride"},
|
|
{"id":"","label":"structure","localized":"","reload":"","hint":"structure"},
|
|
{"id":"","label":"style fidelity","localized":"","reload":"","hint":"style fidelity"},
|
|
{"id":"","label":"subject","localized":"","reload":"","hint":"subject"},
|
|
{"id":"","label":"submit results","localized":"","reload":"","hint":"submit results"},
|
|
{"id":"","label":"submodules","localized":"","reload":"","hint":"submodules"},
|
|
{"id":"","label":"swap x/y","localized":"","reload":"","hint":"swap x/y"},
|
|
{"id":"","label":"swap x/z","localized":"","reload":"","hint":"swap x/z"},
|
|
{"id":"","label":"swap y/z","localized":"","reload":"","hint":"swap y/z"},
|
|
{"id":"","label":"t2i adapter","localized":"","reload":"","hint":"t2i adapter"},
|
|
{"id":"","label":"t2i strength","localized":"","reload":"","hint":"t2i strength"},
|
|
{"id":"","label":"t2i-adapter unit 1","localized":"","reload":"","hint":"t2i-adapter unit 1"},
|
|
{"id":"","label":"t2i-adapter unit 2","localized":"","reload":"","hint":"t2i-adapter unit 2"},
|
|
{"id":"","label":"t2i-adapter unit 3","localized":"","reload":"","hint":"t2i-adapter unit 3"},
|
|
{"id":"","label":"t2i-adapter unit 4","localized":"","reload":"","hint":"t2i-adapter unit 4"},
|
|
{"id":"","label":"taesd","localized":"","reload":"","hint":"taesd"},
|
|
{"id":"","label":"taesd decode layers","localized":"","reload":"","hint":"taesd decode layers"},
|
|
{"id":"","label":"taesd variant","localized":"","reload":"","hint":"taesd variant"},
|
|
{"id":"","label":"target subject","localized":"","reload":"","hint":"target subject"},
|
|
{"id":"","label":"tcd","localized":"","reload":"","hint":"tcd"},
|
|
{"id":"","label":"tdd","localized":"","reload":"","hint":"tdd"},
|
|
{"id":"","label":"te","localized":"","reload":"","hint":"te"},
|
|
{"id":"","label":"temperature","localized":"","reload":"","hint":"Controls randomness in token selection by reshaping the probability distribution.<br>Like adjusting a dial between cautious predictability (low values ~0.4) and creative exploration (higher values ~1). Higher temperatures increase willingness to choose less obvious options, but makes outputs more unpredictable.<br><br>Set to 0 to disable, resulting in silent switch to greedy decoding, disabling sampling."},
|
|
{"id":"","label":"Thinking Mode","localized":"","reload":"","hint":"Enables thinking/reasoning, allowing the model to take more time to generate responses.<br>This can lead to more thoughtful and detailed answers, but will increase response time.<br>This setting affects both hybrid and thinking-only models, and in some may result in lower overall quality than expected. For thinking-only models like Qwen3-VL this setting might have to be combined with prefill to guarantee preventing thinking.<br><br>Models supporting this feature are marked with an \uf0eb icon."},
|
|
{"id":"","label":"Repetition penalty","localized":"","reload":"","hint":"Discourages reusing tokens that already appear in the prompt or output by penalizing their probabilities.<br>Like adding friction to revisiting previous choices. Helps break repetitive loops but may reduce coherence at aggressive values.<br><br>Set to 1 to disable."},
|
|
{"id":"","label":"text guidance scale","localized":"","reload":"","hint":"text guidance scale"},
|
|
{"id":"","label":"template","localized":"","reload":"","hint":"template"},
|
|
{"id":"","label":"temporal frequency","localized":"","reload":"","hint":"temporal frequency"},
|
|
{"id":"","label":"tertiary model","localized":"","reload":"","hint":"tertiary model"},
|
|
{"id":"","label":"text encoder cache size","localized":"","reload":"","hint":"text encoder cache size"},
|
|
{"id":"","label":"text encoder model","localized":"","reload":"","hint":"text encoder model"},
|
|
{"id":"","label":"text inputs","localized":"","reload":"","hint":"text inputs"},
|
|
{"id":"","label":"textbox","localized":"","reload":"","hint":"textbox"},
|
|
{"id":"","label":"threshold","localized":"","reload":"","hint":"threshold"},
|
|
{"id":"","label":"thresholding","localized":"","reload":"","hint":"thresholding"},
|
|
{"id":"","label":"tile frames","localized":"","reload":"","hint":"tile frames"},
|
|
{"id":"","label":"tile prompt: x=1 y=1","localized":"","reload":"","hint":"tile prompt: x=1 y=1"},
|
|
{"id":"","label":"tile prompt: x=1 y=2","localized":"","reload":"","hint":"tile prompt: x=1 y=2"},
|
|
{"id":"","label":"tile prompt: x=1 y=3","localized":"","reload":"","hint":"tile prompt: x=1 y=3"},
|
|
{"id":"","label":"tile prompt: x=1 y=4","localized":"","reload":"","hint":"tile prompt: x=1 y=4"},
|
|
{"id":"","label":"tile prompt: x=2 y=1","localized":"","reload":"","hint":"tile prompt: x=2 y=1"},
|
|
{"id":"","label":"tile prompt: x=2 y=2","localized":"","reload":"","hint":"tile prompt: x=2 y=2"},
|
|
{"id":"","label":"tile prompt: x=2 y=3","localized":"","reload":"","hint":"tile prompt: x=2 y=3"},
|
|
{"id":"","label":"tile prompt: x=2 y=4","localized":"","reload":"","hint":"tile prompt: x=2 y=4"},
|
|
{"id":"","label":"tile prompt: x=3 y=1","localized":"","reload":"","hint":"tile prompt: x=3 y=1"},
|
|
{"id":"","label":"tile prompt: x=3 y=2","localized":"","reload":"","hint":"tile prompt: x=3 y=2"},
|
|
{"id":"","label":"tile prompt: x=3 y=3","localized":"","reload":"","hint":"tile prompt: x=3 y=3"},
|
|
{"id":"","label":"tile prompt: x=3 y=4","localized":"","reload":"","hint":"tile prompt: x=3 y=4"},
|
|
{"id":"","label":"tile prompt: x=4 y=1","localized":"","reload":"","hint":"tile prompt: x=4 y=1"},
|
|
{"id":"","label":"tile prompt: x=4 y=2","localized":"","reload":"","hint":"tile prompt: x=4 y=2"},
|
|
{"id":"","label":"tile prompt: x=4 y=3","localized":"","reload":"","hint":"tile prompt: x=4 y=3"},
|
|
{"id":"","label":"tile prompt: x=4 y=4","localized":"","reload":"","hint":"tile prompt: x=4 y=4"},
|
|
{"id":"","label":"tiling options","localized":"","reload":"","hint":"tiling options"},
|
|
{"id":"","label":"time embedding mix","localized":"","reload":"","hint":"time embedding mix"},
|
|
{"id":"","label":"time_quadratic","localized":"","reload":"","hint":"time_quadratic"},
|
|
{"id":"","label":"time_uniform","localized":"","reload":"","hint":"time_uniform"},
|
|
{"id":"","label":"timestep","localized":"","reload":"","hint":"timestep"},
|
|
{"id":"","label":"timestep skip end","localized":"","reload":"","hint":"timestep skip end"},
|
|
{"id":"","label":"timestep skip start","localized":"","reload":"","hint":"timestep skip start"},
|
|
{"id":"","label":"timesteps","localized":"","reload":"","hint":"timesteps"},
|
|
{"id":"","label":"timesteps override","localized":"","reload":"","hint":"timesteps override"},
|
|
{"id":"","label":"timesteps presets","localized":"","reload":"","hint":"timesteps presets"},
|
|
{"id":"","label":"timesteps range","localized":"","reload":"","hint":"timesteps range"},
|
|
{"id":"","label":"tiny","localized":"","reload":"","hint":"tiny"},
|
|
{"id":"","label":"todo","localized":"","reload":"","hint":"todo"},
|
|
{"id":"","label":"tome","localized":"","reload":"","hint":"tome"},
|
|
{"id":"","label":"tool","localized":"","reload":"","hint":"tool"},
|
|
{"id":"","label":"top-k","localized":"","reload":"","hint":"Limits token selection to the K most likely candidates at each step.<br>Lower values (e.g., 40) make outputs more focused and predictable, while higher values allow more diverse choices.<br><br>Set to 0 to disable."},
|
|
{"id":"","label":"top-p","localized":"","reload":"","hint":"Selects tokens from the smallest set whose cumulative probability exceeds P (e.g., 0.9).<br>Dynamically adapts the number of candidates based on model confidence; fewer options when certain, more when uncertain.<br><br>Set to 1 to disable."},
|
|
{"id":"","label":"torch","localized":"","reload":"","hint":"torch"},
|
|
{"id":"","label":"transformer","localized":"","reload":"","hint":"transformer"},
|
|
{"id":"","label":"trigger word","localized":"","reload":"","hint":"trigger word"},
|
|
{"id":"","label":"true","localized":"","reload":"","hint":"true"},
|
|
{"id":"","label":"tunable ops limit","localized":"","reload":"","hint":"tunable ops limit"},
|
|
{"id":"","label":"ufogen","localized":"","reload":"","hint":"ufogen"},
|
|
{"id":"","label":"ui card size (px)","localized":"","reload":"","hint":"ui card size (px)"},
|
|
{"id":"","label":"ui fetch network info on mouse-over","localized":"","reload":"","hint":"ui fetch network info on mouse-over"},
|
|
{"id":"","label":"ui height (%)","localized":"","reload":"","hint":"ui height (%)"},
|
|
{"id":"","label":"ui locale","localized":"","reload":"","hint":"ui locale"},
|
|
{"id":"","label":"ui request timeout","localized":"","reload":"","hint":"ui request timeout"},
|
|
{"id":"","label":"ui show on startup","localized":"","reload":"","hint":"ui show on startup"},
|
|
{"id":"","label":"ui sidebar width (%)","localized":"","reload":"","hint":"ui sidebar width (%)"},
|
|
{"id":"","label":"ui theme","localized":"","reload":"","hint":"ui theme"},
|
|
{"id":"","label":"unet","localized":"","reload":"","hint":"unet"},
|
|
{"id":"","label":"unet depth","localized":"","reload":"","hint":"unet depth"},
|
|
{"id":"","label":"unet enabled","localized":"","reload":"","hint":"unet enabled"},
|
|
{"id":"","label":"unet max tile size","localized":"","reload":"","hint":"unet max tile size"},
|
|
{"id":"","label":"unet min tile size","localized":"","reload":"","hint":"unet min tile size"},
|
|
{"id":"","label":"unet model","localized":"","reload":"","hint":"unet model"},
|
|
{"id":"","label":"unet swap size","localized":"","reload":"","hint":"unet swap size"},
|
|
{"id":"","label":"uniform","localized":"","reload":"","hint":"uniform"},
|
|
{"id":"","label":"units","localized":"","reload":"","hint":"units"},
|
|
{"id":"","label":"unload current model from vram","localized":"","reload":"","hint":"unload current model from vram"},
|
|
{"id":"","label":"unload upscaler after processing","localized":"","reload":"","hint":"unload upscaler after processing"},
|
|
{"id":"","label":"unset","localized":"","reload":"","hint":"unset"},
|
|
{"id":"","label":"up","localized":"","reload":"","hint":"up"},
|
|
{"id":"","label":"upcast attention layer","localized":"","reload":"","hint":"upcast attention layer"},
|
|
{"id":"","label":"update","localized":"","reload":"","hint":"update"},
|
|
{"id":"","label":"upload","localized":"","reload":"","hint":"upload"},
|
|
{"id":"","label":"use brownian noise","localized":"","reload":"","hint":"use brownian noise"},
|
|
{"id":"","label":"use cached model config when available","localized":"","reload":"","hint":"use cached model config when available"},
|
|
{"id":"","label":"use defaults","localized":"","reload":"","hint":"use defaults"},
|
|
{"id":"","label":"use dynamic thresholding","localized":"","reload":"","hint":"use dynamic thresholding"},
|
|
{"id":"","label":"use fixed width thumbnails","localized":"","reload":"","hint":"use fixed width thumbnails"},
|
|
{"id":"","label":"use image gallery cache","localized":"","reload":"","hint":"use image gallery cache"},
|
|
{"id":"","label":"use karras sigmas","localized":"","reload":"","hint":"use karras sigmas"},
|
|
{"id":"","label":"use line break as prompt segment marker","localized":"","reload":"","hint":"use line break as prompt segment marker"},
|
|
{"id":"","label":"use model ema weights when possible","localized":"","reload":"","hint":"use model ema weights when possible"},
|
|
{"id":"","label":"use quantization","localized":"","reload":"","hint":"use quantization"},
|
|
{"id":"","label":"use random seeds","localized":"","reload":"","hint":"use random seeds"},
|
|
{"id":"","label":"use reference values when available","localized":"","reload":"","hint":"use reference values when available"},
|
|
{"id":"","label":"use same seed","localized":"","reload":"","hint":"use same seed"},
|
|
{"id":"","label":"use samplers","localized":"","reload":"","hint":"Enable to use sampling (randomly selecting tokens based on sampling methods like Top-k or Top-p) or disable to use greedy decoding (selecting the most probable token at each step).<br>Enabling makes outputs more diverse and more creative but less deterministic."},
|
|
{"id":"","label":"use separate base dict","localized":"","reload":"","hint":"use separate base dict"},
|
|
{"id":"","label":"use simplified solvers in final steps","localized":"","reload":"","hint":"use simplified solvers in final steps"},
|
|
{"id":"","label":"use text inputs","localized":"","reload":"","hint":"use text inputs"},
|
|
{"id":"","label":"user","localized":"","reload":"","hint":"user"},
|
|
{"id":"","label":"username","localized":"","reload":"","hint":"username"},
|
|
{"id":"","label":"v_prediction","localized":"","reload":"","hint":"v_prediction"},
|
|
{"id":"","label":"vae enabled","localized":"","reload":"","hint":"vae enabled"},
|
|
{"id":"","label":"vae sliced encode","localized":"","reload":"","hint":"vae sliced encode"},
|
|
{"id":"","label":"vae swap size","localized":"","reload":"","hint":"vae swap size"},
|
|
{"id":"","label":"vae tile overlap","localized":"","reload":"","hint":"vae tile overlap"},
|
|
{"id":"","label":"vae tile size","localized":"","reload":"","hint":"vae tile size"},
|
|
{"id":"","label":"vary_coeff","localized":"","reload":"","hint":"vary_coeff"},
|
|
{"id":"","label":"vdm solver","localized":"","reload":"","hint":"vdm solver"},
|
|
{"id":"","label":"version","localized":"","reload":"","hint":"version"},
|
|
{"id":"","label":"vgen params","localized":"","reload":"","hint":"vgen params"},
|
|
{"id":"","label":"vibrance","localized":"","reload":"","hint":"vibrance"},
|
|
{"id":"","label":"video file","localized":"","reload":"","hint":"video file"},
|
|
{"id":"","label":"video type","localized":"","reload":"","hint":"video type"},
|
|
{"id":"","label":"vlm","localized":"","reload":"","hint":"vlm"},
|
|
{"id":"","label":"vlm model","localized":"","reload":"","hint":"Select which model to use for Visual Language tasks.<br><br>Models which support thinking mode are marked with an \uf0eb icon."},
|
|
{"id":"","label":"vlm: default model","localized":"","reload":"","hint":"vlm: default model"},
|
|
{"id":"","label":"vlm: default prompt","localized":"","reload":"","hint":"vlm: default prompt"},
|
|
{"id":"","label":"vlm: max length","localized":"","reload":"","hint":"vlm: max length"},
|
|
{"id":"","label":"VLM Num Beams","localized":"","reload":"","hint":"Maintains multiple candidate paths simultaneously and selects the overall best sequence.<br>Like exploring several drafts at once to find the best one. More thorough but much slower and less creative than random sampling.<br>Generally not recommended, most modern VLMs perform better with sampling methods.<br>Set to 1 to disable."},
|
|
{"id":"","label":"vlm: top-k","localized":"","reload":"","hint":"Limits token selection to the K most likely candidates at each step.<br>Lower values (e.g., 40) make outputs more focused and predictable, while higher values allow more diverse choices.<br>Set to 0 to disable."},
|
|
{"id":"","label":"vlm: top-p","localized":"","reload":"","hint":"Selects tokens from the smallest set whose cumulative probability exceeds P (e.g., 0.9).<br>Dynamically adapts the number of candidates based on model confidence; fewer options when certain, more when uncertain.<br>Set to 1 to disable."},
|
|
{"id":"","label":"vlm: use sample method","localized":"","reload":"","hint":"Enable to use sampling (randomly selecting tokens based on sampling methods like Top-k or Top-p) or disable to use greedy decoding (selecting the most probable token at each step).<br>Enabling makes outputs more diverse and creative but less deterministic."},
|
|
{"id":"","label":"VLM Max tokens","localized":"","reload":"","hint":"Maximum number of tokens the model can generate in its response.<br>The model is not aware of this limit during generation and it won't make the model try to generate more detailed or more concise responses, it simply sets the hard limit for the length, and will forcefully cut off the response when the limit is reached."},
|
|
{"id":"","label":"VLM Temperature","localized":"","reload":"","hint":"Controls randomness in token selection. Lower values (e.g., 0.1) make outputs more focused and deterministic, always choosing high-probability tokens.<br>Higher values (e.g., 0.9) increase creativity and diversity by allowing less probable tokens.<br><br>Set to 0 for fully deterministic output (always picks the most likely token)."},
|
|
{"id":"","label":"warmth","localized":"","reload":"","hint":"warmth"},
|
|
{"id":"","label":"webp lossless compression","localized":"","reload":"","hint":"webp lossless compression"},
|
|
{"id":"","label":"weight","localized":"","reload":"","hint":"weight"},
|
|
{"id":"","label":"width after","localized":"","reload":"","hint":"width after"},
|
|
{"id":"","label":"width before","localized":"","reload":"","hint":"width before"},
|
|
{"id":"","label":"width mask","localized":"","reload":"","hint":"width mask"},
|
|
{"id":"","label":"wiki","localized":"","reload":"","hint":"wiki"},
|
|
{"id":"","label":"wildcards","localized":"","reload":"","hint":"wildcards"},
|
|
{"id":"","label":"x components","localized":"","reload":"","hint":"x components"},
|
|
{"id":"","label":"x overlap","localized":"","reload":"","hint":"x overlap"},
|
|
{"id":"","label":"x type","localized":"","reload":"","hint":"x type"},
|
|
{"id":"","label":"x-axis tile overlap","localized":"","reload":"","hint":"x-axis tile overlap"},
|
|
{"id":"","label":"x-axis tiles","localized":"","reload":"","hint":"x-axis tiles"},
|
|
{"id":"","label":"xhinker","localized":"","reload":"","hint":"xhinker"},
|
|
{"id":"","label":"xs","localized":"","reload":"","hint":"xs"},
|
|
{"id":"","label":"y components","localized":"","reload":"","hint":"y components"},
|
|
{"id":"","label":"y overlap","localized":"","reload":"","hint":"y overlap"},
|
|
{"id":"","label":"y type","localized":"","reload":"","hint":"y type"},
|
|
{"id":"","label":"y-axis tile overlap","localized":"","reload":"","hint":"y-axis tile overlap"},
|
|
{"id":"","label":"y-axis tiles","localized":"","reload":"","hint":"y-axis tiles"},
|
|
{"id":"","label":"z type","localized":"","reload":"","hint":"z type"},
|
|
{"id":"","label":"zero","localized":"","reload":"","hint":"zero"},
|
|
{"id":"","label":"zoe depth","localized":"","reload":"","hint":"zoe depth"}
]
}