Files
ComfyUI/custom_nodes/rgthree-comfy/py/power_lora_loader.py
jaidaken f09734b0ee
Some checks failed
Python Linting / Run Ruff (push) Has been cancelled
Python Linting / Run Pylint (push) Has been cancelled
Full Comfy CI Workflow Runs / test-stable (12.1, , linux, 3.10, [self-hosted Linux], stable) (push) Has been cancelled
Full Comfy CI Workflow Runs / test-stable (12.1, , linux, 3.11, [self-hosted Linux], stable) (push) Has been cancelled
Full Comfy CI Workflow Runs / test-stable (12.1, , linux, 3.12, [self-hosted Linux], stable) (push) Has been cancelled
Full Comfy CI Workflow Runs / test-unix-nightly (12.1, , linux, 3.11, [self-hosted Linux], nightly) (push) Has been cancelled
Execution Tests / test (macos-latest) (push) Has been cancelled
Execution Tests / test (ubuntu-latest) (push) Has been cancelled
Execution Tests / test (windows-latest) (push) Has been cancelled
Test server launches without errors / test (push) Has been cancelled
Unit Tests / test (macos-latest) (push) Has been cancelled
Unit Tests / test (ubuntu-latest) (push) Has been cancelled
Unit Tests / test (windows-2022) (push) Has been cancelled
Add custom nodes, Civitai loras (LFS), and vast.ai setup script
Includes 30 custom nodes committed directly, 7 Civitai-exclusive
loras stored via Git LFS, and a setup script that installs all
dependencies and downloads HuggingFace-hosted models on vast.ai.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-09 00:56:42 +00:00

102 lines
4.1 KiB
Python

import folder_paths
from typing import Union
from nodes import LoraLoader
from .constants import get_category, get_name
from .power_prompt_utils import get_lora_by_filename
from .utils import FlexibleOptionalInputType, any_type
from .server.utils_info import get_model_info_file_data
from .log import log_node_warn
# Display name for this node, resolved through the shared rgthree constants helper
# so it matches the name registered with ComfyUI.
NODE_NAME = get_name('Power Lora Loader')
class RgthreePowerLoraLoader:
  """The Power Lora Loader is a powerful, flexible node to add multiple loras to a model/clip."""

  NAME = NODE_NAME
  CATEGORY = get_category()

  @classmethod
  def INPUT_TYPES(cls):  # pylint: disable = invalid-name, missing-function-docstring
    return {
      "required": {},
      # Since we will pass any number of loras in from the UI, this needs to always allow an
      # arbitrary set of optional inputs (lora_1, lora_2, ...) in addition to model/clip.
      "optional": FlexibleOptionalInputType(type=any_type, data={
        "model": ("MODEL",),
        "clip": ("CLIP",),
      }),
      "hidden": {},
    }

  RETURN_TYPES = ("MODEL", "CLIP")
  RETURN_NAMES = ("MODEL", "CLIP")
  FUNCTION = "load_loras"

  def load_loras(self, model=None, clip=None, **kwargs):
    """Loops over the provided loras in kwargs and applies valid ones.

    Args:
      model: Optional MODEL to patch; a lora is only applied when a model is present.
      clip: Optional CLIP to patch; when absent, any clip strength is forced to zero.
      **kwargs: Arbitrary "lora_N" entries from the UI. Each valid entry is a dict with at
        least 'on', 'lora' and 'strength' keys, and optionally 'strengthTwo' for a separate
        clip strength.

    Returns:
      A (model, clip) tuple with every enabled, non-zero-strength lora applied.
    """
    for key, value in kwargs.items():
      # Keys arrive from the UI like "lora_1"; match case-insensitively and skip anything
      # that is not a well-formed lora entry (the isinstance guard avoids a TypeError on a
      # stray non-dict kwarg).
      if not key.upper().startswith('LORA_'):
        continue
      if not isinstance(value, dict) or not {'on', 'lora', 'strength'} <= value.keys():
        continue
      strength_model = value['strength']
      # If we just passed one strength value, then use it for both, if we passed a strengthTwo
      # as well, then our `strength` will be for the model, and `strengthTwo` for clip.
      strength_clip = value.get('strengthTwo')
      if clip is None:
        if strength_clip is not None and strength_clip != 0:
          log_node_warn(NODE_NAME, 'Received clip strength even though no clip was supplied!')
        strength_clip = 0
      elif strength_clip is None:
        strength_clip = strength_model
      if value['on'] and (strength_model != 0 or strength_clip != 0):
        lora = get_lora_by_filename(value['lora'], log_node=self.NAME)
        # Only apply when both the model and the resolved lora file exist.
        if model is not None and lora is not None:
          model, clip = LoraLoader().load_lora(model, clip, lora, strength_model, strength_clip)
    return (model, clip)

  @classmethod
  def get_enabled_loras_from_prompt_node(cls,
                                         prompt_node: dict) -> list[dict[str, Union[str, float]]]:
    """Gets enabled loras of a node within a server prompt.

    Args:
      prompt_node: The serialized node from the server prompt; its 'inputs' dict is scanned
        for "lora_N" entries whose 'on' flag is truthy.

    Returns:
      A list of dicts with 'name', 'strength', 'path' and, when a separate clip strength was
      provided, 'strength_clip'. Entries whose lora file cannot be resolved are skipped.
    """
    result = []
    for name, lora in prompt_node['inputs'].items():
      if not name.startswith('lora_') or not lora['on']:
        continue
      lora_file = get_lora_by_filename(lora['lora'], log_node=cls.NAME)
      if lora_file is None:
        # The referenced lora file could not be resolved on disk; skip it rather than
        # handing callers a None path.
        continue
      lora_dict = {
        'name': lora['lora'],
        'strength': lora['strength'],
        'path': folder_paths.get_full_path("loras", lora_file),
      }
      if 'strengthTwo' in lora:
        lora_dict['strength_clip'] = lora['strengthTwo']
      result.append(lora_dict)
    return result

  @classmethod
  def get_enabled_triggers_from_prompt_node(cls, prompt_node: dict, max_each: int = 1):
    """Gets trigger words up to the max for enabled loras of a node within a server prompt.

    Args:
      prompt_node: The serialized node from the server prompt.
      max_each: Maximum number of trigger words to take per lora.

    Returns:
      A flat list of trigger-word strings across all enabled loras.
    """
    lora_names = [lora['name'] for lora in cls.get_enabled_loras_from_prompt_node(prompt_node)]
    trained_words = []
    for lora in lora_names:
      info = get_model_info_file_data(lora, 'loras', default={})
      if not info:
        log_node_warn(
          NODE_NAME,
          f'No info found for lora {lora} when grabbing triggers. Have you generated an info file'
          ' from the Power Lora Loader "Show Info" dialog?'
        )
        continue
      if not info.get('trainedWords'):
        log_node_warn(
          NODE_NAME,
          f'No trained words for lora {lora} when grabbing triggers. Have you fetched data from '
          'civitai or manually added words?'
        )
        continue
      # Each trainedWords entry is expected to be a dict with a 'word' key (as the info-file
      # format stores them); skip malformed or empty entries instead of raising.
      trained_words += [
        entry['word']
        for entry in info['trainedWords'][:max_each]
        if isinstance(entry, dict) and entry.get('word')
      ]
    return trained_words