Add custom nodes, Civitai loras (LFS), and vast.ai setup script
Some checks failed
Python Linting / Run Ruff (push) Has been cancelled
Python Linting / Run Pylint (push) Has been cancelled
Full Comfy CI Workflow Runs / test-stable (12.1, , linux, 3.10, [self-hosted Linux], stable) (push) Has been cancelled
Full Comfy CI Workflow Runs / test-stable (12.1, , linux, 3.11, [self-hosted Linux], stable) (push) Has been cancelled
Full Comfy CI Workflow Runs / test-stable (12.1, , linux, 3.12, [self-hosted Linux], stable) (push) Has been cancelled
Full Comfy CI Workflow Runs / test-unix-nightly (12.1, , linux, 3.11, [self-hosted Linux], nightly) (push) Has been cancelled
Execution Tests / test (macos-latest) (push) Has been cancelled
Execution Tests / test (ubuntu-latest) (push) Has been cancelled
Execution Tests / test (windows-latest) (push) Has been cancelled
Test server launches without errors / test (push) Has been cancelled
Unit Tests / test (macos-latest) (push) Has been cancelled
Unit Tests / test (ubuntu-latest) (push) Has been cancelled
Unit Tests / test (windows-2022) (push) Has been cancelled

Includes 30 custom nodes committed directly, 7 Civitai-exclusive
loras stored via Git LFS, and a setup script that installs all
dependencies and downloads HuggingFace-hosted models on vast.ai.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-09 00:55:26 +00:00
parent 2b70ab9ad0
commit f09734b0ee
2274 changed files with 748556 additions and 3 deletions

View File

@@ -0,0 +1,29 @@
from server import PromptServer
from aiohttp import web
import os
import folder_paths
# Resolve the shared "user" directory two levels above this extension and
# make sure it exists; autocomplete.txt inside it backs the API below.
dir = os.path.abspath(os.path.join(__file__, "../../user"))
# makedirs(exist_ok=True) also creates missing parents and avoids the
# check-then-create race of the old exists()/mkdir() pair.
os.makedirs(dir, exist_ok=True)
file = os.path.join(dir, "autocomplete.txt")
@PromptServer.instance.routes.get("/pysssss/autocomplete")
async def get_autocomplete(request):
    """Serve the stored autocomplete word list, or 404 if none exists yet."""
    if not os.path.isfile(file):
        return web.Response(status=404)
    return web.FileResponse(file)
@PromptServer.instance.routes.post("/pysssss/autocomplete")
async def update_autocomplete(request):
    """Overwrite the autocomplete word list with the posted request body."""
    text = await request.text()
    with open(file, "w", encoding="utf-8") as out:
        out.write(text)
    return web.Response(status=200)
@PromptServer.instance.routes.get("/pysssss/loras")
async def get_loras(request):
    """Return every known lora name with its file extension stripped."""
    loras = folder_paths.get_filename_list("loras")
    return web.json_response([os.path.splitext(name)[0] for name in loras])

View File

@@ -0,0 +1,172 @@
import glob
import os
from nodes import LoraLoader, CheckpointLoaderSimple
import folder_paths
from server import PromptServer
from folder_paths import get_directory_by_type
from aiohttp import web
import shutil
@PromptServer.instance.routes.get("/pysssss/view/{name}")
async def view(request):
    """Serve a model-adjacent file; {name} is "<folder type>/<relative path>"."""
    name = request.match_info["name"]
    pos = name.index("/")
    type = name[0:pos]
    name = name[pos+1:]
    image_path = folder_paths.get_full_path(
        type, name)
    if not image_path:
        return web.Response(status=404)
    filename = os.path.basename(image_path)
    # Bug fix: the Content-Disposition previously embedded a hard-coded
    # placeholder instead of the computed (and otherwise unused) filename,
    # so downloads were saved under a bogus name.
    return web.FileResponse(image_path, headers={"Content-Disposition": f"filename=\"{filename}\""})
@PromptServer.instance.routes.post("/pysssss/save/{name}")
async def save_preview(request):
    # Copy an image from a ComfyUI output/input/temp folder to sit next to
    # the named model file, to be used as that model's preview thumbnail.
    # {name} is "<folder type>/<model relative path>".
    name = request.match_info["name"]
    pos = name.index("/")
    # First path segment selects the model folder type (loras, checkpoints,
    # ...); the remainder is the model's relative filename.
    type = name[0:pos]
    name = name[pos+1:]
    body = await request.json()
    dir = get_directory_by_type(body.get("type", "output"))
    subfolder = body.get("subfolder", "")
    full_output_folder = os.path.join(dir, os.path.normpath(subfolder))
    filepath = os.path.join(full_output_folder, body.get("filename", ""))
    # Path-traversal guard: the resolved source image must stay inside `dir`.
    if os.path.commonpath((dir, os.path.abspath(filepath))) != dir:
        return web.Response(status=400)
    image_path = folder_paths.get_full_path(type, name)
    # Reuse the model's base path but adopt the source image's extension.
    image_path = os.path.splitext(
        image_path)[0] + os.path.splitext(filepath)[1]
    shutil.copyfile(filepath, image_path)
    return web.json_response({
        "image": type + "/" + os.path.basename(image_path)
    })
@PromptServer.instance.routes.get("/pysssss/examples/{name}")
async def get_examples(request):
    """List example prompt files stored alongside the named model."""
    name = request.match_info["name"]
    slash = name.index("/")
    type = name[:slash]
    name = name[slash + 1:]
    file_path = folder_paths.get_full_path(type, name)
    if not file_path:
        return web.Response(status=404)
    base = os.path.splitext(file_path)[0]
    examples = []
    # A sibling directory named after the model holds per-example .txt files.
    if os.path.isdir(base):
        found = glob.glob(base + "/*.txt")
        examples += sorted(os.path.relpath(p, base) for p in found)
    # A bare "<model>.txt" sidecar is exposed under the pseudo-name "notes".
    if os.path.isfile(base + ".txt"):
        examples.append("notes")
    return web.json_response(examples)
@PromptServer.instance.routes.post("/pysssss/examples/{name}")
async def save_example(request):
    """Save an example prompt text file next to the named model."""
    name = request.match_info["name"]
    pos = name.index("/")
    type = name[0:pos]
    name = name[pos+1:]
    body = await request.json()
    example_name = body["name"]
    example = body["example"]
    file_path = folder_paths.get_full_path(
        type, name)
    if not file_path:
        return web.Response(status=404)
    if not example_name.endswith(".txt"):
        example_name += ".txt"
    file_path_no_ext = os.path.splitext(file_path)[0]
    example_file = os.path.abspath(os.path.join(file_path_no_ext, example_name))
    # Security fix: `example_name` comes from an untrusted request body, so a
    # value like "../../x" could previously write a .txt anywhere on disk.
    if os.path.commonpath((os.path.abspath(file_path_no_ext), example_file)) != os.path.abspath(file_path_no_ext):
        return web.Response(status=400)
    if not os.path.exists(file_path_no_ext):
        os.mkdir(file_path_no_ext)
    with open(example_file, 'w', encoding='utf8') as f:
        f.write(example)
    return web.Response(status=201)
@PromptServer.instance.routes.get("/pysssss/images/{type}")
async def get_images(request):
    """Map each model file of `type` to its preview image, when one exists."""
    type = request.match_info["type"]
    images = {}
    for item_name in folder_paths.get_filename_list(type):
        file_name = os.path.splitext(item_name)[0]
        file_path = folder_paths.get_full_path(type, item_name)
        if file_path is None:
            continue
        base = os.path.splitext(file_path)[0]
        # First matching extension wins, in this priority order.
        for ext in ["png", "jpg", "jpeg", "preview.png", "preview.jpeg"]:
            if os.path.isfile(base + "." + ext):
                images[item_name] = f"{type}/{file_name}.{ext}"
                break
    return web.json_response(images)
class LoraLoaderWithImages(LoraLoader):
    """LoraLoader variant that also passes through a hidden prompt string."""

    RETURN_TYPES = (*LoraLoader.RETURN_TYPES, "STRING",)
    RETURN_NAMES = (*getattr(LoraLoader, "RETURN_NAMES",
                             LoraLoader.RETURN_TYPES), "example")

    @classmethod
    def INPUT_TYPES(s):
        # Extend the base inputs with a hidden slot the frontend fills with
        # the selected example prompt.
        types = super().INPUT_TYPES()
        types["optional"] = {"prompt": ("STRING", {"hidden": True})}
        return types

    def load_lora(self, **kwargs):
        example = kwargs.pop("prompt", "")
        loaded = super().load_lora(**kwargs)
        return (*loaded, example)
class CheckpointLoaderSimpleWithImages(CheckpointLoaderSimple):
    """Checkpoint loader variant that also passes through a hidden prompt."""

    RETURN_TYPES = (*CheckpointLoaderSimple.RETURN_TYPES, "STRING",)
    RETURN_NAMES = (*getattr(CheckpointLoaderSimple, "RETURN_NAMES",
                             CheckpointLoaderSimple.RETURN_TYPES), "example")

    @classmethod
    def INPUT_TYPES(s):
        # Extend the base inputs with a hidden slot the frontend fills with
        # the selected example prompt.
        types = super().INPUT_TYPES()
        types["optional"] = {"prompt": ("STRING", {"hidden": True})}
        return types

    def load_checkpoint(self, **kwargs):
        example = kwargs.pop("prompt", "")
        loaded = super().load_checkpoint(**kwargs)
        return (*loaded, example)
# Node registration tables consumed by ComfyUI when this extension loads.
NODE_CLASS_MAPPINGS = {
    "LoraLoader|pysssss": LoraLoaderWithImages,
    "CheckpointLoader|pysssss": CheckpointLoaderSimpleWithImages,
}
# Human-readable names shown in the node picker.
NODE_DISPLAY_NAME_MAPPINGS = {
    "LoraLoader|pysssss": "Lora Loader 🐍",
    "CheckpointLoader|pysssss": "Checkpoint Loader 🐍",
}

View File

@@ -0,0 +1,71 @@
import torch
import numpy as np
from PIL import Image
class ConstrainImage:
    """
    A node that constrains an image to a maximum and minimum size while maintaining aspect ratio.

    Each image in the batch is clamped into [min, max] bounds; with cropping
    enabled, the image is instead scaled to fill and center-cropped, so no
    letterboxing occurs. Outputs one image per input (as a list, since the
    resulting sizes may differ between batch items).
    """
    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "images": ("IMAGE",),
                "max_width": ("INT", {"default": 1024, "min": 0}),
                "max_height": ("INT", {"default": 1024, "min": 0}),
                "min_width": ("INT", {"default": 0, "min": 0}),
                "min_height": ("INT", {"default": 0, "min": 0}),
                "crop_if_required": (["yes", "no"], {"default": "no"}),
            },
        }
    RETURN_TYPES = ("IMAGE",)
    FUNCTION = "constrain_image"
    CATEGORY = "image"
    # Results are emitted as a list because constrained sizes can differ.
    OUTPUT_IS_LIST = (True,)
    def constrain_image(self, images, max_width, max_height, min_width, min_height, crop_if_required):
        crop_if_required = crop_if_required == "yes"
        results = []
        for image in images:
            # Tensor values are floats in [0,1]; convert to a uint8 PIL image.
            i = 255. * image.cpu().numpy()
            img = Image.fromarray(np.clip(i, 0, 255).astype(np.uint8)).convert("RGB")
            current_width, current_height = img.size
            aspect_ratio = current_width / current_height
            # Clamp each dimension into [min, max] independently first.
            constrained_width = min(max(current_width, min_width), max_width)
            constrained_height = min(max(current_height, min_height), max_height)
            # Then shrink one axis (or, when cropping, grow the other) so the
            # original aspect ratio is preserved.
            if constrained_width / constrained_height > aspect_ratio:
                constrained_width = max(int(constrained_height * aspect_ratio), min_width)
                if crop_if_required:
                    constrained_height = int(current_height / (current_width / constrained_width))
            else:
                constrained_height = max(int(constrained_width / aspect_ratio), min_height)
                if crop_if_required:
                    constrained_width = int(current_width / (current_height / constrained_height))
            resized_image = img.resize((constrained_width, constrained_height), Image.LANCZOS)
            # When scaled-to-fill overshoots the max bounds, center-crop back.
            if crop_if_required and (constrained_width > max_width or constrained_height > max_height):
                left = max((constrained_width - max_width) // 2, 0)
                top = max((constrained_height - max_height) // 2, 0)
                right = min(constrained_width, max_width) + left
                bottom = min(constrained_height, max_height) + top
                resized_image = resized_image.crop((left, top, right, bottom))
            # Back to a float tensor in [0,1] with a leading batch dim.
            resized_image = np.array(resized_image).astype(np.float32) / 255.0
            resized_image = torch.from_numpy(resized_image)[None,]
            results.append(resized_image)
        return (results,)
# Node registration tables consumed by ComfyUI when this extension loads.
NODE_CLASS_MAPPINGS = {
    "ConstrainImage|pysssss": ConstrainImage,
}
NODE_DISPLAY_NAME_MAPPINGS = {
    "ConstrainImage|pysssss": "Constrain Image 🐍",
}

View File

@@ -0,0 +1,72 @@
import torch
import numpy as np
from PIL import Image
class ConstrainImageforVideo:
    """
    A node that constrains an image to a maximum and minimum size while maintaining aspect ratio.

    Unlike ConstrainImage, all frames are concatenated into a single batch at
    the end, so every frame must come out the same size.
    """
    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "images": ("IMAGE",),
                "max_width": ("INT", {"default": 1024, "min": 0}),
                "max_height": ("INT", {"default": 1024, "min": 0}),
                "min_width": ("INT", {"default": 0, "min": 0}),
                "min_height": ("INT", {"default": 0, "min": 0}),
                "crop_if_required": (["yes", "no"], {"default": "no"}),
            },
        }
    RETURN_TYPES = ("IMAGE",)
    RETURN_NAMES = ("IMAGE",)
    # NOTE(review): the method below returns (images, count) but only one
    # output is declared here — confirm whether a second ("INT",) entry was
    # intended.
    FUNCTION = "constrain_image_for_video"
    CATEGORY = "image"
    def constrain_image_for_video(self, images, max_width, max_height, min_width, min_height, crop_if_required):
        crop_if_required = crop_if_required == "yes"
        results = []
        for image in images:
            # Tensor values are floats in [0,1]; convert to a uint8 PIL image.
            i = 255. * image.cpu().numpy()
            img = Image.fromarray(np.clip(i, 0, 255).astype(np.uint8)).convert("RGB")
            current_width, current_height = img.size
            aspect_ratio = current_width / current_height
            # NOTE(review): max(min(...)) is the inverse of ConstrainImage's
            # min(max(...)) clamp — as written it pins both dimensions to
            # max_width/max_height. That does keep every frame the same size
            # for the torch.cat below, but confirm the swap is intentional.
            constrained_width = max(min(current_width, min_width), max_width)
            constrained_height = max(min(current_height, min_height), max_height)
            # Shrink one axis (or, when cropping, grow the other) so the
            # original aspect ratio is preserved.
            if constrained_width / constrained_height > aspect_ratio:
                constrained_width = max(int(constrained_height * aspect_ratio), min_width)
                if crop_if_required:
                    constrained_height = int(current_height / (current_width / constrained_width))
            else:
                constrained_height = max(int(constrained_width / aspect_ratio), min_height)
                if crop_if_required:
                    constrained_width = int(current_width / (current_height / constrained_height))
            resized_image = img.resize((constrained_width, constrained_height), Image.LANCZOS)
            # When scaled-to-fill overshoots the max bounds, center-crop back.
            if crop_if_required and (constrained_width > max_width or constrained_height > max_height):
                left = max((constrained_width - max_width) // 2, 0)
                top = max((constrained_height - max_height) // 2, 0)
                right = min(constrained_width, max_width) + left
                bottom = min(constrained_height, max_height) + top
                resized_image = resized_image.crop((left, top, right, bottom))
            # Back to a float tensor in [0,1] with a leading batch dim.
            resized_image = np.array(resized_image).astype(np.float32) / 255.0
            resized_image = torch.from_numpy(resized_image)[None,]
            results.append(resized_image)
        # All frames are the same size here, so they can be batched together.
        all_images = torch.cat(results, dim=0)
        return (all_images, all_images.size(0),)
# Node registration tables consumed by ComfyUI when this extension loads.
NODE_CLASS_MAPPINGS = {
    "ConstrainImageforVideo|pysssss": ConstrainImageforVideo,
}
NODE_DISPLAY_NAME_MAPPINGS = {
    "ConstrainImageforVideo|pysssss": "Constrain Image for Video 🐍",
}

View File

@@ -0,0 +1,252 @@
import ast
import math
import random
import operator as op
# Hack: string type that is always equal in not equal comparisons
class AnyType(str):
    """str subclass whose inequality check always fails, acting as a wildcard."""

    def __ne__(self, __value: object) -> bool:
        # Claiming "never not-equal" makes ComfyUI's type check accept any link.
        return False


# Our any instance wants to be a wildcard string
any = AnyType("*")
# Mapping of ast operator node types to their implementations. Arithmetic
# and bitwise ops come from the stdlib `operator` module; the boolean ops
# normalise their results to 1/0 ints.
operators = {
    ast.Add: op.add,
    ast.Sub: op.sub,
    ast.Mult: op.mul,
    ast.Div: op.truediv,
    ast.FloorDiv: op.floordiv,
    ast.Pow: op.pow,
    ast.BitXor: op.xor,
    ast.USub: op.neg,
    ast.Mod: op.mod,
    ast.BitAnd: op.and_,
    ast.BitOr: op.or_,
    ast.Invert: op.invert,
    ast.And: lambda a, b: 1 if a and b else 0,
    ast.Or: lambda a, b: 1 if a or b else 0,
    ast.Not: lambda a: 0 if a else 1,
    ast.RShift: op.rshift,
    ast.LShift: op.lshift
}
# TODO: restructure args to provide more info, generate hint based on args to save duplication
# Callable registry for the expression language. For each entry:
#   "args" is the (min, max) accepted arity — max None means variadic;
#   "call" is the implementation;
#   "hint" is the signature hint shown by the autocomplete widget.
functions = {
    "round": {
        "args": (1, 2),
        "call": lambda a, b = None: round(a, b),
        "hint": "number, dp? = 0"
    },
    "ceil": {
        "args": (1, 1),
        "call": lambda a: math.ceil(a),
        "hint": "number"
    },
    "floor": {
        "args": (1, 1),
        "call": lambda a: math.floor(a),
        "hint": "number"
    },
    "min": {
        "args": (2, None),
        "call": lambda *args: min(*args),
        "hint": "...numbers"
    },
    "max": {
        "args": (2, None),
        "call": lambda *args: max(*args),
        "hint": "...numbers"
    },
    "randomint": {
        "args": (2, 2),
        "call": lambda a, b: random.randint(a, b),
        "hint": "min, max"
    },
    "randomchoice": {
        "args": (2, None),
        "call": lambda *args: random.choice(args),
        "hint": "...numbers"
    },
    "sqrt": {
        "args": (1, 1),
        "call": lambda a: math.sqrt(a),
        "hint": "number"
    },
    "int": {
        "args": (1, 1),
        "call": lambda a = None: int(a),
        "hint": "number"
    },
    "iif": {
        "args": (3, 3),
        "call": lambda a, b, c = None: b if a else c,
        "hint": "value, truepart, falsepart"
    },
}
# Autocomplete entries for the expression widget: each inserts "name()" with
# the caret moved back between the parentheses, showing the argument hint.
autocompleteWords = [
    {
        "text": name,
        "value": f"{name}()",
        "showValue": False,
        "hint": f"{functions[name]['hint']}",
        "caretOffset": -1,
    }
    for name in functions
]
class MathExpression:
    """Evaluate a math expression over optional inputs a/b/c.

    Supports the arithmetic/bitwise/boolean operators in `operators`,
    comparisons (yielding 1/0), the functions in `functions`, size lookups
    (e.g. ``a.width``) and other nodes' widget values (e.g. ``MyNode.seed``).
    Returns the result as both INT and FLOAT.
    """

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {
                "expression": ("STRING", {"multiline": True, "dynamicPrompts": False, "pysssss.autocomplete": {
                    "words": autocompleteWords,
                    "separator": ""
                }}),
            },
            "optional": {
                "a": (any, ),
                "b": (any,),
                "c": (any, ),
            },
            "hidden": {"extra_pnginfo": "EXTRA_PNGINFO",
                       "prompt": "PROMPT"},
        }

    RETURN_TYPES = ("INT", "FLOAT", )
    FUNCTION = "evaluate"
    CATEGORY = "utils"
    OUTPUT_NODE = True

    @classmethod
    def IS_CHANGED(s, expression, **kwargs):
        # Expressions using randomness must be re-evaluated on every run.
        if "random" in expression:
            return float("nan")
        return expression

    def get_widget_value(self, extra_pnginfo, prompt, node_name, widget_name):
        """Look up another node's widget value by node type/S&R name/title.

        Raises NameError when the node or widget cannot be found, and
        ValueError for converted (linked) widgets.
        """
        workflow = extra_pnginfo["workflow"] if "workflow" in extra_pnginfo else { "nodes": [] }
        node_id = None
        for node in workflow["nodes"]:
            name = node["type"]
            if "properties" in node:
                if "Node name for S&R" in node["properties"]:
                    name = node["properties"]["Node name for S&R"]
            if name == node_name:
                node_id = node["id"]
                break
            if "title" in node:
                name = node["title"]
            if name == node_name:
                node_id = node["id"]
                break
        if node_id is not None:
            values = prompt[str(node_id)]
            if "inputs" in values:
                if widget_name in values["inputs"]:
                    value = values["inputs"][widget_name]
                    # A list here means the widget was converted to an input link.
                    if isinstance(value, list):
                        raise ValueError("Converted widgets are not supported via named reference, use the inputs instead.")
                    return value
            raise NameError(f"Widget not found: {node_name}.{widget_name}")
        raise NameError(f"Node not found: {node_name}.{widget_name}")

    def get_size(self, target, property):
        """Return the pixel width/height of an IMAGE tensor or LATENT dict."""
        if isinstance(target, dict) and "samples" in target:
            # Latent: spatial dims are 1/8th of the pixel size.
            if property == "width":
                return target["samples"].shape[3] * 8
            return target["samples"].shape[2] * 8
        else:
            # Image tensor laid out as (batch, height, width, channels).
            if property == "width":
                return target.shape[2]
            return target.shape[1]

    def evaluate(self, expression, prompt, extra_pnginfo={}, a=None, b=None, c=None):
        # `extra_pnginfo` default is only ever read, never mutated.
        expression = expression.replace('\n', ' ').replace('\r', '')
        node = ast.parse(expression, mode='eval').body
        lookup = {"a": a, "b": b, "c": c}

        def eval_op(node, l, r):
            l = eval_expr(l)
            r = eval_expr(r)
            # Coerce non-int operands (e.g. numpy scalars) to float.
            l = l if isinstance(l, int) else float(l)
            r = r if isinstance(r, int) else float(r)
            return operators[type(node.op)](l, r)

        def eval_expr(node):
            if isinstance(node, ast.Constant) or isinstance(node, ast.Num):
                return node.n
            elif isinstance(node, ast.BinOp):
                return eval_op(node, node.left, node.right)
            elif isinstance(node, ast.BoolOp):
                # Bug fix: "x and y and z" parses as ONE BoolOp with N values;
                # previously only the first two were evaluated. Fold left.
                result = eval_expr(node.values[0])
                for operand in node.values[1:]:
                    result = operators[type(node.op)](result, eval_expr(operand))
                return result
            elif isinstance(node, ast.UnaryOp):
                return operators[type(node.op)](eval_expr(node.operand))
            elif isinstance(node, ast.Attribute):
                if node.value.id in lookup:
                    if node.attr == "width" or node.attr == "height":
                        return self.get_size(lookup[node.value.id], node.attr)
                return self.get_widget_value(extra_pnginfo, prompt, node.value.id, node.attr)
            elif isinstance(node, ast.Name):
                if node.id in lookup:
                    val = lookup[node.id]
                    if isinstance(val, (int, float, complex)):
                        return val
                    else:
                        raise TypeError(
                            f"Complex types (LATENT/IMAGE) need to reference their width/height, e.g. {node.id}.width")
                raise NameError(f"Name not found: {node.id}")
            elif isinstance(node, ast.Call):
                if node.func.id in functions:
                    fn = functions[node.func.id]
                    l = len(node.args)
                    # Enforce declared (min, max) arity; max None = variadic.
                    if l < fn["args"][0] or (fn["args"][1] is not None and l > fn["args"][1]):
                        if fn["args"][1] is None:
                            toErr = " or more"
                        else:
                            toErr = f" to {fn['args'][1]}"
                        raise SyntaxError(
                            f"Invalid function call: {node.func.id} requires {fn['args'][0]}{toErr} arguments")
                    args = []
                    for arg in node.args:
                        args.append(eval_expr(arg))
                    return fn["call"](*args)
                raise NameError(f"Invalid function call: {node.func.id}")
            elif isinstance(node, ast.Compare):
                # Bug fix: evaluate the whole comparison chain (e.g. 1 < x < 5)
                # with Python's and-semantics; previously only the first
                # operator/comparator pair was considered.
                left = eval_expr(node.left)
                for oper, comparator in zip(node.ops, node.comparators):
                    right = eval_expr(comparator)
                    if isinstance(oper, ast.Eq):
                        ok = left == right
                    elif isinstance(oper, ast.NotEq):
                        ok = left != right
                    elif isinstance(oper, ast.Gt):
                        ok = left > right
                    elif isinstance(oper, ast.GtE):
                        ok = left >= right
                    elif isinstance(oper, ast.Lt):
                        ok = left < right
                    elif isinstance(oper, ast.LtE):
                        ok = left <= right
                    else:
                        raise NotImplementedError(
                            "Operator " + oper.__class__.__name__ + " not supported.")
                    if not ok:
                        return 0
                    left = right
                return 1
            else:
                raise TypeError(node)

        r = eval_expr(node)
        # Surface the value in the UI and return it as (int, float).
        return {"ui": {"value": [r]}, "result": (int(r), float(r),)}
# Node registration tables consumed by ComfyUI when this extension loads.
NODE_CLASS_MAPPINGS = {
    "MathExpression|pysssss": MathExpression,
}
NODE_DISPLAY_NAME_MAPPINGS = {
    "MathExpression|pysssss": "Math Expression 🐍",
}

View File

@@ -0,0 +1,115 @@
import hashlib
import json
from aiohttp import web
from server import PromptServer
import folder_paths
import os
def get_metadata(filepath):
    """Read the JSON ``__metadata__`` block from a .safetensors file.

    Returns the metadata dict, or None when the header has no metadata.
    Raises BufferError for a malformed/truncated header.
    """
    with open(filepath, "rb") as file:
        # https://github.com/huggingface/safetensors#format
        # 8 bytes: N, an unsigned little-endian 64-bit integer, containing the size of the header
        header_size = int.from_bytes(file.read(8), "little", signed=False)
        if header_size <= 0:
            raise BufferError("Invalid header size")
        header = file.read(header_size)
        # Bug fix: this previously re-checked header_size, so a truncated
        # file (fewer header bytes than declared) was never detected.
        if len(header) < header_size:
            raise BufferError("Invalid header")
        header_json = json.loads(header)
        return header_json["__metadata__"] if "__metadata__" in header_json else None
@PromptServer.instance.routes.post("/pysssss/metadata/notes/{name}")
async def save_notes(request):
    # Store the posted body as user notes in a ".txt" sidecar next to the
    # named model. {name} is "<folder type>/<model name>".
    name = request.match_info["name"]
    pos = name.index("/")
    type = name[0:pos]
    name = name[pos+1:]
    file_path = None
    if type == "embeddings" or type == "loras":
        # Embeddings/loras may be referenced without an extension, so match
        # case-insensitively against both the filename and its stem.
        name = name.lower()
        files = folder_paths.get_filename_list(type)
        for f in files:
            lower_f = f.lower()
            if lower_f == name:
                file_path = folder_paths.get_full_path(type, f)
            else:
                n = os.path.splitext(f)[0].lower()
                if n == name:
                    file_path = folder_paths.get_full_path(type, f)
            if file_path is not None:
                break
    else:
        file_path = folder_paths.get_full_path(
            type, name)
    if not file_path:
        return web.Response(status=404)
    file_no_ext = os.path.splitext(file_path)[0]
    info_file = file_no_ext + ".txt"
    # NOTE(review): written with the platform default encoding — confirm
    # whether this should be UTF-8 for non-ASCII notes on Windows.
    with open(info_file, "w") as f:
        f.write(await request.text())
    return web.Response(status=200)
@PromptServer.instance.routes.get("/pysssss/metadata/{name}")
async def load_metadata(request):
    """Return safetensors metadata plus notes/sha256 sidecar data for a model."""
    name = request.match_info["name"]
    pos = name.index("/")
    type = name[0:pos]
    name = name[pos+1:]
    file_path = None
    if type == "embeddings" or type == "loras":
        # Embeddings/loras may be referenced without an extension, so match
        # case-insensitively against both the filename and its stem.
        name = name.lower()
        files = folder_paths.get_filename_list(type)
        for f in files:
            lower_f = f.lower()
            if lower_f == name:
                file_path = folder_paths.get_full_path(type, f)
            else:
                n = os.path.splitext(f)[0].lower()
                if n == name:
                    file_path = folder_paths.get_full_path(type, f)
            if file_path is not None:
                break
    else:
        file_path = folder_paths.get_full_path(
            type, name)
    if not file_path:
        return web.Response(status=404)
    try:
        meta = get_metadata(file_path)
    except Exception:
        # Corrupt/non-safetensors files fall through to an empty dict. (Was a
        # bare `except:`, which also swallowed KeyboardInterrupt/SystemExit.)
        meta = None
    if meta is None:
        meta = {}
    file_no_ext = os.path.splitext(file_path)[0]
    info_file = file_no_ext + ".txt"
    if os.path.isfile(info_file):
        with open(info_file, "r") as f:
            meta["pysssss.notes"] = f.read()
    hash_file = file_no_ext + ".sha256"
    if os.path.isfile(hash_file):
        with open(hash_file, "rt") as f:
            meta["pysssss.sha256"] = f.read()
    else:
        # Hash the model in 1 MiB chunks instead of reading the whole
        # (potentially multi-GB) file into memory at once, then cache it.
        digest = hashlib.sha256()
        with open(file_path, "rb") as f:
            for chunk in iter(lambda: f.read(1024 * 1024), b""):
                digest.update(chunk)
        meta["pysssss.sha256"] = digest.hexdigest()
        with open(hash_file, "wt") as f:
            f.write(meta["pysssss.sha256"])
    return web.json_response(meta)

View File

@@ -0,0 +1,42 @@
# Hack: string type that is always equal in not equal comparisons
class AnyType(str):
    """str subclass whose inequality check always fails, acting as a wildcard."""

    def __ne__(self, __value: object) -> bool:
        # Claiming "never not-equal" makes ComfyUI's type check accept any link.
        return False


# Our any instance wants to be a wildcard string
any = AnyType("*")
class PlaySound:
    """Pass-through node; the frontend plays a sound when it executes."""

    @classmethod
    def INPUT_TYPES(s):
        return {"required": {
            "any": (any, {}),
            "mode": (["always", "on empty queue"], {}),
            "volume": ("FLOAT", {"min": 0, "max": 1, "step": 0.1, "default": 0.5}),
            "file": ("STRING", {"default": "notify.mp3"})
        }}

    FUNCTION = "nop"
    INPUT_IS_LIST = True
    OUTPUT_IS_LIST = (True,)
    OUTPUT_NODE = True
    RETURN_TYPES = (any,)
    CATEGORY = "utils"

    def IS_CHANGED(self, **kwargs):
        # Always report "changed" so the sound triggers on every execution.
        return float("NaN")

    def nop(self, any, mode, volume, file):
        # No server-side work: echo the input and emit an empty ui payload
        # (the actual playback happens in the browser extension).
        return {"ui": {"a": []}, "result": (any,)}
# Node registration tables consumed by ComfyUI when this extension loads.
NODE_CLASS_MAPPINGS = {
    "PlaySound|pysssss": PlaySound,
}
NODE_DISPLAY_NAME_MAPPINGS = {
    "PlaySound|pysssss": "PlaySound 🐍",
}

View File

@@ -0,0 +1,46 @@
# Hack: string type that is always equal in not equal comparisons
class AnyType(str):
    """str subclass whose inequality check always fails, acting as a wildcard."""

    def __ne__(self, __value: object) -> bool:
        # Claiming "never not-equal" makes ComfyUI's type check accept any link.
        return False


# Our any instance wants to be a wildcard string
any = AnyType("*")
class Repeater:
    """Repeat an input value N times, as one list or as multiple outputs."""

    @classmethod
    def INPUT_TYPES(s):
        return {"required": {
            "source": (any, {}),
            "repeats": ("INT", {"min": 0, "max": 5000, "default": 2}),
            "output": (["single", "multi"], {}),
            "node_mode": (["reuse", "create"], {}),
        }}

    RETURN_TYPES = (any,)
    FUNCTION = "repeat"
    OUTPUT_NODE = False
    OUTPUT_IS_LIST = (True,)
    CATEGORY = "utils"

    def repeat(self, repeats, output, node_mode, **kwargs):
        if output == "multi":
            # Multi outputs are split to individual nodes on the frontend when serializing
            return ([kwargs["source"]],)
        if node_mode == "reuse":
            # When reusing we have a single input node, repeat that N times
            return ([kwargs["source"]] * repeats,)
        # When creating new nodes, they'll be added dynamically when the graph is serialized
        return (list(kwargs.values()),)
# Node registration tables consumed by ComfyUI when this extension loads.
NODE_CLASS_MAPPINGS = {
    "Repeater|pysssss": Repeater,
}
NODE_DISPLAY_NAME_MAPPINGS = {
    "Repeater|pysssss": "Repeater 🐍",
}

View File

@@ -0,0 +1,59 @@
# Hack: string type that is always equal in not equal comparisons
class AnyType(str):
    """str subclass whose inequality check always fails, acting as a wildcard."""

    def __ne__(self, __value: object) -> bool:
        # Claiming "never not-equal" makes ComfyUI's type check accept any link.
        return False


# Our any instance wants to be a wildcard string
any = AnyType("*")
class ReroutePrimitive:
    """Hidden passthrough node used to reroute primitive values on the graph."""

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {"value": (any, )},
        }

    @classmethod
    def VALIDATE_INPUTS(s, **kwargs):
        # Accept anything; downstream nodes perform the real validation.
        return True

    RETURN_TYPES = (any,)
    FUNCTION = "route"
    CATEGORY = "__hidden__"

    def route(self, value):
        """Return the value unchanged."""
        return (value,)
class MultiPrimitive:
    """Collect any number of optional `value` inputs into one list output."""

    @classmethod
    def INPUT_TYPES(cls):
        return {
            "required": {},
            "optional": {"value": (any, )},
        }

    @classmethod
    def VALIDATE_INPUTS(s, **kwargs):
        # Accept anything wired in.
        return True

    RETURN_TYPES = (any,)
    FUNCTION = "listify"
    CATEGORY = "utils"
    OUTPUT_IS_LIST = (True,)

    def listify(self, **kwargs):
        """Gather all supplied inputs, in order, into a single list."""
        collected = [item for item in kwargs.values()]
        return (collected,)
# Node registration tables consumed by ComfyUI when this extension loads.
# MultiPrimitive is deliberately left unregistered (kept for reference).
NODE_CLASS_MAPPINGS = {
    "ReroutePrimitive|pysssss": ReroutePrimitive,
    # "MultiPrimitive|pysssss": MultiPrimitive,
}
NODE_DISPLAY_NAME_MAPPINGS = {
    "ReroutePrimitive|pysssss": "Reroute Primitive 🐍",
    # "MultiPrimitive|pysssss": "Multi Primitive 🐍",
}

View File

@@ -0,0 +1,49 @@
class ShowText:
    """Display a text value in the UI and pass it through unchanged."""

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "text": ("STRING", {"forceInput": True}),
            },
            "hidden": {
                "unique_id": "UNIQUE_ID",
                "extra_pnginfo": "EXTRA_PNGINFO",
            },
        }

    INPUT_IS_LIST = True
    RETURN_TYPES = ("STRING",)
    FUNCTION = "notify"
    OUTPUT_NODE = True
    OUTPUT_IS_LIST = (True,)
    CATEGORY = "utils"

    def notify(self, text, unique_id=None, extra_pnginfo=None):
        # Persist the displayed text into the workflow metadata so it is
        # restored when the workflow (or an image embedding it) is reloaded.
        if unique_id is not None and extra_pnginfo is not None:
            if not isinstance(extra_pnginfo, list):
                print("Error: extra_pnginfo is not a list")
            elif not isinstance(extra_pnginfo[0], dict) or "workflow" not in extra_pnginfo[0]:
                print("Error: extra_pnginfo[0] is not a dict or missing 'workflow' key")
            else:
                workflow = extra_pnginfo[0]["workflow"]
                target = None
                for candidate in workflow["nodes"]:
                    if str(candidate["id"]) == str(unique_id[0]):
                        target = candidate
                        break
                if target:
                    target["widgets_values"] = [text]
        return {"ui": {"text": text}, "result": (text,)}

View File

@@ -0,0 +1,49 @@
import re
class StringFunction:
    """Append or search/replace up to three strings, optionally tidying tags."""

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "action": (["append", "replace"], {}),
                "tidy_tags": (["yes", "no"], {}),
            },
            "optional": {
                "text_a": ("STRING", {"multiline": True, "dynamicPrompts": False}),
                "text_b": ("STRING", {"multiline": True, "dynamicPrompts": False}),
                "text_c": ("STRING", {"multiline": True, "dynamicPrompts": False})
            }
        }

    RETURN_TYPES = ("STRING",)
    FUNCTION = "exec"
    CATEGORY = "utils"
    OUTPUT_NODE = True

    def exec(self, action, tidy_tags, text_a="", text_b="", text_c=""):
        tidy = tidy_tags == "yes"
        if action == "append":
            # Join the non-empty pieces, comma-separated when tidying tags.
            pieces = [t for t in (text_a, text_b, text_c) if t]
            out = (", " if tidy else "").join(pieces)
        else:
            if text_c is None:
                text_c = ""
            # A text_b of the form "/pattern/" means regex replace; anything
            # else is a literal substring replacement.
            if text_b.startswith("/") and text_b.endswith("/"):
                out = re.sub(text_b[1:-1], text_c, text_a)
            else:
                out = text_a.replace(text_b, text_c)
        if tidy:
            # Collapse whitespace and comma runs left over from edits.
            out = re.sub(r"\s{2,}", " ", out)
            out = out.replace(" ,", ",")
            out = re.sub(r",{2,}", ",", out)
            out = out.strip()
        return {"ui": {"text": (out,)}, "result": (out,)}
# Node registration tables consumed by ComfyUI when this extension loads.
NODE_CLASS_MAPPINGS = {
    "StringFunction|pysssss": StringFunction,
}
NODE_DISPLAY_NAME_MAPPINGS = {
    "StringFunction|pysssss": "String Function 🐍",
}

View File

@@ -0,0 +1,41 @@
# Hack: string type that is always equal in not equal comparisons
class AnyType(str):
    """str subclass whose inequality check always fails, acting as a wildcard."""

    def __ne__(self, __value: object) -> bool:
        # Claiming "never not-equal" makes ComfyUI's type check accept any link.
        return False


# Our any instance wants to be a wildcard string
any = AnyType("*")
class SystemNotification:
    """Pass-through node; the frontend raises a system notification on run."""

    @classmethod
    def INPUT_TYPES(s):
        return {"required": {
            "message": ("STRING", {"default": "Your notification has triggered."}),
            "any": (any, {}),
            "mode": (["always", "on empty queue"], {}),
        }}

    FUNCTION = "nop"
    INPUT_IS_LIST = True
    OUTPUT_IS_LIST = (True,)
    OUTPUT_NODE = True
    RETURN_TYPES = (any,)
    CATEGORY = "utils"

    def IS_CHANGED(self, **kwargs):
        # Always report "changed" so the notification fires on every run.
        return float("NaN")

    def nop(self, any, message, mode):
        # Echo the input; the ui payload tells the browser what to display.
        return {"ui": {"message": message, "mode": mode}, "result": (any,)}
# Node registration tables consumed by ComfyUI when this extension loads.
NODE_CLASS_MAPPINGS = {
    "SystemNotification|pysssss": SystemNotification,
}
NODE_DISPLAY_NAME_MAPPINGS = {
    "SystemNotification|pysssss": "SystemNotification 🐍",
}

View File

@@ -0,0 +1,200 @@
import os
import folder_paths
import json
from server import PromptServer
import glob
from aiohttp import web
def get_allowed_dirs():
    """Load the user's mapping of logical dir names -> text-file glob paths.

    Read from <repo>/user/text_file_dirs.json; raises if the file is missing.
    """
    dir = os.path.abspath(os.path.join(__file__, "../../user"))
    file = os.path.join(dir, "text_file_dirs.json")
    # json.load reads straight from the handle; explicit UTF-8 so the config
    # parses identically on every platform (JSON is UTF-8 by spec).
    with open(file, "r", encoding="utf-8") as f:
        return json.load(f)
def get_valid_dirs():
    """Names usable as `root_dir` choices: the keys of the user's dir config."""
    allowed = get_allowed_dirs()
    return allowed.keys()
def get_dir_from_name(name):
    """Resolve a configured dir name to a path, expanding $input/$output/$temp."""
    dirs = get_allowed_dirs()
    if name not in dirs:
        raise KeyError(name + " dir not found")
    substitutions = {
        "$input": folder_paths.get_input_directory(),
        "$output": folder_paths.get_output_directory(),
        "$temp": folder_paths.get_temp_directory(),
    }
    path = dirs[name]
    for token, target in substitutions.items():
        path = path.replace(token, target)
    return path
def is_child_dir(parent_path, child_path):
    """Return True if child_path is parent_path itself or nested below it."""
    parent_path = os.path.abspath(parent_path)
    child_path = os.path.abspath(child_path)
    # commonpath([p]) just normalizes p; equality means the parent is the
    # longest shared prefix of both paths, i.e. child is inside parent.
    shared = os.path.commonpath([parent_path, child_path])
    return os.path.commonpath([parent_path]) == shared
def get_real_path(dir):
    """Strip the recursive-glob marker and return the pattern's parent dir."""
    flattened = dir.replace("/**/", "/")
    absolute = os.path.abspath(flattened)
    # Drop the final component (the filename part of the glob pattern).
    return os.path.split(absolute)[0]
@PromptServer.instance.routes.get("/pysssss/text-file/{name}")
async def get_files(request):
    """List files matched by the named configured glob, relative to its root."""
    name = request.match_info["name"]
    pattern = get_dir_from_name(name)
    recursive = "/**/" in pattern
    # Ugh cant use root_path on glob... lazy hack..
    root = get_real_path(pattern)
    files = [os.path.relpath(match, root)
             for match in glob.glob(pattern, recursive=recursive)]
    if not files:
        files = ["[none]"]
    return web.json_response(files)
def get_file(root_dir, file):
    """Resolve `file` under the configured `root_dir`, rejecting escapes.

    Raises ValueError for a blank/placeholder name and ReferenceError when
    the resolved path would fall outside the root directory.
    """
    if file == "[none]" or not file or not file.strip():
        raise ValueError("No file")
    base = get_real_path(get_dir_from_name(root_dir))
    if not os.path.exists(base):
        os.mkdir(base)
    full_path = os.path.join(base, file)
    # Path-traversal guard for names like "../../x".
    if not is_child_dir(base, full_path):
        raise ReferenceError()
    return full_path
class TextFileNode:
    """Shared base for the Load/Save text nodes."""

    RETURN_TYPES = ("STRING",)
    CATEGORY = "utils"

    @classmethod
    def VALIDATE_INPUTS(self, root_dir, file, **kwargs):
        # The "[none]" placeholder is allowed; otherwise get_file raises if
        # the name is invalid or escapes the configured root.
        if file == "[none]" or not file or not file.strip():
            return True
        get_file(root_dir, file)
        return True

    def load_text(self, **kwargs):
        # Remember the resolved path so IS_CHANGED can stat it later.
        self.file = get_file(kwargs["root_dir"], kwargs["file"])
        with open(self.file, "r") as f:
            return (f.read(), )
class LoadText(TextFileNode):
    # Load the contents of a text file selected from a configured directory.
    @classmethod
    def IS_CHANGED(self, **kwargs):
        # NOTE(review): declared @classmethod but reads self.file, which is
        # only set on an instance by load_text — confirm how ComfyUI invokes
        # IS_CHANGED here, as on the class itself this would raise.
        return os.path.getmtime(self.file)
    @classmethod
    def INPUT_TYPES(s):
        # The "pysssss.binding" payload is interpreted by the frontend: when
        # root_dir changes it disables this combo, fetches the matching file
        # list from /pysssss/text-file/..., then repopulates and re-enables.
        return {
            "required": {
                "root_dir": (list(get_valid_dirs()), {}),
                "file": (["[none]"], {
                    "pysssss.binding": [{
                        "source": "root_dir",
                        "callback": [{
                            "type": "set",
                            "target": "$this.disabled",
                            "value": True
                        }, {
                            "type": "fetch",
                            "url": "/pysssss/text-file/{$source.value}",
                            "then": [{
                                "type": "set",
                                "target": "$this.options.values",
                                "value": "$result"
                            }, {
                                "type": "validate-combo"
                            }, {
                                "type": "set",
                                "target": "$this.disabled",
                                "value": False
                            }]
                        }],
                    }]
                })
            },
        }
    FUNCTION = "load_text"
class SaveText(TextFileNode):
    # Write text to a file in a configured directory, then return what was
    # written (by re-reading the file via the base class).
    OUTPUT_NODE = True
    @classmethod
    def IS_CHANGED(self, **kwargs):
        # Always re-run so the file is written on every execution.
        return float("nan")
    @classmethod
    def INPUT_TYPES(s):
        # The "pysssss.binding" payload is interpreted by the frontend: the
        # "insert" toggle is only enabled while the mode is "append".
        return {
            "required": {
                "root_dir": (list(get_valid_dirs()), {}),
                "file": ("STRING", {"default": "file.txt"}),
                "append": (["append", "overwrite", "new only"], {}),
                "insert": ("BOOLEAN", {
                    "default": True, "label_on": "new line", "label_off": "none",
                    "pysssss.binding": [{
                        "source": "append",
                        "callback": [{
                            "type": "if",
                            "condition": [{
                                "left": "$source.value",
                                "op": "eq",
                                "right": '"append"'
                            }],
                            "true": [{
                                "type": "set",
                                "target": "$this.disabled",
                                "value": False
                            }],
                            "false": [{
                                "type": "set",
                                "target": "$this.disabled",
                                "value": True
                            }],
                        }]
                    }]
                }),
                "text": ("STRING", {"forceInput": True, "multiline": True})
            },
        }
    FUNCTION = "write_text"
    def write_text(self, **kwargs):
        self.file = get_file(kwargs["root_dir"], kwargs["file"])
        # "new only" refuses to touch an existing file.
        if kwargs["append"] == "new only" and os.path.exists(self.file):
            raise FileExistsError(
                self.file + " already exists and 'new only' is selected.")
        with open(self.file, "a+" if kwargs["append"] == "append" else "w") as f:
            # In append mode the handle starts at the end of the file, so a
            # non-zero offset means we are appending to existing content.
            is_append = f.tell() != 0
            if is_append and kwargs["insert"]:
                f.write("\n")
            f.write(kwargs["text"])
        # Return the file's full contents as the node output.
        return super().load_text(**kwargs)
# Node registration tables consumed by ComfyUI when this extension loads.
NODE_CLASS_MAPPINGS = {
    "LoadText|pysssss": LoadText,
    "SaveText|pysssss": SaveText,
}
NODE_DISPLAY_NAME_MAPPINGS = {
    "LoadText|pysssss": "Load Text 🐍",
    "SaveText|pysssss": "Save Text 🐍",
}

View File

@@ -0,0 +1,61 @@
from server import PromptServer
from aiohttp import web
import os
import inspect
import json
import importlib
import sys
# Make the directory above this file importable so the shared pysssss
# helper module can be loaded.
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
import pysssss
# Default workflow store: a "pysssss-workflows" folder next to the ComfyUI
# server package, overridable via the pysssss config. Relative overrides
# are anchored at the server root so the traversal checks below hold.
root_directory = os.path.dirname(inspect.getfile(PromptServer))
workflows_directory = os.path.join(root_directory, "pysssss-workflows")
workflows_directory = pysssss.get_config_value(
    "workflows.directory", workflows_directory)
if not os.path.isabs(workflows_directory):
    workflows_directory = os.path.abspath(os.path.join(root_directory, workflows_directory))
# This extension registers no nodes; it only adds API routes.
NODE_CLASS_MAPPINGS = {}
NODE_DISPLAY_NAME_MAPPINGS = {}
@PromptServer.instance.routes.get("/pysssss/workflows")
async def get_workflows(request):
    """List stored workflows as extension-less, slash-separated relative names."""
    files = []
    # Fixed confusing shadowing: the original reused one variable (`file`)
    # as both os.walk's filename list and the inner loop item.
    for dirpath, _directories, filenames in os.walk(workflows_directory):
        for filename in filenames:
            if filename.endswith(".json"):
                files.append(os.path.relpath(
                    os.path.join(dirpath, filename), workflows_directory))
    # Normalise Windows separators so the frontend sees uniform names.
    return web.json_response(
        [os.path.splitext(f)[0].replace("\\", "/") for f in files])
@PromptServer.instance.routes.get("/pysssss/workflows/{name:.+}")
async def get_workflow(request):
    """Serve a stored workflow JSON by name; 403 on path escape attempts."""
    requested = request.match_info["name"] + ".json"
    file = os.path.abspath(os.path.join(workflows_directory, requested))
    # Reject names that resolve outside the workflows directory.
    if os.path.commonpath([file, workflows_directory]) != workflows_directory:
        return web.Response(status=403)
    return web.FileResponse(file)
@PromptServer.instance.routes.post("/pysssss/workflows")
async def save_workflow(request):
    """Persist a workflow; 403 on path escape, 409 when it exists and
    overwrite was not requested."""
    json_data = await request.json()
    file = os.path.abspath(os.path.join(
        workflows_directory, json_data["name"] + ".json"))
    # Reject names that resolve outside the workflows directory.
    if os.path.commonpath([file, workflows_directory]) != workflows_directory:
        return web.Response(status=403)
    # `== False` (not `is False`) deliberately preserved: a missing key or
    # an explicit False/0 both mean "do not overwrite".
    if os.path.exists(file) and json_data.get("overwrite", False) == False:
        return web.Response(status=409)
    # exist_ok avoids the old exists()/makedirs() check-then-create race.
    os.makedirs(os.path.dirname(file), exist_ok=True)
    # json.dump streams to the handle instead of building the full string.
    with open(file, "w") as f:
        json.dump(json_data["workflow"], f)
    return web.Response(status=201)