Compare commits
pysssss-mo... → v3-definit...
11 Commits

| SHA1 |
|---|
| 50603859ab |
| 0d185b721f |
| 8642757971 |
| de86d8e32b |
| 8b331c5ca2 |
| 937d2d5325 |
| 0400497d5e |
| 5f0e04e2d7 |
| 96c2e3856d |
| 880f756dc1 |
| 4480ed488e |

alembic.ini (deleted, 84 lines)
@@ -1,84 +0,0 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
# Use forward slashes (/) also on windows to provide an os agnostic path
script_location = alembic_db

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
# Any required deps can installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to alembic_db/versions.  When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic_db/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
# version_path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
version_path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = sqlite:///user/comfyui.db


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts.  See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME

alembic_db/README.md (deleted)
@@ -1,4 +0,0 @@
## Generate new revision

1. Update models in `/app/database/models.py`
2. Run `alembic revision --autogenerate -m "{your message}"`
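
The deleted README's two-step workflow can also be driven from Python through Alembic's command API; a minimal sketch, assuming the alembic.ini shown above and an example revision message:

    from alembic import command
    from alembic.config import Config

    # Load the project's Alembic configuration (path is an assumption).
    config = Config("alembic.ini")

    # Step 2 of the README: autogenerate a revision from the current models.
    # The message string here is just an example.
    command.revision(config, autogenerate=True, message="add model table")

    # Apply all pending revisions to the configured database.
    command.upgrade(config, "head")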

alembic_db/env.py (deleted)
@@ -1,69 +0,0 @@
from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config


from app.database.models import Base
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

alembic_db/script.py.mako (deleted)
@@ -1,28 +0,0 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    """Upgrade schema."""
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    """Downgrade schema."""
    ${downgrades if downgrades else "pass"}

alembic_db/versions/e9c714da8d57_init.py (deleted)
@@ -1,40 +0,0 @@
"""init

Revision ID: e9c714da8d57
Revises:
Create Date: 2025-05-30 20:14:33.772039

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = 'e9c714da8d57'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    op.create_table('model',
        sa.Column('type', sa.Text(), nullable=False),
        sa.Column('path', sa.Text(), nullable=False),
        sa.Column('file_name', sa.Text(), nullable=True),
        sa.Column('file_size', sa.Integer(), nullable=True),
        sa.Column('hash', sa.Text(), nullable=True),
        sa.Column('hash_algorithm', sa.Text(), nullable=True),
        sa.Column('source_url', sa.Text(), nullable=True),
        sa.Column('date_added', sa.DateTime(), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=True),
        sa.PrimaryKeyConstraint('type', 'path')
    )


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('model')
    # ### end Alembic commands ###

app/database/db.py (deleted)
@@ -1,112 +0,0 @@
import logging
import os
import shutil
from app.logger import log_startup_warning
from utils.install_util import get_missing_requirements_message
from comfy.cli_args import args

_DB_AVAILABLE = False
Session = None


try:
    from alembic import command
    from alembic.config import Config
    from alembic.runtime.migration import MigrationContext
    from alembic.script import ScriptDirectory
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    _DB_AVAILABLE = True
except ImportError as e:
    log_startup_warning(
        f"""
------------------------------------------------------------------------
Error importing dependencies: {e}

{get_missing_requirements_message()}

This error is happening because ComfyUI now uses a local sqlite database.
------------------------------------------------------------------------
""".strip()
    )


def dependencies_available():
    """
    Temporary function to check if the dependencies are available
    """
    return _DB_AVAILABLE


def can_create_session():
    """
    Temporary function to check if the database is available to create a session
    During initial release there may be environmental issues (or missing dependencies) that prevent the database from being created
    """
    return dependencies_available() and Session is not None


def get_alembic_config():
    root_path = os.path.join(os.path.dirname(__file__), "../..")
    config_path = os.path.abspath(os.path.join(root_path, "alembic.ini"))
    scripts_path = os.path.abspath(os.path.join(root_path, "alembic_db"))

    config = Config(config_path)
    config.set_main_option("script_location", scripts_path)
    config.set_main_option("sqlalchemy.url", args.database_url)

    return config


def get_db_path():
    url = args.database_url
    if url.startswith("sqlite:///"):
        return url.split("///")[1]
    else:
        raise ValueError(f"Unsupported database URL '{url}'.")


def init_db():
    db_url = args.database_url
    logging.debug(f"Database URL: {db_url}")
    db_path = get_db_path()
    db_exists = os.path.exists(db_path)

    config = get_alembic_config()

    # Check if we need to upgrade
    engine = create_engine(db_url)
    conn = engine.connect()

    context = MigrationContext.configure(conn)
    current_rev = context.get_current_revision()

    script = ScriptDirectory.from_config(config)
    target_rev = script.get_current_head()

    if current_rev != target_rev:
        # Backup the database pre upgrade
        backup_path = db_path + ".bkp"
        if db_exists:
            shutil.copy(db_path, backup_path)
        else:
            backup_path = None

        try:
            command.upgrade(config, target_rev)
            logging.info(f"Database upgraded from {current_rev} to {target_rev}")
        except Exception as e:
            if backup_path:
                # Restore the database from backup if upgrade fails
                shutil.copy(backup_path, db_path)
                os.remove(backup_path)
            logging.error(f"Error upgrading database: {e}")
            raise e

    global Session
    Session = sessionmaker(bind=engine)


def create_session():
    return Session()
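
For reference, a minimal sketch of how this deleted module was meant to be driven, assuming ComfyUI's CLI arguments have been parsed so that args.database_url is set; the query is illustrative only:

    from app.database.db import init_db, can_create_session, create_session
    from app.database.models import Model

    init_db()  # run pending Alembic migrations, then bind the global Session

    if can_create_session():
        with create_session() as session:
            # Illustrative query against the 'model' table.
            print(session.query(Model).count())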

app/database/models.py (deleted)
@@ -1,59 +0,0 @@
from sqlalchemy import (
    Column,
    Integer,
    Text,
    DateTime,
)
from sqlalchemy.orm import declarative_base
from sqlalchemy.sql import func

Base = declarative_base()


def to_dict(obj):
    fields = obj.__table__.columns.keys()
    return {
        field: (val.to_dict() if hasattr(val, "to_dict") else val)
        for field in fields
        if (val := getattr(obj, field))
    }


class Model(Base):
    """
    sqlalchemy model representing a model file in the system.

    This class defines the database schema for storing information about model files,
    including their type, path, hash, and when they were added to the system.

    Attributes:
        type (Text): The type of the model, this is the name of the folder in the models folder (primary key)
        path (Text): The file path of the model relative to the type folder (primary key)
        file_name (Text): The name of the model file
        file_size (Integer): The size of the model file in bytes
        hash (Text): A hash of the model file
        hash_algorithm (Text): The algorithm used to generate the hash
        source_url (Text): The URL of the model file
        date_added (DateTime): Timestamp of when the model was added to the system
    """

    __tablename__ = "model"

    type = Column(Text, primary_key=True)
    path = Column(Text, primary_key=True)
    file_name = Column(Text)
    file_size = Column(Integer)
    hash = Column(Text)
    hash_algorithm = Column(Text)
    source_url = Column(Text)
    date_added = Column(DateTime, server_default=func.now())

    def to_dict(self):
        """
        Convert the model instance to a dictionary representation.

        Returns:
            dict: A dictionary containing the attributes of the model
        """
        dict = to_dict(self)
        return dict
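
For illustration, a sketch of how the Model mapping and its to_dict helper behave, using made-up values against an in-memory SQLite engine rather than the real ComfyUI database:

    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker

    from app.database.models import Base, Model

    engine = create_engine("sqlite:///:memory:")
    Base.metadata.create_all(engine)  # stand-in for the Alembic migration
    session = sessionmaker(bind=engine)()

    session.add(Model(type="checkpoints", path="sd15/v1-5.safetensors",
                      file_name="v1-5.safetensors", file_size=1234))
    session.commit()

    row = session.query(Model).first()
    # Note: to_dict's walrus filter drops falsy values, so None/empty columns vanish.
    print(row.to_dict())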

app/frontend_management.py
@@ -16,15 +16,26 @@ from importlib.metadata import version
 import requests
 from typing_extensions import NotRequired
 
-from utils.install_util import get_missing_requirements_message, requirements_path
 from comfy.cli_args import DEFAULT_VERSION_STRING
 import app.logger
 
+# The path to the requirements.txt file
+req_path = Path(__file__).parents[1] / "requirements.txt"
+
+
 def frontend_install_warning_message():
+    """The warning message to display when the frontend version is not up to date."""
+
+    extra = ""
+    if sys.flags.no_user_site:
+        extra = "-s "
     return f"""
-{get_missing_requirements_message()}
+Please install the updated requirements.txt file by running:
+{sys.executable} {extra}-m pip install -r {req_path}
 
 This error is happening because the ComfyUI frontend is no longer shipped as part of the main repo but as a pip package instead.
 
+If you are on the portable package you can run: update\\update_comfyui.bat to solve this problem
 """.strip()
@@ -37,7 +48,7 @@ def check_frontend_version():
     try:
         frontend_version_str = version("comfyui-frontend-package")
         frontend_version = parse_version(frontend_version_str)
-        with open(requirements_path, "r", encoding="utf-8") as f:
+        with open(req_path, "r", encoding="utf-8") as f:
             required_frontend = parse_version(f.readline().split("=")[-1])
         if frontend_version < required_frontend:
             app.logger.log_startup_warning(
@@ -151,30 +162,10 @@ def download_release_asset_zip(release: Release, destination_path: str) -> None:
 
 
 class FrontendManager:
-    """
-    A class to manage ComfyUI frontend versions and installations.
-
-    This class handles the initialization and management of different frontend versions,
-    including the default frontend from the pip package and custom frontend versions
-    from GitHub repositories.
-
-    Attributes:
-        CUSTOM_FRONTENDS_ROOT (str): The root directory where custom frontend versions are stored.
-    """
-
     CUSTOM_FRONTENDS_ROOT = str(Path(__file__).parents[1] / "web_custom_versions")
 
     @classmethod
     def default_frontend_path(cls) -> str:
-        """
-        Get the path to the default frontend installation from the pip package.
-
-        Returns:
-            str: The path to the default frontend static files.
-
-        Raises:
-            SystemExit: If the comfyui-frontend-package is not installed.
-        """
         try:
             import comfyui_frontend_package
@@ -195,15 +186,6 @@ comfyui-frontend-package is not installed.
 
     @classmethod
     def templates_path(cls) -> str:
-        """
-        Get the path to the workflow templates.
-
-        Returns:
-            str: The path to the workflow templates directory.
-
-        Raises:
-            SystemExit: If the comfyui-workflow-templates package is not installed.
-        """
         try:
             import comfyui_workflow_templates
@@ -239,16 +221,11 @@ comfyui-workflow-templates is not installed.
     @classmethod
     def parse_version_string(cls, value: str) -> tuple[str, str, str]:
         """
-        Parse a version string into its components.
-
-        The version string should be in the format: 'owner/repo@version'
-        where version can be either a semantic version (v1.2.3) or 'latest'.
-
         Args:
             value (str): The version string to parse.
 
         Returns:
-            tuple[str, str, str]: A tuple containing (owner, repo, version).
+            tuple[str, str]: A tuple containing provider name and version.
 
         Raises:
             argparse.ArgumentTypeError: If the version string is invalid.
@@ -265,22 +242,18 @@ comfyui-workflow-templates is not installed.
         cls, version_string: str, provider: Optional[FrontEndProvider] = None
     ) -> str:
         """
-        Initialize a frontend version without error handling.
+        Initializes the frontend for the specified version.
 
-        This method attempts to initialize a specific frontend version, either from
-        the default pip package or from a custom GitHub repository. It will download
-        and extract the frontend files if necessary.
-
         Args:
-            version_string (str): The version string specifying which frontend to use.
-            provider (FrontEndProvider, optional): The provider to use for custom frontends.
+            version_string (str): The version string.
+            provider (FrontEndProvider, optional): The provider to use. Defaults to None.
 
         Returns:
             str: The path to the initialized frontend.
 
         Raises:
-            Exception: If there is an error during initialization (e.g., network timeout,
-            invalid URL, or missing assets).
+            Exception: If there is an error during the initialization process.
+            main error source might be request timeout or invalid URL.
         """
         if version_string == DEFAULT_VERSION_STRING:
             check_frontend_version()
@@ -332,17 +305,13 @@ comfyui-workflow-templates is not installed.
     @classmethod
     def init_frontend(cls, version_string: str) -> str:
         """
-        Initialize a frontend version with error handling.
+        Initializes the frontend with the specified version string.
 
-        This is the main method to initialize a frontend version. It wraps init_frontend_unsafe
-        with error handling, falling back to the default frontend if initialization fails.
-
         Args:
-            version_string (str): The version string specifying which frontend to use.
+            version_string (str): The version string to initialize the frontend with.
 
         Returns:
-            str: The path to the initialized frontend. If initialization fails,
-            returns the path to the default frontend.
+            str: The path of the initialized frontend.
         """
         try:
             return cls.init_frontend_unsafe(version_string)
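
For context on the added extra = "-s " branch: when the interpreter runs with -s, user site-packages are skipped and sys.flags.no_user_site is set, so the suggested reinstall command repeats the flag; a self-contained sketch of the same check (the requirements path here is a placeholder):

    import sys

    extra = "-s " if sys.flags.no_user_site else ""
    print(f"{sys.executable} {extra}-m pip install -r requirements.txt")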

app/model_processor.py (deleted)
@@ -1,331 +0,0 @@
import os
import logging
import time

import requests
from tqdm import tqdm
from folder_paths import get_relative_path, get_full_path
from app.database.db import create_session, dependencies_available, can_create_session
import blake3
import comfy.utils


if dependencies_available():
    from app.database.models import Model


class ModelProcessor:
    def _validate_path(self, model_path):
        try:
            if not self._file_exists(model_path):
                logging.error(f"Model file not found: {model_path}")
                return None

            result = get_relative_path(model_path)
            if not result:
                logging.error(
                    f"Model file not in a recognized model directory: {model_path}"
                )
                return None

            return result
        except Exception as e:
            logging.error(f"Error validating model path {model_path}: {str(e)}")
            return None

    def _file_exists(self, path):
        """Check if a file exists."""
        return os.path.exists(path)

    def _get_file_size(self, path):
        """Get file size."""
        return os.path.getsize(path)

    def _get_hasher(self):
        return blake3.blake3()

    def _hash_file(self, model_path):
        try:
            hasher = self._get_hasher()
            with open(model_path, "rb", buffering=0) as f:
                b = bytearray(128 * 1024)
                mv = memoryview(b)
                while n := f.readinto(mv):
                    hasher.update(mv[:n])
            return hasher.hexdigest()
        except Exception as e:
            logging.error(f"Error hashing file {model_path}: {str(e)}")
            return None

    def _get_existing_model(self, session, model_type, model_relative_path):
        return (
            session.query(Model)
            .filter(Model.type == model_type)
            .filter(Model.path == model_relative_path)
            .first()
        )

    def _ensure_source_url(self, session, model, source_url):
        if model.source_url is None:
            model.source_url = source_url
            session.commit()

    def _update_database(
        self,
        session,
        model_type,
        model_path,
        model_relative_path,
        model_hash,
        model,
        source_url,
    ):
        try:
            if not model:
                model = self._get_existing_model(
                    session, model_type, model_relative_path
                )

            if not model:
                model = Model(
                    path=model_relative_path,
                    type=model_type,
                    file_name=os.path.basename(model_path),
                )
                session.add(model)

            model.file_size = self._get_file_size(model_path)
            model.hash = model_hash
            if model_hash:
                model.hash_algorithm = "blake3"
            model.source_url = source_url

            session.commit()
            return model
        except Exception as e:
            logging.error(
                f"Error updating database for {model_relative_path}: {str(e)}"
            )

    def process_file(self, model_path, source_url=None, model_hash=None):
        """
        Process a model file and update the database with metadata.
        If the file already exists and matches the database, it will not be processed again.
        Returns the model object or if an error occurs, returns None.
        """
        try:
            if not can_create_session():
                return

            result = self._validate_path(model_path)
            if not result:
                return
            model_type, model_relative_path = result

            with create_session() as session:
                session.expire_on_commit = False

                existing_model = self._get_existing_model(
                    session, model_type, model_relative_path
                )
                if (
                    existing_model
                    and existing_model.hash
                    and existing_model.file_size == self._get_file_size(model_path)
                ):
                    # File exists with hash and same size, no need to process
                    self._ensure_source_url(session, existing_model, source_url)
                    return existing_model

                if model_hash:
                    model_hash = model_hash.lower()
                    logging.info(f"Using provided hash: {model_hash}")
                else:
                    start_time = time.time()
                    logging.info(f"Hashing model {model_relative_path}")
                    model_hash = self._hash_file(model_path)
                    if not model_hash:
                        return
                    logging.info(
                        f"Model hash: {model_hash} (duration: {time.time() - start_time} seconds)"
                    )

                return self._update_database(
                    session,
                    model_type,
                    model_path,
                    model_relative_path,
                    model_hash,
                    existing_model,
                    source_url,
                )
        except Exception as e:
            logging.error(f"Error processing model file {model_path}: {str(e)}")
            return None

    def retrieve_model_by_hash(self, model_hash, model_type=None, session=None):
        """
        Retrieve a model file from the database by hash and optionally by model type.
        Returns the model object or None if the model doesnt exist or an error occurs.
        """
        try:
            if not can_create_session():
                return

            dispose_session = False

            if session is None:
                session = create_session()
                dispose_session = True

            model = session.query(Model).filter(Model.hash == model_hash)
            if model_type is not None:
                model = model.filter(Model.type == model_type)
            return model.first()
        except Exception as e:
            logging.error(f"Error retrieving model by hash {model_hash}: {str(e)}")
            return None
        finally:
            if dispose_session:
                session.close()

    def retrieve_hash(self, model_path, model_type=None):
        """
        Retrieve the hash of a model file from the database.
        Returns the hash or None if the model doesnt exist or an error occurs.
        """
        try:
            if not can_create_session():
                return

            if model_type is not None:
                result = self._validate_path(model_path)
                if not result:
                    return None
                model_type, model_relative_path = result

            with create_session() as session:
                model = self._get_existing_model(
                    session, model_type, model_relative_path
                )
                if model and model.hash:
                    return model.hash
                return None
        except Exception as e:
            logging.error(f"Error retrieving hash for {model_path}: {str(e)}")
            return None

    def _validate_file_extension(self, file_name):
        """Validate that the file extension is supported."""
        extension = os.path.splitext(file_name)[1]
        if extension not in (".safetensors", ".sft", ".txt", ".csv", ".json", ".yaml"):
            raise ValueError(f"Unsupported unsafe file for download: {file_name}")

    def _check_existing_file(self, model_type, file_name, expected_hash):
        """Check if file exists and has correct hash."""
        destination_path = get_full_path(model_type, file_name, allow_missing=True)
        if self._file_exists(destination_path):
            model = self.process_file(destination_path)
            if model and (expected_hash is None or model.hash == expected_hash):
                logging.debug(
                    f"File {destination_path} already exists in the database and has the correct hash or no hash was provided."
                )
                return destination_path
            else:
                raise ValueError(
                    f"File {destination_path} exists with hash {model.hash if model else 'unknown'} but expected {expected_hash}. Please delete the file and try again."
                )
        return None

    def _check_existing_file_by_hash(self, hash, type, url):
        """Check if a file with the given hash exists in the database and on disk."""
        hash = hash.lower()
        with create_session() as session:
            model = self.retrieve_model_by_hash(hash, type, session)
            if model:
                existing_path = get_full_path(type, model.path)
                if existing_path:
                    logging.debug(
                        f"File {model.path} already exists in the database at {existing_path}"
                    )
                    self._ensure_source_url(session, model, url)
                    return existing_path
                else:
                    logging.debug(
                        f"File {model.path} exists in the database but not on disk"
                    )
        return None

    def _download_file(self, url, destination_path, hasher):
        """Download a file and update the hasher with its contents."""
        response = requests.get(url, stream=True)
        logging.info(f"Downloading {url} to {destination_path}")

        with open(destination_path, "wb") as f:
            total_size = int(response.headers.get("content-length", 0))
            if total_size > 0:
                pbar = comfy.utils.ProgressBar(total_size)
            else:
                pbar = None
            with tqdm(total=total_size, unit="B", unit_scale=True) as progress_bar:
                for chunk in response.iter_content(chunk_size=128 * 1024):
                    if chunk:
                        f.write(chunk)
                        hasher.update(chunk)
                        progress_bar.update(len(chunk))
                        if pbar:
                            pbar.update(len(chunk))

    def _verify_downloaded_hash(self, calculated_hash, expected_hash, destination_path):
        """Verify that the downloaded file has the expected hash."""
        if expected_hash is not None and calculated_hash != expected_hash:
            self._remove_file(destination_path)
            raise ValueError(
                f"Downloaded file hash {calculated_hash} does not match expected hash {expected_hash}"
            )

    def _remove_file(self, file_path):
        """Remove a file from disk."""
        os.remove(file_path)

    def ensure_downloaded(self, type, url, desired_file_name, hash=None):
        """
        Ensure a model file is downloaded and has the correct hash.
        Returns the path to the downloaded file.
        """
        logging.debug(
            f"Ensuring {type} file is downloaded. URL='{url}' Destination='{desired_file_name}' Hash='{hash}'"
        )

        # Validate file extension
        self._validate_file_extension(desired_file_name)

        # Check if file exists with correct hash
        if hash:
            existing_path = self._check_existing_file_by_hash(hash, type, url)
            if existing_path:
                return existing_path

        # Check if file exists locally
        destination_path = get_full_path(type, desired_file_name, allow_missing=True)
        existing_path = self._check_existing_file(type, desired_file_name, hash)
        if existing_path:
            return existing_path

        # Download the file
        hasher = self._get_hasher()
        self._download_file(url, destination_path, hasher)

        # Verify hash
        calculated_hash = hasher.hexdigest()
        self._verify_downloaded_hash(calculated_hash, hash, destination_path)

        # Update database
        self.process_file(destination_path, url, calculated_hash)

        # TODO: Notify frontend to reload models

        return destination_path


model_processor = ModelProcessor()
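
A sketch of how the module-level singleton above was typically exercised; the file path is a made-up example and assumes the database layer initialized successfully:

    from app.model_processor import model_processor

    # Hash and register a local model file (path is hypothetical).
    model = model_processor.process_file("models/checkpoints/v1-5.safetensors")
    if model:
        print(model.hash, model.hash_algorithm)  # blake3 hex digest
        # Later, look the file up again by its content hash.
        found = model_processor.retrieve_model_by_hash(model.hash, model_type="checkpoints")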

comfy/cli_args.py
@@ -203,12 +203,6 @@ parser.add_argument(
     help="Set the base URL for the ComfyUI API. (default: https://api.comfy.org)",
 )
 
-database_default_path = os.path.abspath(
-    os.path.join(os.path.dirname(__file__), "..", "user", "comfyui.db")
-)
-parser.add_argument("--database-url", type=str, default=f"sqlite:///{database_default_path}", help="Specify the database URL, e.g. for an in-memory database you can use 'sqlite:///:memory:'.")
-parser.add_argument("--disable-model-processing", action="store_true", help="Disable model file processing, e.g. computing hashes and extracting metadata.")
-
 if comfy.options.args_parsing:
     args = parser.parse_args()
 else:
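
While it existed, the removed flag accepted a SQLAlchemy-style URL; a stand-in argparse sketch (not ComfyUI's real parser), with the in-memory URL taken verbatim from the flag's help text:

    import argparse

    # Stand-in parser mirroring the removed arguments.
    parser = argparse.ArgumentParser()
    parser.add_argument("--database-url", type=str, default="sqlite:///user/comfyui.db")
    parser.add_argument("--disable-model-processing", action="store_true")

    args = parser.parse_args(["--database-url", "sqlite:///:memory:"])
    print(args.database_url)  # sqlite:///:memory: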

comfy/model_base.py
@@ -102,13 +102,6 @@ def model_sampling(model_config, model_type):
     return ModelSampling(model_config)
 
 
-def convert_tensor(extra, dtype):
-    if hasattr(extra, "dtype"):
-        if extra.dtype != torch.int and extra.dtype != torch.long:
-            extra = extra.to(dtype)
-    return extra
-
-
 class BaseModel(torch.nn.Module):
     def __init__(self, model_config, model_type=ModelType.EPS, device=None, unet_model=UNetModel):
         super().__init__()
@@ -172,13 +165,13 @@ class BaseModel(torch.nn.Module):
         extra_conds = {}
         for o in kwargs:
             extra = kwargs[o]
 
             if hasattr(extra, "dtype"):
-                extra = convert_tensor(extra, dtype)
-            elif isinstance(extra, list):
+                if extra.dtype != torch.int and extra.dtype != torch.long:
+                    extra = extra.to(dtype)
+            if isinstance(extra, list):
                 ex = []
                 for ext in extra:
-                    ex.append(convert_tensor(ext, dtype))
+                    ex.append(ext.to(dtype))
                 extra = ex
             extra_conds[o] = extra
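
The convert_tensor helper removed by this hunk casts only non-integer tensors, leaving int/long conditioning values untouched; a self-contained sketch of that behavior:

    import torch

    def convert_tensor(extra, dtype):
        # Same logic as the deleted helper above.
        if hasattr(extra, "dtype"):
            if extra.dtype != torch.int and extra.dtype != torch.long:
                extra = extra.to(dtype)
        return extra

    print(convert_tensor(torch.ones(2), torch.float16).dtype)  # torch.float16
    print(convert_tensor(torch.ones(2, dtype=torch.long), torch.float16).dtype)  # torch.int64 (unchanged)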

comfy/utils.py
@@ -49,16 +49,10 @@ if hasattr(torch.serialization, "add_safe_globals"): # TODO: this was added in
 else:
     logging.info("Warning, you are using an old pytorch version and some ckpt/pt files might be loaded unsafely. Upgrading to 2.4 or above is recommended.")
 
-def is_html_file(file_path):
-    with open(file_path, "rb") as f:
-        content = f.read(100)
-        return b"<!DOCTYPE html>" in content or b"<html" in content
-
 def load_torch_file(ckpt, safe_load=False, device=None, return_metadata=False):
     if device is None:
         device = torch.device("cpu")
     metadata = None
 
     if ckpt.lower().endswith(".safetensors") or ckpt.lower().endswith(".sft"):
         try:
             with safetensors.safe_open(ckpt, framework="pt", device=device.type) as f:
@@ -68,8 +62,6 @@ def load_torch_file(ckpt, safe_load=False, device=None, return_metadata=False):
                 if return_metadata:
                     metadata = f.metadata()
         except Exception as e:
-            if is_html_file(ckpt):
-                raise ValueError("{}\n\nFile path: {}\n\nThe requested file is an HTML document not a safetensors file. Please re-download the file, not the web page.".format(e, ckpt))
             if len(e.args) > 0:
                 message = e.args[0]
                 if "HeaderTooLarge" in message:
@@ -96,13 +88,6 @@ def load_torch_file(ckpt, safe_load=False, device=None, return_metadata=False):
             sd = pl_sd
         else:
             sd = pl_sd
 
-    try:
-        from app.model_processor import model_processor
-        model_processor.process_file(ckpt)
-    except Exception as e:
-        logging.error(f"Error processing file {ckpt}: {e}")
-
     return (sd, metadata) if return_metadata else sd
 
 def save_torch_file(sd, ckpt, metadata=None):
855
comfy_api/v3/io.py
Normal file
855
comfy_api/v3/io.py
Normal file
@@ -0,0 +1,855 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
from typing import Any, Literal
|
||||||
|
from enum import Enum
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from dataclasses import dataclass, asdict
|
||||||
|
from comfy.comfy_types.node_typing import IO
|
||||||
|
|
||||||
|
|
||||||
|
class InputBehavior(str, Enum):
|
||||||
|
required = "required"
|
||||||
|
optional = "optional"
|
||||||
|
|
||||||
|
|
||||||
|
def is_class(obj):
|
||||||
|
'''
|
||||||
|
Returns True if is a class type.
|
||||||
|
Returns False if is a class instance.
|
||||||
|
'''
|
||||||
|
return isinstance(obj, type)
|
||||||
|
|
||||||
|
|
||||||
|
class NumberDisplay(str, Enum):
|
||||||
|
number = "number"
|
||||||
|
slider = "slider"
|
||||||
|
|
||||||
|
|
||||||
|
class IO_V3:
|
||||||
|
'''
|
||||||
|
Base class for V3 Inputs and Outputs.
|
||||||
|
'''
|
||||||
|
def __init__(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def __init_subclass__(cls, io_type: IO | str, **kwargs):
|
||||||
|
cls.io_type = io_type
|
||||||
|
super().__init_subclass__(**kwargs)
|
||||||
|
|
||||||
|
class InputV3(IO_V3, io_type=None):
|
||||||
|
'''
|
||||||
|
Base class for a V3 Input.
|
||||||
|
'''
|
||||||
|
def __init__(self, id: str, display_name: str=None, behavior=InputBehavior.required, tooltip: str=None, lazy: bool=None):
|
||||||
|
super().__init__()
|
||||||
|
self.id = id
|
||||||
|
self.display_name = display_name
|
||||||
|
self.behavior = behavior
|
||||||
|
self.tooltip = tooltip
|
||||||
|
self.lazy = lazy
|
||||||
|
|
||||||
|
def as_dict_V1(self):
|
||||||
|
return prune_dict({
|
||||||
|
"display_name": self.display_name,
|
||||||
|
"tooltip": self.tooltip,
|
||||||
|
"lazy": self.lazy
|
||||||
|
})
|
||||||
|
|
||||||
|
def get_io_type_V1(self):
|
||||||
|
return self.io_type
|
||||||
|
|
||||||
|
class WidgetInputV3(InputV3, io_type=None):
|
||||||
|
'''
|
||||||
|
Base class for a V3 Input with widget.
|
||||||
|
'''
|
||||||
|
def __init__(self, id: str, display_name: str=None, behavior=InputBehavior.required, tooltip: str=None, lazy: bool=None,
|
||||||
|
default: Any=None,
|
||||||
|
socketless: bool=None, widgetType: str=None):
|
||||||
|
super().__init__(id, display_name, behavior, tooltip, lazy)
|
||||||
|
self.default = default
|
||||||
|
self.socketless = socketless
|
||||||
|
self.widgetType = widgetType
|
||||||
|
|
||||||
|
def as_dict_V1(self):
|
||||||
|
return super().as_dict_V1() | prune_dict({
|
||||||
|
"default": self.default,
|
||||||
|
"socketless": self.socketless,
|
||||||
|
"widgetType": self.widgetType,
|
||||||
|
})
|
||||||
|
|
||||||
|
def CustomType(io_type: IO | str) -> type[IO_V3]:
|
||||||
|
name = f"{io_type}_IO_V3"
|
||||||
|
return type(name, (IO_V3,), {}, io_type=io_type)
|
||||||
|
|
||||||
|
def CustomInput(id: str, io_type: IO | str, display_name: str=None, behavior=InputBehavior.required, tooltip: str=None, lazy: bool=None) -> InputV3:
|
||||||
|
'''
|
||||||
|
Defines input for 'io_type'. Can be used to stand in for non-core types.
|
||||||
|
'''
|
||||||
|
input_kwargs = {
|
||||||
|
"id": id,
|
||||||
|
"display_name": display_name,
|
||||||
|
"behavior": behavior,
|
||||||
|
"tooltip": tooltip,
|
||||||
|
"lazy": lazy,
|
||||||
|
}
|
||||||
|
return type(f"{io_type}Input", (InputV3,), {}, io_type=io_type)(**input_kwargs)
|
||||||
|
|
||||||
|
def CustomOutput(id: str, io_type: IO | str, display_name: str=None, tooltip: str=None) -> OutputV3:
|
||||||
|
'''
|
||||||
|
Defines output for 'io_type'. Can be used to stand in for non-core types.
|
||||||
|
'''
|
||||||
|
input_kwargs = {
|
||||||
|
"id": id,
|
||||||
|
"display_name": display_name,
|
||||||
|
"tooltip": tooltip,
|
||||||
|
}
|
||||||
|
return type(f"{io_type}Output", (OutputV3,), {}, io_type=io_type)(**input_kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
class BooleanInput(WidgetInputV3, io_type=IO.BOOLEAN):
|
||||||
|
'''
|
||||||
|
Boolean input.
|
||||||
|
'''
|
||||||
|
def __init__(self, id: str, display_name: str=None, behavior=InputBehavior.required, tooltip: str=None, lazy: bool=None,
|
||||||
|
default: bool=None, label_on: str=None, label_off: str=None,
|
||||||
|
socketless: bool=None, widgetType: str=None):
|
||||||
|
super().__init__(id, display_name, behavior, tooltip, lazy, default, socketless, widgetType)
|
||||||
|
self.label_on = label_on
|
||||||
|
self.label_off = label_off
|
||||||
|
self.default: bool
|
||||||
|
|
||||||
|
def as_dict_V1(self):
|
||||||
|
return super().as_dict_V1() | prune_dict({
|
||||||
|
"label_on": self.label_on,
|
||||||
|
"label_off": self.label_off,
|
||||||
|
})
|
||||||
|
|
||||||
|
class IntegerInput(WidgetInputV3, io_type=IO.INT):
|
||||||
|
'''
|
||||||
|
Integer input.
|
||||||
|
'''
|
||||||
|
def __init__(self, id: str, display_name: str=None, behavior=InputBehavior.required, tooltip: str=None, lazy: bool=None,
|
||||||
|
default: int=None, min: int=None, max: int=None, step: int=None, control_after_generate: bool=None,
|
||||||
|
display_mode: NumberDisplay=None, socketless: bool=None, widgetType: str=None):
|
||||||
|
super().__init__(id, display_name, behavior, tooltip, lazy, default, socketless, widgetType)
|
||||||
|
self.min = min
|
||||||
|
self.max = max
|
||||||
|
self.step = step
|
||||||
|
self.control_after_generate = control_after_generate
|
||||||
|
self.display_mode = display_mode
|
||||||
|
self.default: int
|
||||||
|
|
||||||
|
def as_dict_V1(self):
|
||||||
|
return super().as_dict_V1() | prune_dict({
|
||||||
|
"min": self.min,
|
||||||
|
"max": self.max,
|
||||||
|
"step": self.step,
|
||||||
|
"control_after_generate": self.control_after_generate,
|
||||||
|
"display": self.display_mode, # NOTE: in frontend, the parameter is called "display"
|
||||||
|
})
|
||||||
|
|
||||||
|
class FloatInput(WidgetInputV3, io_type=IO.FLOAT):
|
||||||
|
'''
|
||||||
|
Float input.
|
||||||
|
'''
|
||||||
|
def __init__(self, id: str, display_name: str=None, behavior=InputBehavior.required, tooltip: str=None, lazy: bool=None,
|
||||||
|
default: float=None, min: float=None, max: float=None, step: float=None, round: float=None,
|
||||||
|
display_mode: NumberDisplay=None, socketless: bool=None, widgetType: str=None):
|
||||||
|
super().__init__(id, display_name, behavior, tooltip, lazy, default, socketless, widgetType)
|
||||||
|
self.default = default
|
||||||
|
self.min = min
|
||||||
|
self.max = max
|
||||||
|
self.step = step
|
||||||
|
self.round = round
|
||||||
|
self.display_mode = display_mode
|
||||||
|
self.default: float
|
||||||
|
|
||||||
|
def as_dict_V1(self):
|
||||||
|
return super().as_dict_V1() | prune_dict({
|
||||||
|
"min": self.min,
|
||||||
|
"max": self.max,
|
||||||
|
"step": self.step,
|
||||||
|
"round": self.round,
|
||||||
|
"display": self.display_mode, # NOTE: in frontend, the parameter is called "display"
|
||||||
|
})
|
||||||
|
|
||||||
|
class StringInput(WidgetInputV3, io_type=IO.STRING):
|
||||||
|
'''
|
||||||
|
String input.
|
||||||
|
'''
|
||||||
|
def __init__(self, id: str, display_name: str=None, behavior=InputBehavior.required, tooltip: str=None, lazy: bool=None,
|
||||||
|
multiline=False, placeholder: str=None, default: int=None,
|
||||||
|
socketless: bool=None, widgetType: str=None):
|
||||||
|
super().__init__(id, display_name, behavior, tooltip, lazy, default, socketless, widgetType)
|
||||||
|
self.multiline = multiline
|
||||||
|
self.placeholder = placeholder
|
||||||
|
self.default: str
|
||||||
|
|
||||||
|
def as_dict_V1(self):
|
||||||
|
return super().as_dict_V1() | prune_dict({
|
||||||
|
"multiline": self.multiline,
|
||||||
|
"placeholder": self.placeholder,
|
||||||
|
})
|
||||||
|
|
||||||
|
class ComboInput(WidgetInputV3, io_type=IO.COMBO):
|
||||||
|
'''Combo input (dropdown).'''
|
||||||
|
def __init__(self, id: str, options: list[str], display_name: str=None, behavior=InputBehavior.required, tooltip: str=None, lazy: bool=None,
|
||||||
|
default: str=None, control_after_generate: bool=None,
|
||||||
|
socketless: bool=None, widgetType: str=None):
|
||||||
|
super().__init__(id, display_name, behavior, tooltip, lazy, default, socketless, widgetType)
|
||||||
|
self.multiselect = False
|
||||||
|
self.options = options
|
||||||
|
self.control_after_generate = control_after_generate
|
||||||
|
self.default: str
|
||||||
|
|
||||||
|
def as_dict_V1(self):
|
||||||
|
return super().as_dict_V1() | prune_dict({
|
||||||
|
"multiselect": self.multiselect,
|
||||||
|
"options": self.options,
|
||||||
|
"control_after_generate": self.control_after_generate,
|
||||||
|
})
|
||||||
|
|
||||||
|
class MultiselectComboWidget(ComboInput, io_type=IO.COMBO):
|
||||||
|
'''Multiselect Combo input (dropdown for selecting potentially more than one value).'''
|
||||||
|
def __init__(self, id: str, options: list[str], display_name: str=None, behavior=InputBehavior.required, tooltip: str=None, lazy: bool=None,
|
||||||
|
default: list[str]=None, placeholder: str=None, chip: bool=None, control_after_generate: bool=None,
|
||||||
|
socketless: bool=None, widgetType: str=None):
|
||||||
|
super().__init__(id, options, display_name, behavior, tooltip, lazy, default, control_after_generate, socketless, widgetType)
|
||||||
|
self.multiselect = True
|
||||||
|
self.placeholder = placeholder
|
||||||
|
self.chip = chip
|
||||||
|
self.default: list[str]
|
||||||
|
|
||||||
|
def as_dict_V1(self):
|
||||||
|
return super().as_dict_V1() | prune_dict({
|
||||||
|
"multiselect": self.multiselect,
|
||||||
|
"placeholder": self.placeholder,
|
||||||
|
"chip": self.chip,
|
||||||
|
})
|
||||||
|
|
||||||
|
class ImageInput(InputV3, io_type=IO.IMAGE):
|
||||||
|
'''
|
||||||
|
Image input.
|
||||||
|
'''
|
||||||
|
def __init__(self, id: str, display_name: str=None, behavior=InputBehavior.required, tooltip: str=None):
|
||||||
|
super().__init__(id, display_name, behavior, tooltip)
|
||||||
|
|
||||||
|
class MaskInput(InputV3, io_type=IO.MASK):
|
||||||
|
'''
|
||||||
|
Mask input.
|
||||||
|
'''
|
||||||
|
def __init__(self, id: str, display_name: str=None, behavior=InputBehavior.required, tooltip: str=None):
|
||||||
|
super().__init__(id, display_name, behavior, tooltip)
|
||||||
|
|
||||||
|
class LatentInput(InputV3, io_type=IO.LATENT):
|
||||||
|
'''
|
||||||
|
Latent input.
|
||||||
|
'''
|
||||||
|
def __init__(self, id: str, display_name: str=None, behavior=InputBehavior.required, tooltip: str=None):
|
||||||
|
super().__init__(id, display_name, behavior, tooltip)
|
||||||
|
|
||||||
|
class MultitypedInput(InputV3, io_type="COMFY_MULTITYPED_V3"):
|
||||||
|
'''
|
||||||
|
Input that permits more than one input type.
|
||||||
|
'''
|
||||||
|
def __init__(self, id: str, io_types: list[type[IO_V3] | InputV3 | IO |str], display_name: str=None, behavior=InputBehavior.required, tooltip: str=None,):
|
||||||
|
super().__init__(id, display_name, behavior, tooltip)
|
||||||
|
self._io_types = io_types
|
||||||
|
|
||||||
|
@property
|
||||||
|
def io_types(self) -> list[type[InputV3]]:
|
||||||
|
'''
|
||||||
|
Returns list of InputV3 class types permitted.
|
||||||
|
'''
|
||||||
|
io_types = []
|
||||||
|
for x in self._io_types:
|
||||||
|
if not is_class(x):
|
||||||
|
io_types.append(type(x))
|
||||||
|
else:
|
||||||
|
io_types.append(x)
|
||||||
|
return io_types
|
||||||
|
|
||||||
|
def get_io_type_V1(self):
|
||||||
|
return ",".join(x.io_type for x in self.io_types)
|
||||||
|
|
||||||
|
|
||||||
|
class OutputV3:
|
||||||
|
def __init__(self, id: str, display_name: str=None, tooltip: str=None,
|
||||||
|
is_output_list=False):
|
||||||
|
self.id = id
|
||||||
|
self.display_name = display_name
|
||||||
|
self.tooltip = tooltip
|
||||||
|
self.is_output_list = is_output_list
|
||||||
|
|
||||||
|
def __init_subclass__(cls, io_type, **kwargs):
|
||||||
|
cls.io_type = io_type
|
||||||
|
super().__init_subclass__(**kwargs)
|
||||||
|
|
||||||
|
class IntegerOutput(OutputV3, io_type=IO.INT):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class FloatOutput(OutputV3, io_type=IO.FLOAT):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class StringOutput(OutputV3, io_type=IO.STRING):
|
||||||
|
pass
|
||||||
|
# def __init__(self, id: str, display_name: str=None, tooltip: str=None):
|
||||||
|
# super().__init__(id, display_name, tooltip)
|
||||||
|
|
||||||
|
class ImageOutput(OutputV3, io_type=IO.IMAGE):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class MaskOutput(OutputV3, io_type=IO.MASK):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class LatentOutput(OutputV3, io_type=IO.LATENT):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class DynamicInput(InputV3, io_type=None):
|
||||||
|
'''
|
||||||
|
Abstract class for dynamic input registration.
|
||||||
|
'''
|
||||||
|
def __init__(self, io_type: str, id: str, display_name: str=None):
|
||||||
|
super().__init__(io_type, id, display_name)
|
||||||
|
|
||||||
|
class DynamicOutput(OutputV3, io_type=None):
|
||||||
|
'''
|
||||||
|
Abstract class for dynamic output registration.
|
||||||
|
'''
|
||||||
|
def __init__(self, io_type: str, id: str, display_name: str=None):
|
||||||
|
super().__init__(io_type, id, display_name)
|
||||||
|
|
||||||
|
class AutoGrowDynamicInput(DynamicInput, io_type="COMFY_MULTIGROW_V3"):
|
||||||
|
'''
|
||||||
|
Dynamic Input that adds another template_input each time one is provided.
|
||||||
|
|
||||||
|
Additional inputs are forced to have 'InputBehavior.optional'.
|
||||||
|
'''
|
||||||
|
def __init__(self, id: str, template_input: InputV3, min: int=1, max: int=None):
|
||||||
|
super().__init__("AutoGrowDynamicInput", id)
|
||||||
|
self.template_input = template_input
|
||||||
|
if min is not None:
|
||||||
|
assert(min >= 1)
|
||||||
|
if max is not None:
|
||||||
|
assert(max >= 1)
|
||||||
|
self.min = min
|
||||||
|
self.max = max
|
||||||
|
|
||||||
|
class ComboDynamicInput(DynamicInput, io_type="COMFY_COMBODYNAMIC_V3"):
|
||||||
|
def __init__(self, id: str):
|
||||||
|
pass
|
||||||
|
|
||||||
|
AutoGrowDynamicInput(id="dynamic", template_input=ImageInput(id="image"))
|
||||||
|
|
||||||
|
|
||||||

class Hidden(str, Enum):
    '''
    Enumerator for requesting hidden variables in nodes.
    '''

    unique_id = "UNIQUE_ID"
    """UNIQUE_ID is the unique identifier of the node, and matches the id property of the node on the client side. It is commonly used in client-server communications (see messages)."""
    prompt = "PROMPT"
    """PROMPT is the complete prompt sent by the client to the server. See the prompt object for a full description."""
    extra_pnginfo = "EXTRA_PNGINFO"
    """EXTRA_PNGINFO is a dictionary that will be copied into the metadata of any .png files saved. Custom nodes can store additional information in this dictionary for saving (or as a way to communicate with a downstream node)."""
    dynprompt = "DYNPROMPT"
    """DYNPROMPT is an instance of comfy_execution.graph.DynamicPrompt. It differs from PROMPT in that it may mutate during the course of execution in response to Node Expansion."""
    auth_token_comfy_org = "AUTH_TOKEN_COMFY_ORG"
    """AUTH_TOKEN_COMFY_ORG is a token acquired from signing into a ComfyOrg account on the frontend."""
    api_key_comfy_org = "API_KEY_COMFY_ORG"
    """API_KEY_COMFY_ORG is an API key generated by ComfyOrg that allows skipping signing into a ComfyOrg account on the frontend."""

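# Sketch (editor's assumption, not part of this diff): a SchemaV3 declaring
# hidden=[Hidden.unique_id, Hidden.prompt] surfaces in the V1 dict built by
# ComfyNodeV3.INPUT_TYPES() further below as
#   {"required": {...}, "hidden": {"unique_id": ("UNIQUE_ID",), "prompt": ("PROMPT",)}}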


@dataclass
class NodeInfoV1:
    input: dict=None
    input_order: dict[str, list[str]]=None
    output: list[str]=None
    output_is_list: list[bool]=None
    output_name: list[str]=None
    output_tooltips: list[str]=None
    name: str=None
    display_name: str=None
    description: str=None
    python_module: Any=None
    category: str=None
    output_node: bool=None
    deprecated: bool=None
    experimental: bool=None
    api_node: bool=None


def as_pruned_dict(dataclass_obj):
    '''Return dict of dataclass object with pruned None values.'''
    return prune_dict(asdict(dataclass_obj))


def prune_dict(d: dict):
    return {k: v for k, v in d.items() if v is not None}

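# e.g. prune_dict({"a": 1, "b": None}) == {"a": 1}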


@dataclass
class SchemaV3:
    """Definition of V3 node properties."""

    node_id: str
    """ID of node - should be globally unique. If this is a custom node, add a prefix or postfix to avoid name clashes."""
    display_name: str = None
    """Display name of node."""
    category: str = "sd"
    """The category of the node, as per the "Add Node" menu."""
    inputs: list[InputV3]=None
    outputs: list[OutputV3]=None
    hidden: list[Hidden]=None
    description: str=""
    """Node description, shown as a tooltip when hovering over the node."""
    is_input_list: bool = False
    """A flag indicating if this node implements the additional code necessary to deal with OUTPUT_IS_LIST nodes.

    All inputs of ``type`` will become ``list[type]``, regardless of how many items are passed in. This also affects ``check_lazy_status``.

    From the docs:

    A node can also override the default input behaviour and receive the whole list in a single call. This is done by setting a class attribute `INPUT_IS_LIST` to ``True``.

    Comfy Docs: https://docs.comfy.org/custom-nodes/backend/lists#list-processing
    """
    is_output_node: bool=False
    """Flags this node as an output node, causing any inputs it requires to be executed.

    If a node is not connected to any output nodes, that node will not be executed. Usage::

        OUTPUT_NODE = True

    From the docs:

    By default, a node is not considered an output. Set ``OUTPUT_NODE = True`` to specify that it is.

    Comfy Docs: https://docs.comfy.org/custom-nodes/backend/server_overview#output-node
    """
    is_deprecated: bool=False
    """Flags a node as deprecated, indicating to users that they should find alternatives to this node."""
    is_experimental: bool=False
    """Flags a node as experimental, informing users that it may change or not work as expected."""
    is_api_node: bool=False
    """Flags a node as an API node. See: https://docs.comfy.org/tutorials/api-nodes/overview."""

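# Minimal usage sketch (editor's addition; the node_id is hypothetical, and the
# pattern mirrors TestNode at the bottom of this file):
#
# SCHEMA = SchemaV3(
#     node_id="MyCustomNode_v3",
#     display_name="My Custom Node",
#     category="examples",
#     inputs=[IntegerInput("count")],
#     outputs=[ImageOutput("image")],
#     is_output_node=True,
# )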


class classproperty(object):
    def __init__(self, f):
        self.f = f

    def __get__(self, obj, owner):
        return self.f(owner)

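# classproperty lets the V1-style class attributes below (DESCRIPTION, CATEGORY,
# RETURN_TYPES, ...) be computed lazily at class level: the first access of e.g.
# `MyNode.CATEGORY` calls GET_SCHEMA() once, which fills in the cached _CATEGORY.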


class ComfyNodeV3(ABC):
    """Common base class for all V3 nodes."""

    RELATIVE_PYTHON_MODULE = None

    #############################################
    # V1 Backwards Compatibility code
    #--------------------------------------------
    _DESCRIPTION = None
    @classproperty
    def DESCRIPTION(cls):
        if cls._DESCRIPTION is None:
            cls.GET_SCHEMA()
        return cls._DESCRIPTION

    _CATEGORY = None
    @classproperty
    def CATEGORY(cls):
        if cls._CATEGORY is None:
            cls.GET_SCHEMA()
        return cls._CATEGORY

    _EXPERIMENTAL = None
    @classproperty
    def EXPERIMENTAL(cls):
        if cls._EXPERIMENTAL is None:
            cls.GET_SCHEMA()
        return cls._EXPERIMENTAL

    _DEPRECATED = None
    @classproperty
    def DEPRECATED(cls):
        if cls._DEPRECATED is None:
            cls.GET_SCHEMA()
        return cls._DEPRECATED

    _API_NODE = None
    @classproperty
    def API_NODE(cls):
        if cls._API_NODE is None:
            cls.GET_SCHEMA()
        return cls._API_NODE

    _OUTPUT_NODE = None
    @classproperty
    def OUTPUT_NODE(cls):
        if cls._OUTPUT_NODE is None:
            cls.GET_SCHEMA()
        return cls._OUTPUT_NODE

    _INPUT_IS_LIST = None
    @classproperty
    def INPUT_IS_LIST(cls):
        if cls._INPUT_IS_LIST is None:
            cls.GET_SCHEMA()
        return cls._INPUT_IS_LIST

    _OUTPUT_IS_LIST = None
    @classproperty
    def OUTPUT_IS_LIST(cls):
        if cls._OUTPUT_IS_LIST is None:
            cls.GET_SCHEMA()
        return cls._OUTPUT_IS_LIST

    _RETURN_TYPES = None
    @classproperty
    def RETURN_TYPES(cls):
        if cls._RETURN_TYPES is None:
            cls.GET_SCHEMA()
        return cls._RETURN_TYPES

    _RETURN_NAMES = None
    @classproperty
    def RETURN_NAMES(cls):
        if cls._RETURN_NAMES is None:
            cls.GET_SCHEMA()
        return cls._RETURN_NAMES

    _OUTPUT_TOOLTIPS = None
    @classproperty
    def OUTPUT_TOOLTIPS(cls):
        if cls._OUTPUT_TOOLTIPS is None:
            cls.GET_SCHEMA()
        return cls._OUTPUT_TOOLTIPS

    FUNCTION = "execute"

    @classmethod
    def INPUT_TYPES(cls) -> dict[str, dict]:
        schema = cls.DEFINE_SCHEMA()
        # for V1, make inputs be a dict with potential keys {required, optional, hidden}
        input = {
            "required": {}
        }
        if schema.inputs:
            for i in schema.inputs:
                input.setdefault(i.behavior.value, {})[i.id] = (i.get_io_type_V1(), i.as_dict_V1())
        if schema.hidden:
            for hidden in schema.hidden:
                input.setdefault("hidden", {})[hidden.name] = (hidden.value,)
        return input

    @classmethod
    def GET_SCHEMA(cls) -> SchemaV3:
        schema = cls.DEFINE_SCHEMA()
        if cls._DESCRIPTION is None:
            cls._DESCRIPTION = schema.description
        if cls._CATEGORY is None:
            cls._CATEGORY = schema.category
        if cls._EXPERIMENTAL is None:
            cls._EXPERIMENTAL = schema.is_experimental
        if cls._DEPRECATED is None:
            cls._DEPRECATED = schema.is_deprecated
        if cls._API_NODE is None:
            cls._API_NODE = schema.is_api_node
        if cls._OUTPUT_NODE is None:
            cls._OUTPUT_NODE = schema.is_output_node
        if cls._INPUT_IS_LIST is None:
            cls._INPUT_IS_LIST = schema.is_input_list

        if cls._RETURN_TYPES is None:
            output = []
            output_name = []
            output_is_list = []
            output_tooltips = []
            if schema.outputs:
                for o in schema.outputs:
                    output.append(o.io_type)
                    output_name.append(o.display_name if o.display_name else o.io_type)
                    output_is_list.append(o.is_output_list)
                    output_tooltips.append(o.tooltip if o.tooltip else None)

            cls._RETURN_TYPES = output
            cls._RETURN_NAMES = output_name
            cls._OUTPUT_IS_LIST = output_is_list
            cls._OUTPUT_TOOLTIPS = output_tooltips

        return schema

    @classmethod
    def GET_NODE_INFO_V1(cls) -> dict[str, Any]:
        schema = cls.GET_SCHEMA()
        # get V1 inputs
        input = cls.INPUT_TYPES()

        # create separate lists from output fields
        output = []
        output_is_list = []
        output_name = []
        output_tooltips = []
        if schema.outputs:
            for o in schema.outputs:
                output.append(o.io_type)
                output_is_list.append(o.is_output_list)
                output_name.append(o.display_name if o.display_name else o.io_type)
                output_tooltips.append(o.tooltip if o.tooltip else None)

        info = NodeInfoV1(
            input=input,
            input_order={key: list(value.keys()) for (key, value) in input.items()},
            output=output,
            output_is_list=output_is_list,
            output_name=output_name,
            output_tooltips=output_tooltips,
            name=schema.node_id,
            display_name=schema.display_name,
            category=schema.category,
            description=schema.description,
            output_node=schema.is_output_node,
            deprecated=schema.is_deprecated,
            experimental=schema.is_experimental,
            api_node=schema.is_api_node,
            python_module=getattr(cls, "RELATIVE_PYTHON_MODULE", "nodes")
        )
        return asdict(info)
    #--------------------------------------------
    #############################################

    @classmethod
    def GET_NODE_INFO_V3(cls) -> dict[str, Any]:
        schema = cls.GET_SCHEMA()
        # TODO: finish
        return None


    @classmethod
    @abstractmethod
    def DEFINE_SCHEMA(cls) -> SchemaV3:
        """
        Override this function with one that returns a SchemaV3 instance.
        """
        return None
    DEFINE_SCHEMA = None

    def __init__(self):
        if self.DEFINE_SCHEMA is None:
            raise Exception("No DEFINE_SCHEMA function was defined for this node.")

    @abstractmethod
    def execute(self, **kwargs) -> NodeOutput:
        pass


# class ReturnedInputs:
#     def __init__(self):
#         pass

# class ReturnedOutputs:
#     def __init__(self):
#         pass


class NodeOutput:
    '''
    Standardized output of a node; can pass in any number of args and/or a UIOutput into 'ui' kwarg.
    '''
    def __init__(self, *args: Any, ui: UIOutput | dict=None, expand: dict=None, block_execution: str=None, **kwargs):
        self.args = args
        self.ui = ui
        self.expand = expand
        self.block_execution = block_execution

    @property
    def result(self):
        return self.args if len(self.args) > 0 else None

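# Typical return shapes from a node's execute() (editor's sketch, assuming the
# UIImages and UIText helpers defined below):
#   NodeOutput(image_tensor)                     -> plain result tuple
#   NodeOutput(image_tensor, ui=UIImages([...])) -> result plus UI preview
#   NodeOutput(ui=UIText("done"))                -> UI only, no outputs
#   NodeOutput(block_execution="reason")         -> blocks downstream execution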


class SavedResult:
    def __init__(self, filename: str, subfolder: str, type: Literal["input", "output", "temp"]):
        self.filename = filename
        self.subfolder = subfolder
        self.type = type

    def as_dict(self):
        return {
            "filename": self.filename,
            "subfolder": self.subfolder,
            "type": self.type
        }

class UIOutput(ABC):
    def __init__(self):
        pass

    @abstractmethod
    def as_dict(self) -> dict:
        ...  # TODO: finish


class UIImages(UIOutput):
    def __init__(self, values: list[SavedResult | dict], animated=False, **kwargs):
        self.values = values
        self.animated = animated

    def as_dict(self):
        values = [x.as_dict() if isinstance(x, SavedResult) else x for x in self.values]
        return {
            "images": values,
            "animated": (self.animated,)
        }


class UILatents(UIOutput):
    def __init__(self, values: list[SavedResult | dict], **kwargs):
        self.values = values

    def as_dict(self):
        values = [x.as_dict() if isinstance(x, SavedResult) else x for x in self.values]
        return {
            "latents": values,
        }


class UIAudio(UIOutput):
    def __init__(self, values: list[SavedResult | dict], **kwargs):
        self.values = values

    def as_dict(self):
        values = [x.as_dict() if isinstance(x, SavedResult) else x for x in self.values]
        return {
            "audio": values,
        }


class UI3D(UIOutput):
    def __init__(self, values: list[SavedResult | dict], **kwargs):
        self.values = values

    def as_dict(self):
        values = [x.as_dict() if isinstance(x, SavedResult) else x for x in self.values]
        return {
            "3d": values,
        }


class UIText(UIOutput):
    def __init__(self, value: str, **kwargs):
        self.value = value

    def as_dict(self):
        return {"text": (self.value,)}

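# Sketch (editor's addition) of pairing SavedResult with a UIOutput subclass;
# the filename/subfolder values are hypothetical:
#
# saved = SavedResult("ComfyUI_00001_.png", "", "output")
# NodeOutput(images_tensor, ui=UIImages([saved]))
# # -> ui.as_dict() == {"images": [saved.as_dict()], "animated": (False,)}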

class TestNode(ComfyNodeV3):
    SCHEMA = SchemaV3(
        node_id="TestNode_v3",
        display_name="Test Node (V3)",
        category="v3_test",
        inputs=[IntegerInput("my_int"),
                #AutoGrowDynamicInput("growing", ImageInput),
                MaskInput("thing"),
                ],
        outputs=[ImageOutput("image_output")],
        hidden=[Hidden.api_key_comfy_org, Hidden.auth_token_comfy_org, Hidden.unique_id]
    )

    # @classmethod
    # def GET_SCHEMA(cls):
    #     return cls.SCHEMA

    @classmethod
    def DEFINE_SCHEMA(cls):
        return cls.SCHEMA

    def execute(self, **kwargs):
        pass


if __name__ == "__main__":
    print("hello there")
    inputs: list[InputV3] = [
        IntegerInput("my_int"),
        CustomInput("xyz", "XYZ"),
        CustomInput("model1", "MODEL_M"),
        ImageInput("my_image"),
        FloatInput("my_float"),
        MultitypedInput("my_inputs", [CustomType("MODEL_M"), CustomType("XYZ")]),
    ]

    outputs: list[OutputV3] = [
        ImageOutput("image"),
        CustomOutput("xyz", "XYZ")
    ]

    for c in inputs:
        if isinstance(c, MultitypedInput):
            print(f"{c}, {type(c)}, {type(c).io_type}, {c.id}, {[x.io_type for x in c.io_types]}")
            print(c.get_io_type_V1())
        else:
            print(f"{c}, {type(c)}, {type(c).io_type}, {c.id}")

    for c in outputs:
        print(f"{c}, {type(c)}, {type(c).io_type}, {c.id}")

    zz = TestNode()
    print(zz.GET_NODE_INFO_V1())

    # aa = NodeInfoV1()
    # print(asdict(aa))
    # print(as_pruned_dict(aa))
67  comfy_extras/nodes_v3_test.py  Normal file
@@ -0,0 +1,67 @@
import torch

from comfy_api.v3.io import (
    ComfyNodeV3, SchemaV3, CustomType, CustomInput, CustomOutput, InputBehavior, NumberDisplay,
    IntegerInput, MaskInput, ImageInput, ComboDynamicInput, NodeOutput,
)


class V3TestNode(ComfyNodeV3):

    @classmethod
    def DEFINE_SCHEMA(cls):
        return SchemaV3(
            node_id="V3TestNode1",
            display_name="V3 Test Node (1djekjd)",
            description="This is a funky V3 node test.",
            category="v3 nodes",
            inputs=[
                IntegerInput("some_int", display_name="new_name", min=0, tooltip="My tooltip 😎", display_mode=NumberDisplay.slider),
                MaskInput("mask", behavior=InputBehavior.optional),
                ImageInput("image", display_name="new_image"),
                # IntegerInput("some_int", display_name="new_name", min=0, tooltip="My tooltip 😎", display=NumberDisplay.slider, ),
                # ComboDynamicInput("mask", behavior=InputBehavior.optional),
                # IntegerInput("some_int", display_name="new_name", min=0, tooltip="My tooltip 😎", display=NumberDisplay.slider,
                #              dependent_inputs=[ComboDynamicInput("mask", behavior=InputBehavior.optional)],
                #              dependent_values=[lambda my_value: IO.STRING if my_value < 5 else IO.NUMBER],
                #              ),
                # ["option1", "option2", "option3"]
                # ComboDynamicInput["sdfgjhl", [ComboDynamicOptions("option1", [IntegerInput("some_int", display_name="new_name", min=0, tooltip="My tooltip 😎", display=NumberDisplay.slider, ImageInput(), MaskInput(), String()]),
                #                               ComboDynamicOptions("option2", [])
                #                               ]]
            ],
            is_output_node=True,
        )

    def execute(self, some_int: int, image: torch.Tensor, mask: torch.Tensor=None, **kwargs):
        a = NodeOutput(1)
        aa = NodeOutput(1, "hellothere")
        ab = NodeOutput(1, "hellothere", ui={"lol": "jk"})
        b = NodeOutput()
        c = NodeOutput(ui={"lol": "jk"})
        return NodeOutput()
        # unreachable alternatives kept for reference:
        # return NodeOutput(1)
        # return NodeOutput(1, block_execution="blocked for testing")
        # return ()


NODES_LIST: list[ComfyNodeV3] = [
    V3TestNode,
]


# NODE_CLASS_MAPPINGS = {}
# NODE_DISPLAY_NAME_MAPPINGS = {}
# for node in NODES_LIST:
#     schema = node.GET_SCHEMA()
#     NODE_CLASS_MAPPINGS[schema.node_id] = node
#     if schema.display_name:
#         NODE_DISPLAY_NAME_MAPPINGS[schema.node_id] = schema.display_name
17  execution.py
@@ -17,6 +17,7 @@ from comfy_execution.graph import get_input_info, ExecutionList, DynamicPrompt,
 from comfy_execution.graph_utils import is_link, GraphBuilder
 from comfy_execution.caching import HierarchicalCache, LRUCache, DependencyAwareCache, CacheKeySetInputSignature, CacheKeySetID
 from comfy_execution.validation import validate_node_input
+from comfy_api.v3.io import NodeOutput

 class ExecutionResult(Enum):
     SUCCESS = 0
@@ -242,6 +243,22 @@ def get_output_data(obj, input_data_all, execution_block_cb=None, pre_execute_cb
                 result = tuple([result] * len(obj.RETURN_TYPES))
             results.append(result)
             subgraph_results.append((None, result))
+        elif isinstance(r, NodeOutput):
+            if r.ui is not None:
+                uis.append(r.ui.as_dict())
+            if r.expand is not None:
+                has_subgraph = True
+                new_graph = r.expand
+                result = r.result
+                if r.block_execution is not None:
+                    result = tuple([ExecutionBlocker(r.block_execution)] * len(obj.RETURN_TYPES))
+                subgraph_results.append((new_graph, result))
+            elif r.result is not None:
+                result = r.result
+                if r.block_execution is not None:
+                    result = tuple([ExecutionBlocker(r.block_execution)] * len(obj.RETURN_TYPES))
+                results.append(result)
+                subgraph_results.append((None, result))
         else:
             if isinstance(r, ExecutionBlocker):
                 r = tuple([r] * len(obj.RETURN_TYPES))

folder_paths.py
@@ -275,7 +275,7 @@ def filter_files_extensions(files: Collection[str], extensions: Collection[str])


-def get_full_path(folder_name: str, filename: str, allow_missing: bool = False) -> str | None:
+def get_full_path(folder_name: str, filename: str) -> str | None:
     global folder_names_and_paths
     folder_name = map_legacy(folder_name)
     if folder_name not in folder_names_and_paths:
@@ -288,8 +288,6 @@ def get_full_path(folder_name: str, filename: str, allow_missing: bool = False)
         return full_path
     elif os.path.islink(full_path):
         logging.warning("WARNING path {} exists but doesn't link anywhere, skipping.".format(full_path))
-    elif allow_missing:
-        return full_path

     return None
@@ -301,27 +299,6 @@ def get_full_path_or_raise(folder_name: str, filename: str) -> str:
     return full_path


-def get_relative_path(full_path: str) -> tuple[str, str] | None:
-    """Convert a full path back to a type-relative path.
-
-    Args:
-        full_path: The full path to the file
-
-    Returns:
-        tuple[str, str] | None: A tuple of (model_type, relative_path) if found, None otherwise
-    """
-    global folder_names_and_paths
-    full_path = os.path.normpath(full_path)
-
-    for model_type, (paths, _) in folder_names_and_paths.items():
-        for base_path in paths:
-            base_path = os.path.normpath(base_path)
-            if full_path.startswith(base_path):
-                relative_path = os.path.relpath(full_path, base_path)
-                return model_type, relative_path
-
-    return None
-
-
 def get_filename_list_(folder_name: str) -> tuple[list[str], dict[str, float], float]:
     folder_name = map_legacy(folder_name)
     global folder_names_and_paths

9  main.py
@@ -147,6 +147,7 @@ def cuda_malloc_warning():
     if cuda_malloc_warning:
         logging.warning("\nWARNING: this card most likely does not support cuda-malloc, if you get \"CUDA error\" please run ComfyUI with: --disable-cuda-malloc\n")
 
+
 def prompt_worker(q, server_instance):
     current_time: float = 0.0
     cache_type = execution.CacheType.CLASSIC
@@ -236,13 +237,6 @@ def cleanup_temp():
     if os.path.exists(temp_dir):
         shutil.rmtree(temp_dir, ignore_errors=True)
 
-def setup_database():
-    try:
-        from app.database.db import init_db, dependencies_available
-        if dependencies_available():
-            init_db()
-    except Exception as e:
-        logging.error(f"Failed to initialize database. Please ensure you have installed the latest requirements. If the error persists, please report this as in future the database will be required: {e}")
 
 def start_comfyui(asyncio_loop=None):
     """
@@ -272,7 +266,6 @@ def start_comfyui(asyncio_loop=None):
     hook_breaker_ac10a0.restore_functions()
 
     cuda_malloc_warning()
-    setup_database()
 
     prompt_server.add_routes()
     hijack_progress(prompt_server)

16  nodes.py
@@ -26,6 +26,7 @@ import comfy.sd
 import comfy.utils
 import comfy.controlnet
 from comfy.comfy_types import IO, ComfyNodeABC, InputTypeDict, FileLocator
+from comfy_api.v3.io import ComfyNodeV3
 
 import comfy.clip_vision
 
@@ -2129,6 +2130,7 @@ def load_custom_node(module_path: str, ignore=set(), module_parent="custom_nodes
         if os.path.isdir(web_dir):
             EXTENSION_WEB_DIRS[module_name] = web_dir
 
+        # V1 node definition
         if hasattr(module, "NODE_CLASS_MAPPINGS") and getattr(module, "NODE_CLASS_MAPPINGS") is not None:
             for name, node_cls in module.NODE_CLASS_MAPPINGS.items():
                 if name not in ignore:
@@ -2137,8 +2139,19 @@ def load_custom_node(module_path: str, ignore=set(), module_parent="custom_nodes
             if hasattr(module, "NODE_DISPLAY_NAME_MAPPINGS") and getattr(module, "NODE_DISPLAY_NAME_MAPPINGS") is not None:
                 NODE_DISPLAY_NAME_MAPPINGS.update(module.NODE_DISPLAY_NAME_MAPPINGS)
             return True
+        # V3 node definition
+        elif getattr(module, "NODES_LIST", None) is not None:
+            for node_cls in module.NODES_LIST:
+                node_cls: ComfyNodeV3
+                schema = node_cls.GET_SCHEMA()
+                if schema.node_id not in ignore:
+                    NODE_CLASS_MAPPINGS[schema.node_id] = node_cls
+                    node_cls.RELATIVE_PYTHON_MODULE = "{}.{}".format(module_parent, get_module_name(module_path))
+                if schema.display_name is not None:
+                    NODE_DISPLAY_NAME_MAPPINGS[schema.node_id] = schema.display_name
+            return True
         else:
-            logging.warning(f"Skip {module_path} module for custom nodes due to the lack of NODE_CLASS_MAPPINGS.")
+            logging.warning(f"Skip {module_path} module for custom nodes due to the lack of NODE_CLASS_MAPPINGS or NODES_LIST (need one).")
             return False
     except Exception as e:
         logging.warning(traceback.format_exc())
@@ -2258,6 +2271,7 @@ def init_builtin_extra_nodes():
         "nodes_ace.py",
         "nodes_string.py",
         "nodes_camera_trajectory.py",
+        "nodes_v3_test.py",
     ]
 
     import_failed = []

requirements.txt
@@ -1,5 +1,5 @@
 comfyui-frontend-package==1.21.3
-comfyui-workflow-templates==0.1.25
+comfyui-workflow-templates==0.1.23
 comfyui-embedded-docs==0.2.0
 torch
 torchsde
@@ -18,9 +18,6 @@ Pillow
 scipy
 tqdm
 psutil
-alembic
-SQLAlchemy
-blake3
 
 #non essential dependencies:
 kornia>=0.7.1

server.py
@@ -29,6 +29,7 @@ import comfy.model_management
 import node_helpers
 from comfyui_version import __version__
 from app.frontend_management import FrontendManager
+from comfy_api.v3.io import ComfyNodeV3
 
 from app.user_manager import UserManager
 from app.model_manager import ModelFileManager
@@ -555,6 +556,8 @@ class PromptServer():
 
         def node_info(node_class):
             obj_class = nodes.NODE_CLASS_MAPPINGS[node_class]
+            # note: the mappings store classes, so issubclass (not isinstance) is needed here
+            if issubclass(obj_class, ComfyNodeV3):
+                return obj_class.GET_NODE_INFO_V1()
             info = {}
             info['input'] = obj_class.INPUT_TYPES()
             info['input_order'] = {key: list(value.keys()) for (key, value) in obj_class.INPUT_TYPES().items()}
@@ -1,253 +0,0 @@
import pytest
from unittest.mock import patch, MagicMock
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from app.model_processor import ModelProcessor
from app.database.models import Model, Base
import os

# Test data constants
TEST_MODEL_TYPE = "checkpoints"
TEST_URL = "http://example.com/model.safetensors"
TEST_FILE_NAME = "model.safetensors"
TEST_EXPECTED_HASH = "abc123"
TEST_DESTINATION_PATH = "/path/to/model.safetensors"


def create_test_model(session, file_name, model_type, hash_value, file_size=1000, source_url=None):
    """Helper to create a test model in the database."""
    model = Model(path=file_name, type=model_type, hash=hash_value, file_size=file_size, source_url=source_url)
    session.add(model)
    session.commit()
    return model


def setup_mock_hash_calculation(model_processor, hash_value):
    """Helper to set up hash calculation mocks."""
    mock_hash = MagicMock()
    mock_hash.hexdigest.return_value = hash_value
    return patch.object(model_processor, "_get_hasher", return_value=mock_hash)


def verify_model_in_db(session, file_name, expected_hash=None, expected_type=None):
    """Helper to verify a model exists in the database with the correct attributes."""
    db_model = session.query(Model).filter_by(path=file_name).first()
    assert db_model is not None
    if expected_hash:
        assert db_model.hash == expected_hash
    if expected_type:
        assert db_model.type == expected_type
    return db_model


@pytest.fixture
def db_engine():
    # Configure in-memory database
    engine = create_engine("sqlite:///:memory:")
    Base.metadata.create_all(engine)
    yield engine
    Base.metadata.drop_all(engine)


@pytest.fixture
def db_session(db_engine):
    Session = sessionmaker(bind=db_engine)
    session = Session()
    yield session
    session.close()


@pytest.fixture
def mock_get_relative_path():
    with patch("app.model_processor.get_relative_path") as mock:
        mock.side_effect = lambda path: (TEST_MODEL_TYPE, os.path.basename(path))
        yield mock


@pytest.fixture
def mock_get_full_path():
    with patch("app.model_processor.get_full_path") as mock:
        mock.return_value = TEST_DESTINATION_PATH
        yield mock


@pytest.fixture
def model_processor(db_session, mock_get_relative_path, mock_get_full_path):
    with patch("app.model_processor.create_session", return_value=db_session):
        with patch("app.model_processor.can_create_session", return_value=True):
            processor = ModelProcessor()
            # Setup test state
            processor.removed_files = []
            processor.downloaded_files = []
            processor.file_exists = {}

            def mock_download_file(url, destination_path, hasher):
                processor.downloaded_files.append((url, destination_path))
                processor.file_exists[destination_path] = True
                # Simulate writing some data to the file
                test_data = b"test data"
                hasher.update(test_data)

            def mock_remove_file(file_path):
                processor.removed_files.append(file_path)
                if file_path in processor.file_exists:
                    del processor.file_exists[file_path]

            # Setup common patches
            file_exists_patch = patch.object(
                processor,
                "_file_exists",
                side_effect=lambda path: processor.file_exists.get(path, False),
            )
            file_size_patch = patch.object(
                processor,
                "_get_file_size",
                side_effect=lambda path: (
                    1000 if processor.file_exists.get(path, False) else 0
                ),
            )
            download_file_patch = patch.object(
                processor, "_download_file", side_effect=mock_download_file
            )
            remove_file_patch = patch.object(
                processor, "_remove_file", side_effect=mock_remove_file
            )

            with (
                file_exists_patch,
                file_size_patch,
                download_file_patch,
                remove_file_patch,
            ):
                yield processor


def test_ensure_downloaded_invalid_extension(model_processor):
    # Ensure that an unsupported file extension raises an error to prevent unsafe file downloads
    with pytest.raises(ValueError, match="Unsupported unsafe file for download"):
        model_processor.ensure_downloaded(TEST_MODEL_TYPE, TEST_URL, "model.exe")


def test_ensure_downloaded_existing_file_with_hash(model_processor, db_session):
    # Ensure that a file with the same hash but from a different source is not downloaded again
    SOURCE_URL = "https://example.com/other.sft"
    create_test_model(db_session, TEST_FILE_NAME, TEST_MODEL_TYPE, TEST_EXPECTED_HASH, source_url=SOURCE_URL)
    model_processor.file_exists[TEST_DESTINATION_PATH] = True

    result = model_processor.ensure_downloaded(
        TEST_MODEL_TYPE, TEST_URL, TEST_FILE_NAME, TEST_EXPECTED_HASH
    )

    assert result == TEST_DESTINATION_PATH
    model = verify_model_in_db(db_session, TEST_FILE_NAME, TEST_EXPECTED_HASH, TEST_MODEL_TYPE)
    assert model.source_url == SOURCE_URL  # Ensure the source URL is not overwritten


def test_ensure_downloaded_existing_file_hash_mismatch(model_processor, db_session):
    # Ensure that a file with a different hash raises an error
    create_test_model(db_session, TEST_FILE_NAME, TEST_MODEL_TYPE, "different_hash")
    model_processor.file_exists[TEST_DESTINATION_PATH] = True

    with pytest.raises(ValueError, match="File .* exists with hash .* but expected .*"):
        model_processor.ensure_downloaded(
            TEST_MODEL_TYPE, TEST_URL, TEST_FILE_NAME, TEST_EXPECTED_HASH
        )


def test_ensure_downloaded_new_file(model_processor, db_session):
    # Ensure that a new file is downloaded
    model_processor.file_exists[TEST_DESTINATION_PATH] = False

    with setup_mock_hash_calculation(model_processor, TEST_EXPECTED_HASH):
        result = model_processor.ensure_downloaded(
            TEST_MODEL_TYPE, TEST_URL, TEST_FILE_NAME, TEST_EXPECTED_HASH
        )

    assert result == TEST_DESTINATION_PATH
    assert len(model_processor.downloaded_files) == 1
    assert model_processor.downloaded_files[0] == (TEST_URL, TEST_DESTINATION_PATH)
    assert model_processor.file_exists[TEST_DESTINATION_PATH]
    verify_model_in_db(db_session, TEST_FILE_NAME, TEST_EXPECTED_HASH, TEST_MODEL_TYPE)


def test_ensure_downloaded_hash_mismatch(model_processor, db_session):
    # Ensure that a download that results in a different hash raises an error
    model_processor.file_exists[TEST_DESTINATION_PATH] = False

    with setup_mock_hash_calculation(model_processor, "different_hash"):
        with pytest.raises(
            ValueError,
            match="Downloaded file hash .* does not match expected hash .*",
        ):
            model_processor.ensure_downloaded(
                TEST_MODEL_TYPE,
                TEST_URL,
                TEST_FILE_NAME,
                TEST_EXPECTED_HASH,
            )

    assert len(model_processor.removed_files) == 1
    assert model_processor.removed_files[0] == TEST_DESTINATION_PATH
    assert TEST_DESTINATION_PATH not in model_processor.file_exists
    assert db_session.query(Model).filter_by(path=TEST_FILE_NAME).first() is None


def test_process_file_without_hash(model_processor, db_session):
    # Test processing a file without a provided hash
    model_processor.file_exists[TEST_DESTINATION_PATH] = True

    with patch.object(model_processor, "_hash_file", return_value=TEST_EXPECTED_HASH):
        result = model_processor.process_file(TEST_DESTINATION_PATH)
        assert result is not None
        assert result.hash == TEST_EXPECTED_HASH


def test_retrieve_model_by_hash(model_processor, db_session):
    # Test retrieving a model by hash
    create_test_model(db_session, TEST_FILE_NAME, TEST_MODEL_TYPE, TEST_EXPECTED_HASH)
    result = model_processor.retrieve_model_by_hash(TEST_EXPECTED_HASH)
    assert result is not None
    assert result.hash == TEST_EXPECTED_HASH


def test_retrieve_model_by_hash_and_type(model_processor, db_session):
    # Test retrieving a model by hash and type
    create_test_model(db_session, TEST_FILE_NAME, TEST_MODEL_TYPE, TEST_EXPECTED_HASH)
    result = model_processor.retrieve_model_by_hash(TEST_EXPECTED_HASH, TEST_MODEL_TYPE)
    assert result is not None
    assert result.hash == TEST_EXPECTED_HASH
    assert result.type == TEST_MODEL_TYPE


def test_retrieve_hash(model_processor, db_session):
    # Test retrieving the hash for an existing model
    create_test_model(db_session, TEST_FILE_NAME, TEST_MODEL_TYPE, TEST_EXPECTED_HASH)
    with patch.object(
        model_processor,
        "_validate_path",
        return_value=(TEST_MODEL_TYPE, TEST_FILE_NAME),
    ):
        result = model_processor.retrieve_hash(TEST_DESTINATION_PATH, TEST_MODEL_TYPE)
        assert result == TEST_EXPECTED_HASH


def test_validate_file_extension_valid_extensions(model_processor):
    # Test all valid file extensions
    valid_extensions = [".safetensors", ".sft", ".txt", ".csv", ".json", ".yaml"]
    for ext in valid_extensions:
        model_processor._validate_file_extension(f"test{ext}")  # Should not raise


def test_process_file_existing_without_source_url(model_processor, db_session):
    # Test processing an existing file that needs its source URL updated
    model_processor.file_exists[TEST_DESTINATION_PATH] = True

    create_test_model(db_session, TEST_FILE_NAME, TEST_MODEL_TYPE, TEST_EXPECTED_HASH)
    result = model_processor.process_file(TEST_DESTINATION_PATH, source_url=TEST_URL)

    assert result is not None
    assert result.hash == TEST_EXPECTED_HASH
    assert result.source_url == TEST_URL

    db_model = db_session.query(Model).filter_by(path=TEST_FILE_NAME).first()
    assert db_model.source_url == TEST_URL
@@ -1,19 +0,0 @@
|
|||||||
from pathlib import Path
|
|
||||||
import sys
|
|
||||||
|
|
||||||
# The path to the requirements.txt file
|
|
||||||
requirements_path = Path(__file__).parents[1] / "requirements.txt"
|
|
||||||
|
|
||||||
|
|
||||||
def get_missing_requirements_message():
|
|
||||||
"""The warning message to display when a package is missing."""
|
|
||||||
|
|
||||||
extra = ""
|
|
||||||
if sys.flags.no_user_site:
|
|
||||||
extra = "-s "
|
|
||||||
return f"""
|
|
||||||
Please install the updated requirements.txt file by running:
|
|
||||||
{sys.executable} {extra}-m pip install -r {requirements_path}
|
|
||||||
|
|
||||||
If you are on the portable package you can run: update\\update_comfyui.bat to solve this problem.
|
|
||||||
""".strip()
|
|
||||||