# syntax=docker/dockerfile:1
# ComfyUI on NVIDIA — manual install per
# https://docs.comfy.org/installation/manual_install#nvidia
#
# Base image provides system CUDA / cuDNN libs that some custom nodes dlopen at
# runtime. The PyTorch wheels installed below bundle their own CUDA runtime —
# the base only needs to be ABI-compatible with the host driver via
# nvidia-container-toolkit.
#
# CUDA 12.6 + cu126 wheels chosen for broad driver compatibility (driver >=
# 545). If the host driver is >= 580, switch the wheel index URL to cu130 for
# the newest stable PyTorch.

FROM nvidia/cuda:12.6.3-cudnn-runtime-ubuntu24.04

# Runtime environment. DEBIAN_FRONTEND is deliberately NOT set here: baking it
# into ENV leaks a build-time knob into every running container (hadolint /
# build-check anti-pattern). It is applied inline on the apt-get line below,
# which is the only place it matters.
ENV PYTHONUNBUFFERED=1 \
    PIP_NO_CACHE_DIR=1 \
    PIP_DISABLE_PIP_VERSION_CHECK=1 \
    COMFYUI_HOME=/opt/comfyui

# System packages, alphabetized for diffability:
#   ca-certificates / curl — TLS for git+pip, and the healthcheck probe
#   git                    — clone ComfyUI and custom nodes
#   libgl1 / libglib2.0-0  — dlopen'd at runtime by opencv-based custom nodes
#   python3.12 (+venv)     — interpreter; the venv module provides pip via ensurepip
RUN DEBIAN_FRONTEND=noninteractive apt-get update && \
    DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
        ca-certificates \
        curl \
        git \
        libgl1 \
        libglib2.0-0 \
        python3-pip \
        python3.12 \
        python3.12-venv \
    && rm -rf /var/lib/apt/lists/*

# Pin a venv so future pip installs (custom nodes) stay isolated.
# Putting /opt/venv/bin first on PATH makes bare `pip` and `python` resolve to
# the venv in every subsequent RUN and in the CMD at the bottom of the file.
RUN python3.12 -m venv /opt/venv
ENV PATH=/opt/venv/bin:$PATH

# PyTorch wheels for NVIDIA. Use --index-url (not --extra-index-url), matching
# the official install instructions: with --extra-index-url pip may resolve
# torch from PyPI instead and silently install a different CUDA variant than
# the cu126 build intended here.
# cu126 covers driver >= 545; bump to cu130 for driver >= 580.
RUN pip install --upgrade pip && \
    pip install torch torchvision torchaudio \
        --index-url https://download.pytorch.org/whl/cu126

# Pull ComfyUI itself. COMFYUI_REF lets CI pin a tag for reproducible builds
# (docker build --build-arg COMFYUI_REF=v0.3.x); the default tracks the
# repository's default branch, matching the previous unpinned behavior.
# NOTE: git clone --branch accepts tags as well as branches, but not raw SHAs.
ARG COMFYUI_REF=master
RUN git clone --depth 1 --branch ${COMFYUI_REF} \
        https://github.com/comfyanonymous/ComfyUI.git ${COMFYUI_HOME}
WORKDIR ${COMFYUI_HOME}
RUN pip install -r requirements.txt

# ComfyUI-Manager — community node manager so users can install/update custom
# nodes from the web UI rather than rebuilding the image. Delete this single
# RUN for a pristine ComfyUI.
RUN git clone --depth 1 \
        https://github.com/ltdrdata/ComfyUI-Manager.git \
        ${COMFYUI_HOME}/custom_nodes/ComfyUI-Manager \
    && pip install -r ${COMFYUI_HOME}/custom_nodes/ComfyUI-Manager/requirements.txt

EXPOSE 8188

# Cheap liveness probe: ComfyUI answers on its web root once the server is up.
# Generous start-period covers first-launch custom-node scanning; curl is
# installed in the apt layer above.
HEALTHCHECK --interval=30s --timeout=5s --start-period=60s --retries=3 \
    CMD curl -fsS http://127.0.0.1:8188/ || exit 1

# --listen 0.0.0.0 binds to every interface so the Open WebUI container on the
# shared compose network can reach it. --port is explicit for clarity.
# Exec-form CMD keeps main.py as PID 1 so it receives SIGTERM from docker stop.
# NOTE(review): the container runs as root — consider adding a dedicated user
# (plus chown of ${COMFYUI_HOME}) if the deployment's threat model warrants it.
CMD ["python", "main.py", "--listen", "0.0.0.0", "--port", "8188"]
