Mirror of https://github.com/comfyanonymous/ComfyUI.git (synced 2025-07-08 00:07:09 +08:00)
Commit 796b752e0a

Dockerfile (36 lines changed)
@@ -1,4 +1,18 @@
-FROM nvidia/cuda:12.6.3-cudnn-runtime-ubuntu24.04 AS base
+# Nvidia GPU Base Images
+# For NVIDIA GPU with stable CUDA version
+# FROM nvidia/cuda:12.6.3-cudnn-runtime-ubuntu24.04 AS base
+
+# For NVIDIA GPU with latest CUDA version
+# FROM nvidia/cuda:12.8.1-cudnn-runtime-ubuntu24.04 AS base
+
+# AMD GPU Base Images
+# For AMD GPU with stable ROCm version
+# FROM rocm/dev-ubuntu-24.04:6.2.4-complete AS base
+
+# For AMD GPU with latest ROCm version
+# FROM rocm/dev-ubuntu-24.04:6.3.4-complete AS base
+
+# Environment variables
 ENV DEBIAN_FRONTEND=noninteractive
 
 # Install necessary dependencies and Python 3.12
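Note on the hunk above: every FROM line now ships commented out, and docker-build.sh (further down) uncomments exactly one of them before building. If you pick a base image by hand instead of running the script, a minimal sanity check, assuming you run it from the directory holding this Dockerfile, is:

    # exactly one active base image should remain after editing
    grep -c '^FROM ' Dockerfile   # expected output: 1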
@@ -29,15 +43,29 @@ RUN git clone https://github.com/comfyanonymous/ComfyUI.git /app/comfyui \
     && /app/venv/bin/pip install pyyaml \
     && /app/venv/bin/pip install -r /app/comfyui/requirements.txt
 
-# Install PyTorch with CUDA 12.6 support (stable version)
-RUN /app/venv/bin/pip install torch torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/cu126
-
 # Clone ComfyUI-Manager and install its dependencies
 RUN git clone https://github.com/ltdrdata/ComfyUI-Manager.git /app/temp/ComfyUI-Manager \
     && mv /app/temp/* /app/comfyui/custom_nodes/ \
     && rm -rf /app/temp \
     && /app/venv/bin/pip install -r /app/comfyui/custom_nodes/ComfyUI-Manager/requirements.txt
 
+# NVIDIA GPU PyTorch Installation
+
+# Install PyTorch with CUDA 12.6 support (stable version)
+# RUN /app/venv/bin/pip install torch torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/cu126
+
+# Install PyTorch with CUDA 12.8 support (latest version)
+# RUN /app/venv/bin/pip install --pre torch torchvision torchaudio --index-url https://download.pytorch.org/whl/nightly/cu128
+
+# AMD GPU PyTorch Installation
+
+# Install PyTorch with ROCm 6.2 support (stable version)
+# RUN /app/venv/bin/pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/rocm6.2.4
+
+# Install PyTorch with ROCm 6.3 support (latest version)
+# RUN /app/venv/bin/pip3 install --pre torch torchvision torchaudio --index-url https://download.pytorch.org/whl/nightly/rocm6.3
+
+
 # Expose the backend port
 EXPOSE 8188
 
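The PyTorch install lines above are likewise all commented out, so the image only gets a GPU-enabled torch build once one of them is re-enabled. A quick way to verify the result is a sketch like the following, assuming the image was tagged comfyui at build time and the NVIDIA Container Toolkit is installed (ROCm images need different device flags):

    docker run --rm --gpus all comfyui \
        /app/venv/bin/python -c "import torch; print(torch.__version__, torch.cuda.is_available())"

The backend itself listens on the exposed port 8188, which docker-compose (or a manual -p 8188:8188 mapping) still has to publish to the host.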
docker-build.sh (120 lines changed)
@@ -1,8 +1,120 @@
 #!/bin/bash
 
+# Backup the original Dockerfile
+BACKUP_FILE="Dockerfile.bak"
+cp Dockerfile "$BACKUP_FILE"
+
+# Function to restore the Dockerfile
+restore_dockerfile() {
+    echo "Restoring Dockerfile to its original state..."
+    mv "$BACKUP_FILE" Dockerfile
+    echo "Dockerfile restored."
+}
+
+# Set up trap to restore Dockerfile on script exit (success or failure)
+trap restore_dockerfile EXIT
+
+# Function to display version information
+display_version_info() {
+    echo "==========================================================="
+    echo "Stable Version:"
+    echo " - This is the latest stable version released by PyTorch."
+    echo " - It is thoroughly tested and recommended for deployment."
+    echo " - Pros: Reliable, well-tested, fewer bugs."
+    echo " - Cons: May not include the latest features or optimizations."
+    echo ""
+    echo "Latest Version:"
+    echo " - This is the latest development version of PyTorch."
+    echo " - It includes the newest features and optimizations but may have bugs."
+    echo " - Pros: Cutting-edge features, performance improvements."
+    echo " - Cons: Less stable, potential for encountering bugs."
+    echo "==========================================================="
+}
+
+# Function to ask user for GPU type
+ask_gpu_type() {
+    echo "What GPU do you have?"
+    select gpu in "NVIDIA" "AMD"; do
+        case $gpu in
+            NVIDIA)
+                echo "You selected NVIDIA."
+                break
+                ;;
+            AMD)
+                echo "You selected AMD."
+                break
+                ;;
+            *)
+                echo "Invalid option. Please choose 1 or 2."
+                ;;
+        esac
+    done
+}
+
+# Function to ask user for version preference
+ask_version() {
+    echo "Which version would you like to use?"
+    select version in "Stable" "Latest"; do
+        case $version in
+            Stable)
+                echo "You selected Stable."
+                break
+                ;;
+            Latest)
+                echo "You selected Latest."
+                break
+                ;;
+            *)
+                echo "Invalid option. Please choose 1 or 2."
+                ;;
+        esac
+    done
+}
+
+# Display version information
+display_version_info
+
+# Ask user for GPU type and version
+ask_gpu_type
+ask_version
+
+# Set base image and PyTorch installation command based on user input
+if [[ "$gpu" == "NVIDIA" ]]; then
+    if [[ "$version" == "Stable" ]]; then
+        BASE_IMAGE="nvidia/cuda:12.6.3-cudnn-runtime-ubuntu24.04"
+        TORCH_INSTALL="pip install torch torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/cu126"
+        # Uncomment the stable NVIDIA FROM line
+        sed -i '/# FROM nvidia\/cuda:12.6.3-cudnn-runtime-ubuntu24.04 AS base/s/^# //' Dockerfile
+        # Uncomment the stable NVIDIA PyTorch installation line
+        sed -i '/# RUN \/app\/venv\/bin\/pip install torch torchvision torchaudio --extra-index-url https:\/\/download.pytorch.org\/whl\/cu126/s/^# //' Dockerfile
+    else
+        BASE_IMAGE="nvidia/cuda:12.8.1-cudnn-runtime-ubuntu24.04"
+        TORCH_INSTALL="pip install --pre torch torchvision torchaudio --index-url https://download.pytorch.org/whl/nightly/cu128"
+        # Uncomment the latest NVIDIA FROM line
+        sed -i '/# FROM nvidia\/cuda:12.8.1-cudnn-runtime-ubuntu24.04 AS base/s/^# //' Dockerfile
+        # Uncomment the latest NVIDIA PyTorch installation line
+        sed -i '/# RUN \/app\/venv\/bin\/pip install --pre torch torchvision torchaudio --index-url https:\/\/download.pytorch.org\/whl\/nightly\/cu128/s/^# //' Dockerfile
+    fi
+else
+    if [[ "$version" == "Stable" ]]; then
+        BASE_IMAGE="rocm/dev-ubuntu-24.04:6.2.4-complete"
+        TORCH_INSTALL="pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/rocm6.2.4"
+        # Uncomment the stable AMD FROM line
+        sed -i '/# FROM rocm\/dev-ubuntu-24.04:6.2.4-complete AS base/s/^# //' Dockerfile
+        # Uncomment the stable AMD PyTorch installation line
+        sed -i '/# RUN \/app\/venv\/bin\/pip install torch torchvision torchaudio --index-url https:\/\/download.pytorch.org\/whl\/rocm6.2.4/s/^# //' Dockerfile
+    else
+        BASE_IMAGE="rocm/dev-ubuntu-24.04:6.3.4-complete"
+        TORCH_INSTALL="pip install --pre torch torchvision torchaudio --index-url https://download.pytorch.org/whl/nightly/rocm6.3"
+        # Uncomment the latest AMD FROM line
+        sed -i '/# FROM rocm\/dev-ubuntu-24.04:6.3.4-complete AS base/s/^# //' Dockerfile
+        # Uncomment the latest AMD PyTorch installation line
+        sed -i '/# RUN \/app\/venv\/bin\/pip3 install --pre torch torchvision torchaudio --index-url https:\/\/download.pytorch.org\/whl\/nightly\/rocm6.3/s/^# //' Dockerfile
+    fi
+fi
+
 # Check if Docker and Docker Compose are installed
-if ! command -v docker &> /dev/null || ! command -v docker-compose &> /dev/null
-then
+if ! command -v docker &> /dev/null || ! command -v docker-compose &> /dev/null; then
     echo "Docker or Docker Compose not found. Please install them before proceeding."
     exit 1
 fi
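Taken together, the lines added above make docker-build.sh interactive: it backs up the Dockerfile, asks for the GPU vendor and PyTorch channel via select menus, uses sed to uncomment the matching FROM and RUN lines, and relies on the EXIT trap to restore the original Dockerfile however the script ends. A typical run, assuming the script sits next to the Dockerfile and docker-compose.yml, looks like:

    chmod +x docker-build.sh
    ./docker-build.sh
    # answer the prompts, e.g.
    #   1) NVIDIA   2) AMD     -> 1
    #   1) Stable   2) Latest  -> 1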
@@ -17,7 +129,7 @@ else
     exit 1
 fi
 
 # Step 2: Start the container without mounting the volumes (venv, custom_nodes)
 echo "Starting the container..."
 COMPOSE_BAKE=true docker-compose up -d
 if [ $? -eq 0 ]; then
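After docker-compose up -d succeeds, the container should be serving the backend on the port exposed by the Dockerfile. A quick reachability check, assuming the compose file publishes port 8188 on localhost (a sketch, not part of the script):

    docker-compose ps          # the service should be listed as Up
    curl -sSf http://localhost:8188 >/dev/null && echo "ComfyUI backend answering on 8188"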
@@ -93,7 +205,7 @@ else
     exit 1
 fi
 
-# Step 6.2: Update the Docker Compose file to mount the venv volume
+# Step 6.2: Update the Docker Compose file to mount the custom_nodes volume
 echo "Updating Docker Compose file to mount the custom_nodes..."
 sed -i '/# Mount the custom nodes directory directly inside/a \ \ \ \ \ \ - ./custom_nodes:/app/comfyui/custom_nodes' docker-compose.yml
 if [ $? -eq 0 ]; then
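The sed call above inserts a bind-mount entry directly after the "# Mount the custom nodes directory directly inside" marker comment in docker-compose.yml, so ./custom_nodes on the host maps onto /app/comfyui/custom_nodes in the container. To confirm the line landed, assuming docker-compose.yml is in the current directory:

    grep -n -- '- ./custom_nodes:/app/comfyui/custom_nodes' docker-compose.yml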