#!/bin/bash
# This file will be sourced in init.sh
# https://raw.githubusercontent.com/ai-dock/comfyui/main/config/provisioning/flux1-dev.sh
# Packages are installed after nodes so we can fix them...
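# Rough usage sketch (assumptions: the ai-dock/comfyui image reads PROVISIONING_SCRIPT
# and HF_TOKEN from its environment and init.sh sources the script it points to;
# the image tag and gist URL below are placeholders):
#
#   docker run --gpus all \
#       -e PROVISIONING_SCRIPT="https://gist.githubusercontent.com/<user>/<gist-id>/raw/provisioning.sh" \
#       -e HF_TOKEN="hf_..." \
#       ghcr.io/ai-dock/comfyui:<tag>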
# FLUX.1-dev is a gated Hugging Face repo; the token must belong to an account
# that has accepted the model license.
if [ -z "${HF_TOKEN}" ]; then
    echo "HF_TOKEN is not set. Exiting."
    exit 1
fi

# Recommended disk allocation in GB for the fp16 UNet, T5 fp16 encoder, VAE and
# upscalers (an assumed estimate - adjust if you enable more models).
DISK_GB_REQUIRED=${DISK_GB_REQUIRED:-40}
PYTHON_PACKAGES=(
#"opencv-python==4.7.0.72"
)
NODES=(
"https://github.com/ltdrdata/ComfyUI-Manager"
"https://github.com/ltdrdata/ComfyUI-Impact-Pack"
"https://github.com/talesofai/comfyui-browser"
"https://github.com/11cafe/comfyui-workspace-manager"
"https://github.com/chrisgoringe/cg-use-everywhere"
"https://github.com/WASasquatch/was-node-suite-comfyui"
"https://github.com/pythongosssss/ComfyUI-Custom-Scripts"
"https://github.com/cubiq/ComfyUI_IPAdapter_plus"
"https://github.com/Ttl/ComfyUi_NNLatentUpscale"
"https://github.com/Derfuu/Derfuu_ComfyUI_ModdedNodes"
"https://github.com/twri/sdxl_prompt_styler"
"https://github.com/sipherxyz/comfyui-art-venture"
# Controlnet preprocessors
"https://github.com/Fannovel16/comfyui_controlnet_aux"
)
CHECKPOINT_MODELS=(
#"https://huggingface.co/Comfy-Org/flux1-dev/resolve/main/flux1-dev-fp8.safetensors"
)
UNET_MODELS=(
"https://huggingface.co/black-forest-labs/FLUX.1-dev/resolve/main/flux1-dev.safetensors"
)
LORA_MODELS=(
#"https://civitai.com/api/download/models/16576"
)
VAE_MODELS=(
"https://huggingface.co/black-forest-labs/FLUX.1-dev/resolve/main/ae.safetensors"
)
CLIP_MODELS=(
"https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors"
"https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors"
# Swap in the fp8 T5 encoder below instead if you have less than ~32GB of system RAM.
# More info: https://comfyanonymous.github.io/ComfyUI_examples/flux/#simple-to-use-fp8-checkpoint-version
#"https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp8_e4m3fn.safetensors"
)
ESRGAN_MODELS=(
"https://huggingface.co/ai-forever/Real-ESRGAN/resolve/main/RealESRGAN_x4.pth"
"https://huggingface.co/FacehugmanIII/4x_foolhardy_Remacri/resolve/main/4x_foolhardy_Remacri.pth"
"https://huggingface.co/Akumetsu971/SD_Anime_Futuristic_Armor/resolve/main/4x_NMKD-Siax_200k.pth"
)
CONTROLNET_MODELS=(
# "https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_canny-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_depth-fp16.safetensors"
# "https://huggingface.co/kohya-ss/ControlNet-diff-modules/resolve/main/diff_control_sd15_depth_fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_hed-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_mlsd-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_normal-fp16.safetensors"
# "https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_openpose-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_scribble-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/control_seg-fp16.safetensors"
# "https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_canny-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_color-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_depth-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_keypose-fp16.safetensors"
# "https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_openpose-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_seg-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_sketch-fp16.safetensors"
#"https://huggingface.co/webui/ControlNet-modules-safetensors/resolve/main/t2iadapter_style-fp16.safetensors"
)
### DO NOT EDIT BELOW HERE UNLESS YOU KNOW WHAT YOU ARE DOING ###
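
# provisioning_start drives the whole run: it measures the workspace disk, prints the
# banner, installs custom nodes and Python packages, then downloads each model group
# into ${WORKSPACE}/storage/stable_diffusion/models/.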
function provisioning_start() {
    DISK_GB_AVAILABLE=$(($(df --output=avail -m "${WORKSPACE}" | tail -n1) / 1000))
    DISK_GB_USED=$(($(df --output=used -m "${WORKSPACE}" | tail -n1) / 1000))
    DISK_GB_ALLOCATED=$(($DISK_GB_AVAILABLE + $DISK_GB_USED))
    provisioning_print_header
    provisioning_get_nodes
    provisioning_install_python_packages
    provisioning_get_models \
        "${WORKSPACE}/storage/stable_diffusion/models/ckpt" \
        "${CHECKPOINT_MODELS[@]}"
    provisioning_get_models \
        "${WORKSPACE}/storage/stable_diffusion/models/unet" \
        "${UNET_MODELS[@]}"
    provisioning_get_models \
        "${WORKSPACE}/storage/stable_diffusion/models/lora" \
        "${LORA_MODELS[@]}"
    provisioning_get_models \
        "${WORKSPACE}/storage/stable_diffusion/models/controlnet" \
        "${CONTROLNET_MODELS[@]}"
    provisioning_get_models \
        "${WORKSPACE}/storage/stable_diffusion/models/vae" \
        "${VAE_MODELS[@]}"
    provisioning_get_models \
        "${WORKSPACE}/storage/stable_diffusion/models/clip" \
        "${CLIP_MODELS[@]}"
    provisioning_get_models \
        "${WORKSPACE}/storage/stable_diffusion/models/esrgan" \
        "${ESRGAN_MODELS[@]}"
    provisioning_print_end
}
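
# Clone each custom node repo into /opt/ComfyUI/custom_nodes (or pull updates when it
# already exists and AUTO_UPDATE is not "false"), then install its requirements.txt
# into the comfyui micromamba environment.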
function provisioning_get_nodes() {
    for repo in "${NODES[@]}"; do
        dir="${repo##*/}"
        path="/opt/ComfyUI/custom_nodes/${dir}"
        requirements="${path}/requirements.txt"
        if [[ -d $path ]]; then
            if [[ ${AUTO_UPDATE,,} != "false" ]]; then
                printf "Updating node: %s...\n" "${repo}"
                ( cd "$path" && git pull )
                if [[ -e $requirements ]]; then
                    micromamba -n comfyui run ${PIP_INSTALL} -r "$requirements"
                fi
            fi
        else
            printf "Downloading node: %s...\n" "${repo}"
            git clone "${repo}" "${path}" --recursive
            if [[ -e $requirements ]]; then
                micromamba -n comfyui run ${PIP_INSTALL} -r "${requirements}"
            fi
        fi
    done
}
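
# Install any extra pip packages listed in PYTHON_PACKAGES into the comfyui environment.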
function provisioning_install_python_packages() {
    if [ ${#PYTHON_PACKAGES[@]} -gt 0 ]; then
        micromamba -n comfyui run ${PIP_INSTALL} ${PYTHON_PACKAGES[*]}
    fi
}
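
# $1 is the target directory; the remaining arguments are model URLs. If the disk
# allocation is below DISK_GB_REQUIRED, only the first URL of each group is downloaded.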
function provisioning_get_models() {
    if [[ -z $2 ]]; then return 1; fi
    dir="$1"
    mkdir -p "$dir"
    shift
    if [[ $DISK_GB_ALLOCATED -ge $DISK_GB_REQUIRED ]]; then
        arr=("$@")
    else
        printf "WARNING: Low disk space allocation - Only the first model will be downloaded!\n"
        arr=("$1")
    fi
    printf "Downloading %s model(s) to %s...\n" "${#arr[@]}" "$dir"
    for url in "${arr[@]}"; do
        printf "Downloading: %s\n" "${url}"
        provisioning_download "${url}" "${dir}"
        printf "\n"
    done
}
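
# Print the banner and warn when the allocated disk is below DISK_GB_REQUIRED.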
function provisioning_print_header() {
    printf "\n##############################################\n#                                            #\n#          Provisioning container            #\n#                                            #\n#         This will take some time           #\n#                                            #\n# Your container will be ready on completion #\n#                                            #\n##############################################\n\n"
    if [[ $DISK_GB_ALLOCATED -lt $DISK_GB_REQUIRED ]]; then
        printf "WARNING: Your allocated disk size (%sGB) is below the recommended %sGB - Some models will not be downloaded\n" "$DISK_GB_ALLOCATED" "$DISK_GB_REQUIRED"
    fi
}
function provisioning_print_end() {
    printf "\nProvisioning complete: Web UI will start now\n\n"
}
# Download from the $1 URL into the $2 directory ($3 optionally overrides wget's dotbytes).
# Note: the HF_TOKEN bearer header is sent to every host, not only huggingface.co.
function provisioning_download() {
    wget --header="Authorization: Bearer $HF_TOKEN" -qnc --content-disposition --show-progress -e dotbytes="${3:-4M}" -P "$2" "$1"
}
provisioning_start