|
|
|
import gc
import os
import re
import shutil
import socket
import subprocess
import threading
import time
import zipfile
from concurrent.futures import ProcessPoolExecutor
from pathlib import Path

import pandas as pd
import requests
|
|
|
os.system("pip install nvidia-ml-py3") |
|
os.chdir(f"/home/xlab-app-center") |
|
os.system(f"git clone https://openi.pcl.ac.cn/2575044704/sd-v1.7.0 /home/xlab-app-center/stable-diffusion-webui") |
|
os.system(f"cp /home/xlab-app-center/styles.csv /home/xlab-app-center/stable-diffusion-webui/styles.csv") |
|
os.chdir(f"/home/xlab-app-center/stable-diffusion-webui") |
|
os.system(f"git lfs install") |
|
os.system(f"git reset --hard") |
|
import os |
|
|
|
def create_directory(directory_path):
    """Create a directory (and any parents) if it does not already exist."""
    if not os.path.exists(directory_path):
        os.makedirs(directory_path)


def download_file(url, destination_path):
    """Download a single file with wget, writing it to destination_path."""
    os.system(f'wget -O "{destination_path}" "{url}"')
|
|
|
|
|
base_directory = '/home/xlab-app-center/stable-diffusion-webui'
configs_directory = os.path.join(base_directory, 'configs')

create_directory(configs_directory)

# WebUI settings plus the public and private UI layout files.
download_file('https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/config-pub.json', os.path.join(base_directory, 'config.json'))
download_file('https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/ui-config-pri2.json', os.path.join(base_directory, 'ui-config-pri.json'))
download_file('https://hf-mirror.com/datasets/ACCA225/openxlab/resolve/main/ui-config-pub.json', os.path.join(base_directory, 'ui-config.json'))
|
|
|
os.chdir(f"/home/xlab-app-center/stable-diffusion-webui/extensions") |
|
show_shell_info = False |
|
def run(command, cwd=None, desc=None, errdesc=None, custom_env=None, try_error: bool = True) -> str:
    """Run a shell command, optionally swallowing failures, and return its stdout."""
    global show_shell_info
    if desc is not None:
        print(desc)

    run_kwargs = {
        "args": command,
        "shell": True,
        "cwd": cwd,
        "env": os.environ if custom_env is None else custom_env,
        "encoding": 'utf8',
        "errors": 'ignore',
    }

    if not show_shell_info:
        run_kwargs["stdout"] = run_kwargs["stderr"] = subprocess.PIPE

    result = subprocess.run(**run_kwargs)

    if result.returncode != 0:
        error_bits = [
            f"{errdesc or 'Error running command'}.",
            f"Command: {command}",
            f"Error code: {result.returncode}",
        ]
        if result.stdout:
            error_bits.append(f"stdout: {result.stdout}")
        if result.stderr:
            error_bits.append(f"stderr: {result.stderr}")
        if try_error:
            # With try_error=True the failure is only reported, not raised.
            print(RuntimeError("\n".join(error_bits)))
        else:
            raise RuntimeError("\n".join(error_bits))

    if show_shell_info:
        print(result.stdout or "")
    return result.stdout or ""
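# Illustrative usage of the helper above (not part of the original flow): it acts
# as a quiet shell call that captures and returns stdout, e.g.
#   head = run("git rev-parse HEAD", cwd=base_directory,
#              desc="Reading current commit", errdesc="git rev-parse failed")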
|
|
def sdmodel():
    """Pre-download a set of Stable Diffusion checkpoints with aria2."""
    def download_with_aria2(urls):
        for url in urls:
            # Rewrite the mirror host and derive the output file name from the URL.
            url = url.replace('huggingface.co', 'hf-mirror.com')
            file_name = url.split('/')[-1].split('?')[0]

            command = f'aria2c -x 16 -s 16 -c -k 1M -o "{file_name}" "{url}" -d /home/xlab-app-center/stable-diffusion-webui/models/Stable-diffusion'
            try:
                subprocess.run(command, shell=True, check=True)
            except subprocess.CalledProcessError as e:
                print(f'Error downloading {url}: {e}')

    download_urls = """
https://huggingface.co/HiroHiroHirook/meinamix_meinaV8/resolve/main/meinamix_meinaV8.safetensors?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/anything_v50.safetensors?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/blindbox_v1_mix.safetensors?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/cuteyukimixAdorable_naiV3style.safetensors?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/ddicon_v10.ckpt?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/meinamix_meinaV11.safetensors?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/meinapastel_v6-inpainting.safetensors?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/meinapastel_v6Pastel.safetensors?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/midjourney_20230624181825.safetensors?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/mixProV4_v4.safetensors?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/qteamixQ_omegaFp16.safetensors?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/uberRealisticPornMerge_urpmv13.safetensors?download=true
https://huggingface.co/datasets/ACCA225/kagglemodel/resolve/main/kaggle/temp/velaMix_velaMixVersion2.safetensors?download=true
"""

    urls_list = download_urls.strip().splitlines()
    download_with_aria2(urls_list)
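# Note: sdmodel() is defined but never invoked in this script; call it explicitly
# (e.g. `sdmodel()`) if the checkpoints listed above should be pre-downloaded.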
|
|
|
def mkdirs(path, exist_ok=True):
    """Create `path` (including parents) if it is non-empty and missing."""
    if path and not Path(path).exists():
        os.makedirs(path, exist_ok=exist_ok)
|
# Extensions to clone into the current directory (the WebUI extensions folder).
plugins = [
    "https://gitcode.net/overbill1683/stable-diffusion-webui-localization-zh_Hans",
    "https://gitcode.net/ranting8323/multidiffusion-upscaler-for-automatic1111",
    "https://gitcode.net/ranting8323/adetailer",
    "https://gitcode.net/ranting8323/sd-webui-inpaint-anything",
    "https://gitcode.net/ranting8323/a1111-sd-webui-tagcomplete",
    "https://gitcode.net/nightaway/sd-webui-infinite-image-browsing",
    "https://openi.pcl.ac.cn/2575044704/sd-extension-system-info",
    "https://openi.pcl.ac.cn/2575044704/batchlinks-webui",
    "https://openi.pcl.ac.cn/2575044704/stable-diffusion-webui-localization-zh_CN",
    "https://openi.pcl.ac.cn/2575044704/sd-webui-lora-block-weight",
    "https://openi.pcl.ac.cn/2575044704/sd-skin-extension",
]

for plugin in plugins:
    os.system(f"git clone {plugin}")
|
# ADetailer detection models, extra checkpoints, LoRAs, and a VAE.
os.makedirs('/home/xlab-app-center/stable-diffusion-webui/models/adetailer', exist_ok=True)
os.chdir("/home/xlab-app-center/stable-diffusion-webui/models/adetailer")
os.system("aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/Bingsu/adetailer/resolve/main/hand_yolov8s.pt -d /home/xlab-app-center/stable-diffusion-webui/models/adetailer -o hand_yolov8s.pt")
os.system("aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/Bingsu/adetailer/resolve/main/hand_yolov8n.pt -d /home/xlab-app-center/stable-diffusion-webui/models/adetailer -o hand_yolov8n.pt")
os.system("aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/datasets/ACCC1380/private-model/resolve/main/kaggle/input/museum/131-half.safetensors -d /home/xlab-app-center/stable-diffusion-webui/models/Stable-diffusion -o [萌二次元]131-half.safetensors")
os.system("aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/datasets/ACCC1380/private-model/resolve/main/ba.safetensors -d /home/xlab-app-center/stable-diffusion-webui/models/Lora -o ba.safetensors")
os.system("aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/datasets/ACCC1380/private-model/resolve/main/racaco2.safetensors -d /home/xlab-app-center/stable-diffusion-webui/models/Lora -o racaco2.safetensors")
os.system("aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/coinz/Add-detail/resolve/main/add_detail.safetensors -d /home/xlab-app-center/stable-diffusion-webui/models/Lora -o add_detail.safetensors")
os.system("aria2c --console-log-level=error -c -x 16 -s 16 -k 1M https://hf-mirror.com/datasets/VASVASVAS/vae/resolve/main/pastel-waifu-diffusion.vae.pt -d /home/xlab-app-center/stable-diffusion-webui/models/VAE -o pastel-waifu-diffusion.vae.pt")
|
|
|
|
|
os.chdir(f"/home/xlab-app-center/stable-diffusion-webui") |
|
print('webui launching...') |
|
package_envs = [ |
|
{"env": "STABLE_DIFFUSION_REPO", "url": os.environ.get('STABLE_DIFFUSION_REPO', "https://gitcode.net/overbill1683/stablediffusion")}, |
|
{"env": "STABLE_DIFFUSION_XL_REPO", "url": os.environ.get('STABLE_DIFFUSION_XL_REPO', "https://gitcode.net/overbill1683/generative-models")}, |
|
{"env": "K_DIFFUSION_REPO", "url": os.environ.get('K_DIFFUSION_REPO', "https://gitcode.net/overbill1683/k-diffusion")}, |
|
{"env": "CODEFORMER_REPO", "url": os.environ.get('CODEFORMER_REPO', "https://gitcode.net/overbill1683/CodeFormer")}, |
|
{"env": "BLIP_REPO", "url": os.environ.get('BLIP_REPO', "https://gitcode.net/overbill1683/BLIP")}, |
|
] |
|
os.environ["PIP_INDEX_URL"] = "https://mirrors.aliyun.com/pypi/simple/" |
|
for i in package_envs: |
|
os.environ[i["env"]] = i["url"] |
|
|
|
# wandb plus the NVML bindings installed earlier; os and time are already imported.
import wandb
import nvidia_smi
|
|
|
|
|
def echoToFile(content: str, path: str):
    """Write `content` to `path`, creating parent directories first."""
    if path.find('/') >= 0:
        _path = '/'.join(path.split('/')[:-1])
        run(f'''mkdir -p {_path}''')
    with open(path, 'w') as sh:
        sh.write(content)
|
|
|
def check_service(host, port):
    """Return True if a TCP connection to host:port succeeds within 5 seconds."""
    try:
        with socket.create_connection((host, port), timeout=5):
            return True
    except socket.error:
        return False
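# NOTE: `_server_port` is used by localProxy() below but is never defined in the
# original script; 7860 is assumed here as the default WebUI/Gradio port.
_server_port = 7860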
|
def localProxy():
    """Install nginx and start it with the downloaded reverse-proxy config."""
    os.system('sudo apt install nginx -y')
    download_file('https://huggingface.co/datasets/ACCA225/openxlab/resolve/main/proxy_nginx.conf', os.path.join(base_directory, 'proxy_nginx.conf'))
    if not check_service('localhost', _server_port):
        run('''nginx -c /home/xlab-app-center/stable-diffusion-webui/proxy_nginx.conf''')
    run('''nginx -s reload''')
|
|
|
|
|
# Log in to Weights & Biases and initialise NVML for GPU telemetry.
os.system('wandb login 5c00964de1bb95ec1ab24869d4c523c59e0fb8e3')
nvidia_smi.nvmlInit()

wandb.init(project="gpu-temperature-monitor")
|
|
|
|
def monitor_gpu():
    """Launch the WebUI in a background thread, then log GPU stats to wandb every minute."""
    start_thread = threading.Thread(target=start)
    start_thread.start()
    while True:
        try:
            handle = nvidia_smi.nvmlDeviceGetHandleByIndex(0)
            gpu_temperature = nvidia_smi.nvmlDeviceGetTemperature(handle, nvidia_smi.NVML_TEMPERATURE_GPU)

            utilization = nvidia_smi.nvmlDeviceGetUtilizationRates(handle)
            gpu_usage = utilization.gpu

            wandb.log({"GPU temperature": gpu_temperature, "GPU utilization": gpu_usage})
        except Exception as e:
            print(f"Error: {e}")

        time.sleep(60)
|
def zrok():
    # Placeholder for an alternative tunnel (zrok); intentionally a no-op.
    pass
|
def start():
    """Prepare dependencies, launch the WebUI instances, and expose them via ngrok."""
    os.system("pip install pyngrok")

    try:
        # First run with --exit so launch.py only installs its dependencies.
        os.system("python launch.py --api --xformers --exit --enable-insecure-extension-access --gradio-queue --disable-safe-unpickle")

        # Public instance on the default port, plus two private-UI instances.
        command = "python launch.py --api --xformers --enable-insecure-extension-access --gradio-queue --disable-safe-unpickle"
        command1 = "python launch.py --ui-config-file=/home/xlab-app-center/stable-diffusion-webui/ui-config-pri.json --api --xformers --enable-insecure-extension-access --gradio-queue --disable-safe-unpickle --port=7862"
        command2 = "python launch.py --ui-config-file=/home/xlab-app-center/stable-diffusion-webui/ui-config-pri.json --api --xformers --enable-insecure-extension-access --gradio-queue --disable-safe-unpickle --port=7863"
        process = subprocess.Popen(command, shell=True)
        time.sleep(250)
        process = subprocess.Popen(command1, shell=True)
        time.sleep(100)
        print("Starting ngrok...")
        # Run the third instance and tunnel ports 7862/7863 through ngrok.
        os.system(f"{command2} & ngrok http 7862 --authtoken=2bgXLjjKFvxfbuZFlR2NMZkvL8n_4WrK7f15FLtWb8p7v3oaF & ngrok http 7863 --authtoken=2CXyNlT9xGfFoL5ruI6hQV20FNq_7tbmuzS9RtyNTkyEe1J6C")
    except Exception as e:
        print(f"Error while launching SD: {e}")
        try:
            # Fallback: launch a single instance in the foreground.
            os.system("python launch.py --api --xformers --enable-insecure-extension-access --gradio-queue --disable-safe-unpickle")
        except Exception as e:
            print(f"An error occurred: {e}")
|
|
|
|
|
# Kick off the (currently no-op) tunnel thread, then run the blocking GPU monitor,
# which in turn launches the WebUI.
net_thread = threading.Thread(target=zrok)
net_thread.start()

monitor_gpu()