import spaces
import gradio as gr
import numpy as np
import torch

from chrislib.general import invert, view, view_scale
from intrinsic.pipeline import load_models, run_pipeline

DEVICE = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

# Load the v2 intrinsic decomposition models once at startup.
intrinsic_models = load_models('v2', device=DEVICE)


def generate_pipeline(models):
    """Bind the loaded models into a single-argument pipeline function."""
    def pipeline_func(image, **kwargs):
        return run_pipeline(models, image, **kwargs)
    return pipeline_func


pipeline_func = generate_pipeline(intrinsic_models)


@spaces.GPU
def process_image(image):
    # The change event also fires when the input image is cleared.
    if image is None:
        return [None, None, None]

    print(image.shape)

    # Convert the uint8 input to float32 in [0, 1] before running the pipeline.
    image = image.astype(np.single) / 255.
    result = pipeline_func(image, device=DEVICE)

    return [
        view(result['hr_alb']),         # gamma-corrected high-resolution albedo
        1 - invert(result['dif_shd']),  # diffuse shading mapped to a displayable range
        view_scale(result['pos_res']),  # scaled view shown as the diffuse image
    ]


with gr.Blocks(
    css="""
    #download { height: 118px; }
    .slider .inner { width: 5px; background: #FFF; }
    .viewport { aspect-ratio: 4/3; }
    .tabs button.selected { font-size: 20px !important; color: crimson !important; }
    h1 { text-align: center; display: block; }
    h2 { text-align: center; display: block; }
    h3 { text-align: center; display: block; }
    .md_feedback li { margin-bottom: 0px !important; }
    """,
) as demo:
    gr.Markdown(
        """
        # Colorful Diffuse Intrinsic Image Decomposition in the Wild
""" ) with gr.Row(): input_img = gr.Image(label="Input Image") with gr.Row(): alb_img = gr.Image(label="Albedo") shd_img = gr.Image(label="Diffuse Shading") dif_img = gr.Image(label="Diffuse Image") input_img.change(process_image, inputs=input_img, outputs=[alb_img, shd_img, dif_img]) demo.launch(show_error=True)