# pifuhd-fasi / app.py
import gradio as gr
import numpy as np
from PIFuHD.data import EvalWMetaDataset
from PIFuHD.data.ImageBundle import ImageBundle
from PIFuHD.options import BaseOptions
from PIFuHD.recontructor import Reconstructor
from huggingface_hub import hf_hub_download
from human_pose_estimator import PoseEstimator
from estimator import rect
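
# Load models once at startup: a CPU pose estimator (used by rect() to locate the
# subject in the input image) and the pretrained PIFuHD checkpoint from the Hub.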
REPO_ID = "cxeep/PIFuHD"
pose_estimator = PoseEstimator("cpu")
checkpoint_path = hf_hub_download(repo_id=REPO_ID, filename="pifuhd.pt")
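
# PIFuHD evaluation settings, expressed as a CLI-style argument list for BaseOptions.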
cmd = [
    '--dataroot', './data',
    '--results_path', './results',
    '--loadSize', '1024',
    '--resolution', '256',
    '--load_netMR_checkpoint_path', checkpoint_path,
    '--start_id', '-1',
    '--end_id', '-1'
]
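
# Parse the options and build the reconstructor once so every request reuses the loaded network.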
parser = BaseOptions()
opts = parser.parse(cmd)
reconstructor = Reconstructor(opts)
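

# Run the pose estimator over the input and pack the image plus detection rects
# into the ImageBundle format expected by PIFuHD's evaluation dataset.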
def make_bundle(image, name):
    image, rects = rect(pose_estimator, image)
    return ImageBundle(img=image, name=name, meta=rects)
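

# Gradio callback: reconstruct a 3D model from a single image. Returns the rendered
# preview image and the mesh (fed to both the 3D viewer and the download link).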
def predict(img: np.ndarray):
    bundle = make_bundle(img, "Model3D")
    dataset = EvalWMetaDataset(opts, [bundle])
    img, model = reconstructor.evaluate(dataset)
    return img, model, model
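

# Footer HTML linking back to the original PIFuHD project.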
footer = r"""
<center>
<b>
Demo for <a href='https://github.com/facebookresearch/pifuhd'>PIFuHD</a>
</b>
</center>
"""
with gr.Blocks(title="PIFuHD") as app:
gr.HTML("<center><h1>3D Human Digitization</h1></center>")
gr.HTML("<center><h3>PIFuHD: Multi-Level Pixel-Aligned Implicit Function for High-Resolution 3D Human Digitization (CVPR 2020)</h3></center>")
    with gr.Row(equal_height=False):
        with gr.Column():
            input_img = gr.Image(type="numpy", label="Input image")
            run_btn = gr.Button(variant="primary")
        with gr.Column():
            output_obj = gr.Model3D(label="Output model")
            output_img = gr.Image(type="filepath", label="Output image")
            output_file = gr.File(label="Download 3D Model")
            gr.ClearButton(components=[input_img, output_img, output_obj, output_file], variant="stop")
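
    # Run the reconstruction when the button is clicked.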
    run_btn.click(predict, [input_img], [output_img, output_obj, output_file])
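
    # Clickable examples: the Space is expected to ship examples/01.png .. 03.png.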
    with gr.Row():
        blobs = [[f"examples/{x:02d}.png"] for x in range(1, 4)]
        examples = gr.Dataset(components=[input_img], samples=blobs)
        examples.click(lambda x: x[0], [examples], [input_img])
    with gr.Row():
        gr.HTML(footer)
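
# Enable the request queue before launching; launch() with debug=True blocks the main thread.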
app.queue()
app.launch(share=False, debug=True, show_error=True)