"""
SAM 3D Body MCP Server
Image → 3D Human Mesh (GLB)
"""
import os
import sys
import subprocess
import tempfile
import uuid
from pathlib import Path

import gradio as gr
import numpy as np
import spaces
import torch
import bpy
from huggingface_hub import snapshot_download
from PIL import Image

# Clone the sam-3d-body repo if it is not already present
SAM3D_PATH = Path("/home/user/app/sam-3d-body")
if not SAM3D_PATH.exists():
    print("Cloning sam-3d-body repository...")
    subprocess.run([
        "git", "clone", 
        "https://github.com/facebookresearch/sam-3d-body.git",
        str(SAM3D_PATH)
    ], check=True)

# Make the cloned repository importable
sys.path.insert(0, str(SAM3D_PATH))

# Global model
MODEL = None
FACES = None

def load_model():
    """Load SAM 3D Body model"""
    global MODEL, FACES
    
    if MODEL is not None:
        return MODEL, FACES
    
    print("Loading SAM 3D Body model...")
    
    # Download the checkpoint from the Hub (HF_TOKEN is needed if the repo is gated)
    checkpoint_dir = snapshot_download(
        repo_id="facebook/sam-3d-body-dinov3",
        token=os.environ.get("HF_TOKEN")
    )
    
    from sam_3d_body import load_sam_3d_body, SAM3DBodyEstimator
    
    device = "cuda" if torch.cuda.is_available() else "cpu"
    
    model, model_cfg = load_sam_3d_body(
        checkpoint_path=f"{checkpoint_dir}/model.ckpt",
        device=device,
        mhr_path=f"{checkpoint_dir}/assets/mhr_model.pt"
    )
    
    MODEL = SAM3DBodyEstimator(
        sam_3d_body_model=model,
        model_cfg=model_cfg,
    )
    FACES = MODEL.faces
    
    print("βœ“ Model loaded")
    return MODEL, FACES


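# ZeroGPU: request a GPU for up to 60 s per call (assumes this Space runs on ZeroGPU hardware)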
@spaces.GPU(duration=60)
def reconstruct_body(image: np.ndarray) -> tuple:
    """
    Reconstruct 3D body mesh from image.
    
    Args:
        image: Input RGB image
    
    Returns:
        tuple: (glb_path, status)
    """
    if image is None:
        return None, "❌ No image provided"
    
    try:
        estimator, faces = load_model()
        
        # Process image
        if isinstance(image, Image.Image):
            image = np.array(image)
        
        # Convert RGB to BGR (OpenCV convention expected by the estimator)
        import cv2
        img_bgr = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
        
        outputs = estimator.process_one_image(img_bgr, bbox_thr=0.5)
        
        if not outputs:
            return None, "⚠️ No humans detected"
        
        # Export first person as GLB via Blender
        person = outputs[0]
        vertices = person["pred_vertices"]
        
        # Reset Blender scene
        bpy.ops.wm.read_factory_settings(use_empty=True)
        
        # Create mesh
        mesh = bpy.data.meshes.new("body_mesh")
        mesh.from_pydata(vertices.tolist(), [], faces.tolist())  # (verts, edges, faces); no explicit edges
        mesh.update()
        
        # Create object
        obj = bpy.data.objects.new("body", mesh)
        bpy.context.collection.objects.link(obj)
        bpy.context.view_layer.objects.active = obj
        obj.select_set(True)
        
        # Smooth shading
        for poly in mesh.polygons:
            poly.use_smooth = True
        
        # Save GLB
        output_dir = tempfile.mkdtemp()
        glb_path = f"{output_dir}/body_{uuid.uuid4().hex[:8]}.glb"
        bpy.ops.export_scene.gltf(
            filepath=glb_path,
            export_format='GLB',
            use_selection=True
        )
        
        return glb_path, f"✓ Detected {len(outputs)} person(s); exported the first as GLB"
        
    except Exception as e:
        import traceback
        traceback.print_exc()
        return None, f"❌ Error: {e}"

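# Quick local smoke test (a sketch; "person.jpg" is a hypothetical sample image):
#   glb_path, msg = reconstruct_body(np.array(Image.open("person.jpg").convert("RGB")))
#   print(msg, glb_path)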

# Gradio Interface
with gr.Blocks(title="SAM 3D Body MCP") as demo:
    gr.Markdown("# 🧍 SAM 3D Body MCP Server\n**Image → 3D Human Mesh (GLB)**")
    
    with gr.Row():
        with gr.Column():
            input_image = gr.Image(label="Input Image", type="numpy")
            btn = gr.Button("🎯 Reconstruct", variant="primary")
        
        with gr.Column():
            output_file = gr.File(label="3D Mesh (GLB)")
            status = gr.Textbox(label="Status")
    
    btn.click(reconstruct_body, inputs=[input_image], outputs=[output_file, status])
    
    gr.Markdown("""
    ---
    ### MCP Server
    Add this Space to an MCP client (replace `URL` with this Space's URL):
    ```json
    {
      "mcpServers": {
        "sam3d": {"command": "npx", "args": ["mcp-remote", "URL/gradio_api/mcp/sse"]}
      }
    }
    ```
    """)


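# Calling the tool from code (a sketch, not part of the app). The endpoint name
# "/reconstruct_body" is assumed from the function name, since btn.click() sets no api_name:
#
#   from gradio_client import Client, handle_file
#   client = Client("URL")  # replace URL with this Space's URL
#   glb_path, status = client.predict(handle_file("person.jpg"), api_name="/reconstruct_body")
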
if __name__ == "__main__":
    demo.launch(mcp_server=True)