$ cat node-template.py
3D Model Creation
// Generates a 3D model (GLB) from a single image using Hunyuan3D 2.1 via a native GPU service. Creates mesh, applies textures via multi-view generation, and exports a ready-to-use GLB file.
Process
3D
template.py
import os
import sys
import json
import subprocess
import time
import traceback

# requests is a third-party dependency; install it on the fly if missing so
# this template can run in a bare Python environment.
try:
    import requests
except ImportError:
    subprocess.check_call([sys.executable, "-m", "pip", "install", "requests"])
    import requests

# Base URL of the native 3D model creation service (Hunyuan3D 2.1).
NATIVE_MODEL_CREATION_SERVICE_URL = os.getenv(
    "NATIVE_MODEL_CREATION_SERVICE_URL", "http://native-model-creation-service:8111"
)
_EMBLEMA_VERSION = os.getenv("EMBLEMA_VERSION", "dev")
NATIVE_MODEL_CREATION_SERVICE_IMAGE = os.getenv(
    "NATIVE_MODEL_CREATION_SERVICE_IMAGE",
    f"emblema/native-model-creation-service:{_EMBLEMA_VERSION}",
)
# Host path of the HuggingFace cache, mounted into the container so model
# weights are not re-downloaded on every run.
HF_CACHE_HOST_PATH = os.getenv("HF_CACHE_HOST_PATH", "/root/.cache/huggingface")
CONTAINER_NAME = "native-model-creation-service"
INPUT_DIR = "/data/input"
OUTPUT_DIR = "/data/output"


def start_container():
    """Create and start native-model-creation-service, removing any stale container first.

    Starts the GPU service container via `docker run`, then blocks until its
    /health endpoint answers 200.

    Raises:
        RuntimeError: if `docker run` fails, or the service does not become
            healthy within the startup timeout.
    """
    # Best-effort removal of a leftover container from a previous run; failure
    # here (container does not exist) is expected and ignored.
    subprocess.run(
        ["docker", "rm", "-f", CONTAINER_NAME],
        capture_output=True, text=True
    )

    hf_token = os.getenv("HUGGINGFACE_TOKEN", "")
    print(f"Creating container {CONTAINER_NAME}...", file=sys.stderr)
    run_cmd = [
        "docker", "run", "-d",
        "--name", CONTAINER_NAME,
        "--network", "emblema",
        "--gpus", "all",
        "-e", "PORT=8111",
        "-e", "DEVICE=cuda",
        "-e", f"HF_TOKEN={hf_token}",
        "-v", f"{HF_CACHE_HOST_PATH}:/root/.cache/huggingface",
        NATIVE_MODEL_CREATION_SERVICE_IMAGE,
    ]
    result = subprocess.run(run_cmd, capture_output=True, text=True)
    if result.returncode != 0:
        print(f"docker run failed (exit {result.returncode}): {result.stderr}", file=sys.stderr)
        raise RuntimeError(f"Failed to start container: {result.stderr}")

    # Poll the health endpoint until the service is ready. Model loading can
    # take minutes on a cold HF cache, hence the generous timeout. Note:
    # `elapsed` does not count time spent inside requests.get itself, so the
    # effective wall-clock timeout may exceed `timeout` slightly.
    timeout = 360
    interval = 5
    elapsed = 0
    health_url = f"{NATIVE_MODEL_CREATION_SERVICE_URL}/health"
    while elapsed < timeout:
        try:
            r = requests.get(health_url, timeout=5)
            if r.status_code == 200:
                print(f"Container healthy (waited {elapsed}s).", file=sys.stderr)
                return
        except requests.RequestException:
            # FIX: the original caught only requests.ConnectionError. A
            # service that accepts the TCP connection but has not answered
            # yet raises requests.ReadTimeout, which is NOT a ConnectionError
            # subclass, so the poll crashed instead of retrying.
            # RequestException covers both ConnectionError and Timeout.
            pass

        time.sleep(interval)
        elapsed += interval

    raise RuntimeError(f"Container did not become healthy within {timeout}s")


def stop_container():
    """Remove the container.

    Best-effort cleanup: any failure (docker unavailable, timeout) is logged
    as a warning rather than raised, so it never masks the original error
    when called from a `finally` block.
    """
    try:
        subprocess.run(
            ["docker", "rm", "-f", CONTAINER_NAME],
            capture_output=True, text=True, timeout=30
        )
        print(f"Container {CONTAINER_NAME} removed.", file=sys.stderr)
    except Exception as e:
        print(f"Warning: failed to remove container: {e}", file=sys.stderr)


def main():
    """Run the 3D model creation node.

    Reads a JSON execution payload from stdin ({"inputs": {...}}), starts the
    GPU service container, posts the input image with generation parameters,
    writes the returned GLB to OUTPUT_DIR, and prints {"model": <filename>}
    as JSON to stdout. On any failure, prints a JSON error object to stderr
    and exits with status 1. The container is always removed, success or not.
    """
    try:
        input_json = sys.stdin.read()
        execution_input = json.loads(input_json)
        inputs = execution_input.get("inputs", {})

        image = inputs.get("image", "")
        if not image:
            raise ValueError("Input image is required")

        # Generation parameters with defaults matching the service's own.
        steps = inputs.get("steps", 25)
        guidance_scale = inputs.get("guidance_scale", 7.5)
        max_faces = inputs.get("max_faces", 200000)
        texture_size = inputs.get("texture_size", "1024")

        local_path = os.path.join(INPUT_DIR, image)
        if not os.path.exists(local_path):
            raise FileNotFoundError(f"Input image not found: {local_path}")

        os.makedirs(OUTPUT_DIR, exist_ok=True)

        # Start the container
        start_container()

        try:
            # Send image and parameters to service. Long timeout: mesh plus
            # multi-view texture generation can take many minutes.
            # NOTE(review): content type is hardcoded to image/png regardless
            # of the actual file extension — confirm the service ignores it
            # or only accepts PNG input.
            with open(local_path, "rb") as f:
                resp = requests.post(
                    f"{NATIVE_MODEL_CREATION_SERVICE_URL}/generate",
                    files={"image": (os.path.basename(local_path), f, "image/png")},
                    data={
                        "steps": str(steps),
                        "guidance_scale": str(guidance_scale),
                        "max_faces": str(max_faces),
                        "texture_size": str(texture_size),
                    },
                    timeout=900,
                )

            if resp.status_code != 200:
                # Prefer the structured error body; fall back to raw text.
                try:
                    error_detail = resp.json()
                except Exception:
                    error_detail = resp.text
                raise RuntimeError(
                    f"Model creation service returned {resp.status_code}: {error_detail}"
                )

            # Save result as binary GLB
            out_filename = "generated_model.glb"
            out_path = os.path.join(OUTPUT_DIR, out_filename)
            with open(out_path, "wb") as f:
                f.write(resp.content)

            inference_time = resp.headers.get("X-Inference-Time-Ms", "unknown")
            print(
                f"3D model generated: time={inference_time}ms, steps={steps}, "
                f"guidance_scale={guidance_scale}, max_faces={max_faces}, "
                f"texture_size={texture_size}",
                file=sys.stderr,
            )

            output = {
                "model": out_filename,
            }
            print(json.dumps(output, indent=2))

        finally:
            # Always tear the GPU container down, even on failure.
            stop_container()

    except Exception as e:
        # Top-level boundary: report the failure as structured JSON on stderr
        # so the orchestrator can surface it, then exit non-zero.
        error_output = {
            "error": str(e),
            "errorType": type(e).__name__,
            "traceback": traceback.format_exc(),
        }
        print(json.dumps(error_output), file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()