$ cat node-template.py

Image Creation

// Generates an image from a text prompt using a native diffusers-based GPU service. Supports a configurable aspect ratio, a megapixel budget, and advanced generation parameters.

Process
Image
template.py
import os
import sys
import json
import math
import random
import subprocess
import time
import traceback

# Best-effort bootstrap: the runtime image may not ship `requests`.
try:
    import requests
except ImportError:
    subprocess.check_call([sys.executable, "-m", "pip", "install", "requests"])
    import requests

# Endpoint of the image-generation sidecar; overridable for local testing.
NATIVE_IMAGE_SERVICE_URL = os.getenv("NATIVE_IMAGE_SERVICE_URL", "http://native-image-service:8100")
_EMBLEMA_VERSION = os.getenv("EMBLEMA_VERSION", "dev")
NATIVE_IMAGE_SERVICE_IMAGE = os.getenv(
    "NATIVE_IMAGE_SERVICE_IMAGE",
    f"emblema/native-image-service:{_EMBLEMA_VERSION}",
)
# Host path mounted into the container so model weights are cached across runs.
HF_CACHE_HOST_PATH = os.getenv("HF_CACHE_HOST_PATH", "/root/.cache/huggingface")
CONTAINER_NAME = "native-image-service"
OUTPUT_DIR = "/data/output"


def compute_dimensions(aspect_ratio: float, megapixel: float) -> tuple[int, int]:
    """Compute (width, height) from an aspect ratio and a megapixel budget,
    with both dimensions floored to a multiple of 64 (minimum 64).

    The result may hold slightly fewer pixels than requested because of the
    flooring; diffusion backends typically require 64-divisible dimensions.
    """
    total_pixels = megapixel * 1_000_000
    height = int(math.sqrt(total_pixels / aspect_ratio))
    width = int(height * aspect_ratio)
    # Floor to the latent block size, never dropping below one block.
    width = max(64, (width // 64) * 64)
    height = max(64, (height // 64) * 64)
    return width, height


def _coerce_bool(value, default: bool = True) -> bool:
    """Interpret a JSON boolean or a "true"/"false" string as a Python bool.

    Inputs may arrive either as real JSON booleans or as strings depending on
    the caller, so accept both instead of assuming `str` (a bare bool has no
    `.lower()` and would otherwise raise AttributeError).
    """
    if isinstance(value, bool):
        return value
    if value is None:
        return default
    return str(value).strip().lower() == "true"


def start_container() -> None:
    """Create and start native-image-service, removing any stale container first.

    Raises:
        RuntimeError: if `docker run` fails or the service does not report
            healthy within the polling window.
    """
    # Remove any existing container (stale from previous run); best-effort,
    # so the exit status is deliberately ignored.
    subprocess.run(
        ["docker", "rm", "-f", CONTAINER_NAME],
        capture_output=True, text=True
    )

    hf_token = os.getenv("HUGGINGFACE_TOKEN", "")
    print(f"Creating container {CONTAINER_NAME}...", file=sys.stderr)
    run_cmd = [
        "docker", "run", "-d",
        "--name", CONTAINER_NAME,
        "--network", "emblema",
        "--gpus", "all",
        "-e", "PORT=8100",
        "-e", "DEFAULT_MODEL=flux-2-klein",
        "-e", "DEVICE=cuda",
        "-e", "DTYPE=float16",
        "-e", f"HF_TOKEN={hf_token}",
        "-v", f"{HF_CACHE_HOST_PATH}:/root/.cache/huggingface",
        NATIVE_IMAGE_SERVICE_IMAGE,
    ]
    result = subprocess.run(run_cmd, capture_output=True, text=True)
    if result.returncode != 0:
        print(f"docker run failed (exit {result.returncode}): {result.stderr}", file=sys.stderr)
        raise RuntimeError(f"Failed to start container: {result.stderr}")

    # Poll the health endpoint until the model server is ready.
    timeout = 180
    interval = 3
    elapsed = 0
    health_url = f"{NATIVE_IMAGE_SERVICE_URL}/health"
    while elapsed < timeout:
        try:
            r = requests.get(health_url, timeout=5)
            if r.status_code == 200:
                print(f"Container healthy (waited {elapsed}s).", file=sys.stderr)
                return
        except requests.RequestException:
            # Connection refused / read timeout while the service warms up is
            # expected — keep polling rather than aborting on the first hiccup.
            pass
        time.sleep(interval)
        elapsed += interval

    raise RuntimeError(f"Container did not become healthy within {timeout}s")


def stop_container() -> None:
    """Remove the container (equivalent to --rm). Best-effort: failures are
    logged as warnings so cleanup never masks the original error."""
    try:
        subprocess.run(
            ["docker", "rm", "-f", CONTAINER_NAME],
            capture_output=True, text=True, timeout=30
        )
        print(f"Container {CONTAINER_NAME} removed.", file=sys.stderr)
    except Exception as e:
        print(f"Warning: failed to remove container: {e}", file=sys.stderr)


def main() -> None:
    """Read a JSON execution request from stdin, run one image generation
    against the native image service, and emit the output manifest on stdout.

    On any failure, prints a JSON error object to stderr and exits 1.
    """
    try:
        input_json = sys.stdin.read()
        execution_input = json.loads(input_json)
        inputs = execution_input.get("inputs", {})

        # Parse and validate inputs before starting the container, so bad
        # requests fail fast without paying the container-startup cost.
        prompt = inputs.get("prompt", "")
        aspect_ratio = float(inputs.get("aspect_ratio", 1.667))
        megapixel = float(inputs.get("megapixel", 1.0))
        model = inputs.get("model", "flux-2-klein")
        num_inference_steps = int(inputs.get("num_inference_steps", 4))
        guidance_scale = float(inputs.get("guidance_scale", 1.0))
        scheduler = inputs.get("scheduler", "euler")
        dtype = inputs.get("dtype", "bfloat16")
        # Accept a JSON bool or a "true"/"false" string (defaults to True).
        cpu_offload = _coerce_bool(inputs.get("cpu_offload", "true"))
        generator_device = inputs.get("generator_device", "cpu")

        # Seed mode handling: "fixed" with a non-negative seed is reproducible;
        # anything else draws a fresh random 31-bit seed.
        seed_mode = inputs.get("seed_mode", "random")
        seed_input = int(inputs.get("seed", -1))

        if seed_mode == "fixed" and seed_input >= 0:
            seed_value = seed_input
        else:
            seed_value = random.randint(0, 2**31 - 1)

        if not prompt:
            raise ValueError("Prompt is required")
        if not (0.25 <= aspect_ratio <= 4.0):
            raise ValueError(f"Aspect ratio must be between 0.25 and 4.0, got {aspect_ratio}")
        if not (0.25 <= megapixel <= 4.0):
            raise ValueError(f"Megapixel must be between 0.25 and 4.0, got {megapixel}")

        width, height = compute_dimensions(aspect_ratio, megapixel)

        # Start the container
        start_container()

        try:
            # Build request payload
            payload = {
                "prompt": prompt,
                "width": width,
                "height": height,
                "num_inference_steps": num_inference_steps,
                "guidance_scale": guidance_scale,
                "seed": seed_value,
                "scheduler": scheduler,
                "model": model,
                "dtype": dtype,
                "cpu_offload": cpu_offload,
                "generator_device": generator_device,
            }

            print(
                f"Requesting generation: {width}x{height}, steps={num_inference_steps}, "
                f"cfg={guidance_scale}, seed={seed_value}, model={model}, scheduler={scheduler}, "
                f"dtype={dtype}, cpu_offload={cpu_offload}, generator_device={generator_device}",
                file=sys.stderr,
            )

            # Call native image service; generation can be slow, hence the
            # generous 10-minute timeout.
            resp = requests.post(
                f"{NATIVE_IMAGE_SERVICE_URL}/generate",
                json=payload,
                timeout=600,
            )

            if resp.status_code != 200:
                try:
                    error_detail = resp.json()
                except Exception:
                    error_detail = resp.text
                raise RuntimeError(
                    f"Native image service returned {resp.status_code}: {error_detail}"
                )

            # Save image
            os.makedirs(OUTPUT_DIR, exist_ok=True)
            out_path = os.path.join(OUTPUT_DIR, "generated_image.png")
            with open(out_path, "wb") as f:
                f.write(resp.content)

            # Log metadata from headers
            seed_used = resp.headers.get("X-Seed", "unknown")
            inference_time = resp.headers.get("X-Inference-Time-Ms", "unknown")
            print(
                f"Generated: seed={seed_used}, time={inference_time}ms, "
                f"size={width}x{height}, model={model}",
                file=sys.stderr,
            )

            # Flat output — keys match OUTPUT_SCHEMA
            output = {
                "image": "generated_image.png",
            }
            print(json.dumps(output, indent=2))

        finally:
            # Always tear the container down, even on generation failure.
            stop_container()

    except Exception as e:
        error_output = {
            "error": str(e),
            "errorType": type(e).__name__,
            "traceback": traceback.format_exc(),
        }
        print(json.dumps(error_output), file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()