modelApiDoc_getStartedBeforeModelwan-ai/wan-2-2/lora/image-to-videomodelApiDoc_getStartedAfterModel modelApiDoc_getStartedMid modelApiDoc_schemaLinkmodelApiDoc_getStartedAfterSchema
curl --request POST \
--url https://model-api.runcomfy.net/v1/models/wan-ai/wan-2-2/lora/image-to-video \
--header "Content-Type: application/json" \
--header "Authorization: Bearer <token>" \
--data '{
"prompt": "The woman is walking slowly",
"image_url": "https://playgrounds-storage-public.runcomfy.net/tools/7002/media-files/usecase5-1-2-input.jpg",
"loras": [
{
"path": "https://huggingface.co/neph1/hard_cut_wan_lora/blob/main/hard_cut_200_wan_i2v_high.safetensors",
"transformer": "both",
"scale": 1
}
]
}'modelApiDoc_authP1 YOUR_API_TOKEN modelApiDoc_authP2 modelApiDoc_profilemodelApiDoc_authP3 Authorization modelApiDoc_authP4 Authorization: Bearer $YOUR_API_TOKENmodelApiDoc_authP5
modelApiDoc_submitBody
curl --request POST \
--url https://model-api.runcomfy.net/v1/models/wan-ai/wan-2-2/lora/image-to-video \
--header "Content-Type: application/json" \
--header "Authorization: Bearer <token>" \
--data '{
"prompt": "The woman is walking slowly",
"image_url": "https://playgrounds-storage-public.runcomfy.net/tools/7002/media-files/usecase5-1-2-input.jpg",
"loras": [
{
"path": "https://huggingface.co/neph1/hard_cut_wan_lora/blob/main/hard_cut_200_wan_i2v_high.safetensors",
"transformer": "both",
"scale": 1
}
]
}'modelApiDoc_monitorBody
curl --request GET \
--url https://model-api.runcomfy.net/v1/requests/{request_id}/status \
--header "Authorization: Bearer <token>"modelApiDoc_retrieveBody
curl --request GET \
--url https://model-api.runcomfy.net/v1/requests/{request_id}/result \
--header "Authorization: Bearer <token>"modelApiDoc_cancelBody
curl --request POST \
--url https://model-api.runcomfy.net/v1/requests/{request_id}/cancel \
--header "Authorization: Bearer <token>"modelApiDoc_hostedFileBody
{
"type": "object",
"title": "modelApiDoc_navInputSchema",
"required": [
"prompt",
"image_url",
"loras"
],
"properties": {
"image_url": {
"title": "Image",
"description": "",
"type": "string",
"default": "https://playgrounds-storage-public.runcomfy.net/tools/7002/media-files/usecase5-1-2-input.jpg"
},
"prompt": {
"title": "Prompt",
"description": "",
"type": "string",
"default": "The woman is walking slowly"
},
"negative_prompt": {
"title": "Negative Prompt",
"description": "",
"type": "string",
"default": ""
},
"loras": {
"title": "LoRAs",
"description": "List of LoRA weights to apply (maximum 3). Each LoRA can be a URL, HuggingFace repo ID, or local path.",
"type": "array",
"default": [
{
"path": "https://huggingface.co/neph1/hard_cut_wan_lora/blob/main/hard_cut_200_wan_i2v_high.safetensors",
"transformer": "both",
"scale": 1
}
],
"items": {
"path": {
"title": "LoRA Path",
"description": "URL, HuggingFace repo ID (owner/repo), or local path to LoRA weights.",
"type": "string",
"format": "str",
"default": ""
},
"transformer": {
"title": "Transformer",
"description": "Specifies which transformer to load the LoRA weight into: 'high' loads it into the high-noise transformer, 'low' loads it into the low-noise transformer, and 'both' loads it into both transformers.",
"type": "string",
"format": "str_with_choice",
"enum": [
"high",
"low",
"both"
],
"default": "both"
},
"scale": {
"title": "LoRA Scale",
"description": "Scale factor for LoRA application (0.0 to 4.0).",
"type": "float",
"format": "float_slider_with_range",
"minimum": 0,
"maximum": 4,
"default": 1
}
},
"maxItems": 3,
"minItems": 0
},
"num_frames": {
"title": "Number of Frames",
"description": "",
"type": "integer",
"default": 81,
"minimum": 17,
"maximum": 161
},
"frames_per_second": {
"title": "Frames Per Second",
"description": "",
"type": "integer",
"default": 16,
"minimum": 4,
"maximum": 60
},
"resolution": {
"title": "Resolution",
"description": "",
"type": "string",
"enum": [
"480p",
"580p",
"720p"
],
"default": "480p"
},
"aspect_ratio": {
"title": "Aspect Ratio (W:H)",
"description": "",
"type": "string",
"enum": [
"16:9",
"9:16",
"1:1",
"auto"
],
"default": "auto"
},
"num_inference_steps": {
"title": "Number of Inference Steps",
"description": "",
"type": "integer",
"default": 27,
"minimum": 2,
"maximum": 40
},
"seed": {
"title": "Seed",
"description": "",
"type": "integer",
"maximum": 99999,
"minimum": 10000,
"default": 15775
}
}
}{
"output": {
"type": "object",
"properties": {
"image": {
"type": "string",
"format": "uri",
"description": "modelApiDoc_outDescImage"
},
"video": {
"type": "string",
"format": "uri",
"description": "modelApiDoc_outDescVideo"
},
"images": {
"type": "array",
"description": "modelApiDoc_outDescImages",
"items": {
"type": "string",
"format": "uri"
}
},
"videos": {
"type": "array",
"description": "modelApiDoc_outDescVideos",
"items": {
"type": "string",
"format": "uri"
}
}
}
}
}RunComfy is the premier ComfyUI platform, offering a ComfyUI online environment and services, along with ComfyUI workflows featuring stunning visuals. RunComfy also provides AI Models, enabling artists to harness the latest AI tools to create incredible art.