Wan2.2-Lightning/Wan2.2-I2V-A14B-4steps-lora-rank64-Seko-V1/Wan2.2-I2V-A14B-4steps-lora-rank64-Seko-V1-NativeComfy.json
{
"id": "ec7da562-7e21-4dac-a0d2-f4441e1efd3b",
"revision": 0,
"last_node_id": 73,
"last_link_id": 144,
"nodes": [
{
"id": 38,
"type": "CLIPLoader",
"pos": [
38.86457061767578,
189.26513671875
],
"size": [
346.391845703125,
106
],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"slot_index": 0,
"links": [
74,
75
]
}
],
"properties": {
"Node name for S&R": "CLIPLoader",
"cnr_id": "comfy-core",
"ver": "0.3.45",
"models": [
{
"name": "umt5_xxl_fp8_e4m3fn_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/text_encoders/umt5_xxl_fp8_e4m3fn_scaled.safetensors",
"directory": "text_encoders"
}
]
},
"widgets_values": [
"umt5_xxl_fp8_e4m3fn_scaled.safetensors",
"wan",
"default"
]
},
{
"id": 58,
"type": "KSamplerAdvanced",
"pos": [
1300.801513671875,
-48.10361099243164
],
"size": [
296.40533447265625,
334.2197265625
],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 123
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 128
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 130
},
{
"name": "latent_image",
"type": "LATENT",
"link": 113
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
124
]
}
],
"properties": {
"Node name for S&R": "KSamplerAdvanced",
"cnr_id": "comfy-core",
"ver": "0.3.45"
},
"widgets_values": [
"disable",
42,
"fixed",
4,
1,
"euler",
"simple",
2,
4,
"disable"
]
},
{
"id": 60,
"type": "CreateVideo",
"pos": [
1653.5601806640625,
79.06910705566406
],
"size": [
270,
78
],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 131
},
{
"name": "audio",
"shape": 7,
"type": "AUDIO",
"link": null
}
],
"outputs": [
{
"name": "VIDEO",
"type": "VIDEO",
"links": [
132
]
}
],
"properties": {
"Node name for S&R": "CreateVideo",
"cnr_id": "comfy-core",
"ver": "0.3.45"
},
"widgets_values": [
16
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [
1657.0252685546875,
-42.49197006225586
],
"size": [
210,
46
],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 124
},
{
"name": "vae",
"type": "VAE",
"link": 76
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"slot_index": 0,
"links": [
131
]
}
],
"properties": {
"Node name for S&R": "VAEDecode",
"cnr_id": "comfy-core",
"ver": "0.3.45"
},
"widgets_values": []
},
{
"id": 69,
"type": "Note",
"pos": [
1086.6795654296875,
359.7047424316406
],
"size": [
379.72747802734375,
135.40545654296875
],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"Set steps to 4, split step to 2.\n\nShift to 5\n\nPlease let me know if you run into any issues.\n\nhttps://github.com/ModelTC/Wan2.2-Lightning/issues/3\nhttps://github.com/ModelTC/Wan2.2-Lightning"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 54,
"type": "ModelSamplingSD3",
"pos": [
587.8239135742188,
227.8932647705078
],
"size": [
210,
60
],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 134
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"slot_index": 0,
"links": [
125
]
}
],
"properties": {
"Node name for S&R": "ModelSamplingSD3",
"cnr_id": "comfy-core",
"ver": "0.3.45"
},
"widgets_values": [
5.000000000000001
]
},
{
"id": 55,
"type": "ModelSamplingSD3",
"pos": [
585.8684692382812,
336.4201354980469
],
"size": [
210,
58
],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 136
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"slot_index": 0,
"links": [
123
]
}
],
"properties": {
"Node name for S&R": "ModelSamplingSD3",
"cnr_id": "comfy-core",
"ver": "0.3.45"
},
"widgets_values": [
5.000000000000001
]
},
{
"id": 57,
"type": "KSamplerAdvanced",
"pos": [
949.1582641601562,
-58.41084289550781
],
"size": [
304.748046875,
334
],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 125
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 142
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 143
},
{
"name": "latent_image",
"type": "LATENT",
"link": 144
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
113
]
}
],
"properties": {
"Node name for S&R": "KSamplerAdvanced",
"cnr_id": "comfy-core",
"ver": "0.3.45"
},
"widgets_values": [
"enable",
42,
"fixed",
4,
1,
"euler",
"simple",
0,
2,
"enable"
]
},
{
"id": 67,
"type": "LoraLoaderModelOnly",
"pos": [
485.45086669921875,
-78.97525024414062
],
"size": [
315,
82
],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 133
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
134
]
}
],
"properties": {
"Node name for S&R": "LoraLoaderModelOnly"
},
"widgets_values": [
"Wan2.2-I2V-A14B-4steps-lora-rank64-Seko-V1/high_noise_model.safetensors",
1.0000000000000002
]
},
{
"id": 39,
"type": "VAELoader",
"pos": [
40,
350
],
"size": [
344.731689453125,
59.98149108886719
],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"slot_index": 0,
"links": [
76,
138
]
}
],
"properties": {
"Node name for S&R": "VAELoader",
"cnr_id": "comfy-core",
"ver": "0.3.45",
"models": [
{
"name": "wan_2.1_vae.safetensors",
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/vae/wan_2.1_vae.safetensors",
"directory": "vae"
}
]
},
"widgets_values": [
"wan_2.1_vae.safetensors"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [
426.6974792480469,
722.106201171875
],
"size": [
425.27801513671875,
180.6060791015625
],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 75
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"slot_index": 0,
"links": [
130,
140
]
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode",
"cnr_id": "comfy-core",
"ver": "0.3.45"
},
"widgets_values": [
""
],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 63,
"type": "Note",
"pos": [
-13.432207107543945,
784.9240112304688
],
"size": [
370,
120
],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Video Size",
"properties": {},
"widgets_values": [
"By default, we set the video to a smaller size for users with low VRAM. If you have enough VRAM, you can change the size"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 73,
"type": "WanImageToVideo",
"pos": [
15.426290512084961,
540.6696166992188
],
"size": [
315,
210
],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "positive",
"type": "CONDITIONING",
"link": 139
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 140
},
{
"name": "vae",
"type": "VAE",
"link": 138
},
{
"name": "clip_vision_output",
"shape": 7,
"type": "CLIP_VISION_OUTPUT",
"link": null
},
{
"name": "start_image",
"shape": 7,
"type": "IMAGE",
"link": 141
}
],
"outputs": [
{
"name": "positive",
"type": "CONDITIONING",
"links": [
142
]
},
{
"name": "negative",
"type": "CONDITIONING",
"links": [
143
]
},
{
"name": "latent",
"type": "LATENT",
"links": [
144
]
}
],
"properties": {
"Node name for S&R": "WanImageToVideo"
},
"widgets_values": [
1280,
720,
81,
1
]
},
{
"id": 37,
"type": "UNETLoader",
"pos": [
34.225364685058594,
-71.77911376953125
],
"size": [
346.7470703125,
82
],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"slot_index": 0,
"links": [
133
]
}
],
"properties": {
"Node name for S&R": "UNETLoader",
"cnr_id": "comfy-core",
"ver": "0.3.45",
"models": [
{
"name": "wan2.2_t2v_high_noise_14B_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_high_noise_14B_fp8_scaled.safetensors",
"directory": "diffusion_models"
}
]
},
"widgets_values": [
"wan2.2_i2v_high_noise_14B_fp16.safetensors",
"default"
]
},
{
"id": 56,
"type": "UNETLoader",
"pos": [
34.70310592651367,
57.175601959228516
],
"size": [
346.7470703125,
82
],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"slot_index": 0,
"links": [
135
]
}
],
"properties": {
"Node name for S&R": "UNETLoader",
"cnr_id": "comfy-core",
"ver": "0.3.45",
"models": [
{
"name": "wan2.2_t2v_low_noise_14B_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_low_noise_14B_fp8_scaled.safetensors",
"directory": "diffusion_models"
}
]
},
"widgets_values": [
"wan2.2_i2v_low_noise_14B_fp16.safetensors",
"default"
]
},
{
"id": 68,
"type": "LoraLoaderModelOnly",
"pos": [
487.37158203125,
72.34300994873047
],
"size": [
315,
82
],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 135
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
136
]
}
],
"properties": {
"Node name for S&R": "LoraLoaderModelOnly"
},
"widgets_values": [
"Wan2.2-I2V-A14B-4steps-lora-rank64-Seko-V1/low_noise_model.safetensors",
1.0000000000000002
]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [
426.6974792480469,
522.106201171875
],
"size": [
422.84503173828125,
164.31304931640625
],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 74
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"slot_index": 0,
"links": [
128,
139
]
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode",
"cnr_id": "comfy-core",
"ver": "0.3.45"
},
"widgets_values": [
"A man and a woman sit facing each other, their eyes locked in quiet affection. They lean forward and share a warm embrace, lingering for a moment before gently pulling apart. Smiling playfully, they then raise their hands, their fingers carefully curving to form a perfect heart shape against the golden afternoon light."
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 71,
"type": "LoadImage",
"pos": [
-353.1475830078125,
539.164306640625
],
"size": [
315,
314
],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"links": [
141
]
},
{
"name": "MASK",
"type": "MASK",
"links": null
}
],
"properties": {
"Node name for S&R": "LoadImage"
},
"widgets_values": [
"i2v_test_4_cartoon (1).png",
"image",
""
]
},
{
"id": 61,
"type": "SaveVideo",
"pos": [
1658.5662841796875,
215.62408447265625
],
"size": [
695.2254028320312,
820
],
"flags": {},
"order": 19,
"mode": 0,
"inputs": [
{
"name": "video",
"type": "VIDEO",
"link": 132
}
],
"outputs": [],
"properties": {
"Node name for S&R": "SaveVideo",
"cnr_id": "comfy-core",
"ver": "0.3.45"
},
"widgets_values": [
"WanVideo2_2_I2V_Lightning",
"mp4",
"h264",
""
]
},
{
"id": 62,
"type": "MarkdownNote",
"pos": [
-480,
-120
],
"size": [
476.29986572265625,
437.32293701171875
],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Model Links",
"properties": {},
"widgets_values": [
"**Diffusion Model**\n- [wan2.2_i2v_high_noise_14B_fp16.safetensors](https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_high_noise_14B_fp16.safetensors)\n- [wan2.2_i2v_low_noise_14B_fp16.safetensors](https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_i2v_low_noise_14B_fp16.safetensors)\n\n**VAE**\n- [wan_2.1_vae.safetensors](https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/vae/wan_2.1_vae.safetensors)\n\n**Text Encoder** \n- [umt5_xxl_fp8_e4m3fn_scaled.safetensors](https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/text_encoders/umt5_xxl_fp8_e4m3fn_scaled.safetensors)\n\n**Loras** \n- [Wan2.2-I2V-A14B-4steps-lora-rank64-Seko-V1](https://huggingface.co/lightx2v/Wan2.2-Lightning/tree/main/Wan2.2-I2V-A14B-4steps-lora-rank64-Seko-V1)\n\n\nFile save location\n\n```\nComfyUI/\n├───📂 models/\n│ ├───📂 diffusion_models/\n│ │ ├─── wan2.2_i2v_low_noise_14B_fp8_scaled.safetensors\n│ │ └─── wan2.2_i2v_high_noise_14B_fp8_scaled.safetensors\n│ ├───📂 text_encoders/\n│ │ └─── umt5_xxl_fp8_e4m3fn_scaled.safetensors \n│ ├───📂 loras/\n│ ├─────📂 Wan2.2-I2V-A14B-4steps-lora-rank64-Seko-V1/\n│ │ ├────── high_noise_model.safetensors\n│ │ └────── native_low_noise_model.safetensors \n│ └───📂 vae/\n│ └── wan_2.1_vae.safetensors\n\n```\n"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[
74,
38,
0,
6,
0,
"CLIP"
],
[
75,
38,
0,
7,
0,
"CLIP"
],
[
76,
39,
0,
8,
1,
"VAE"
],
[
113,
57,
0,
58,
3,
"LATENT"
],
[
123,
55,
0,
58,
0,
"MODEL"
],
[
124,
58,
0,
8,
0,
"LATENT"
],
[
125,
54,
0,
57,
0,
"MODEL"
],
[
128,
6,
0,
58,
1,
"CONDITIONING"
],
[
130,
7,
0,
58,
2,
"CONDITIONING"
],
[
131,
8,
0,
60,
0,
"IMAGE"
],
[
132,
60,
0,
61,
0,
"VIDEO"
],
[
133,
37,
0,
67,
0,
"MODEL"
],
[
134,
67,
0,
54,
0,
"MODEL"
],
[
135,
56,
0,
68,
0,
"MODEL"
],
[
136,
68,
0,
55,
0,
"MODEL"
],
[
138,
39,
0,
73,
2,
"VAE"
],
[
139,
6,
0,
73,
0,
"CONDITIONING"
],
[
140,
7,
0,
73,
1,
"CONDITIONING"
],
[
141,
71,
0,
73,
4,
"IMAGE"
],
[
142,
73,
0,
57,
1,
"CONDITIONING"
],
[
143,
73,
1,
57,
2,
"CONDITIONING"
],
[
144,
73,
2,
57,
3,
"LATENT"
]
],
"groups": [
{
"id": 1,
"title": "Step1 - Load models",
"bounding": [
20,
-150,
822.4443359375,
578.7847900390625
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Step2 - Video size",
"bounding": [
-368.64581298828125,
460.8156433105469,
736.7346801757812,
455.005126953125
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Step3 Prompt",
"bounding": [
416.6974792480469,
452.1065673828125,
445.27801513671875,
464.2060852050781
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.39142513012204133,
"offset": [
846.2958037931919,
160.95986963678638
]
},
"frontendVersion": "1.25.3",
"VHS_latentpreview": false,
"VHS_latentpreviewrate": 0,
"VHS_MetadataImage": true,
"VHS_KeepIntermediate": true
},
"version": 0.4
}
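
Below is a minimal sketch (not part of the workflow JSON above) of one way to fetch the files listed in the workflow's Model Links note into the ComfyUI directory layout it describes. It uses `hf_hub_download` from `huggingface_hub`; the repo IDs and file paths are taken from the note's URLs, while `COMFYUI_ROOT` is an assumption you should adjust to your own install.

```python
# Sketch: download the models referenced in the "Model Links" note into ComfyUI/models/.
# Assumes `huggingface_hub` is installed; COMFYUI_ROOT is an assumption, adjust as needed.
import shutil
from pathlib import Path

from huggingface_hub import hf_hub_download

COMFYUI_ROOT = Path("ComfyUI")  # assumed location of your ComfyUI install

# (repo_id, path inside the repo, target sub-directory under ComfyUI/models/)
FILES = [
    ("Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
     "split_files/diffusion_models/wan2.2_i2v_high_noise_14B_fp16.safetensors",
     "diffusion_models"),
    ("Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
     "split_files/diffusion_models/wan2.2_i2v_low_noise_14B_fp16.safetensors",
     "diffusion_models"),
    ("Comfy-Org/Wan_2.2_ComfyUI_Repackaged",
     "split_files/vae/wan_2.1_vae.safetensors",
     "vae"),
    ("Comfy-Org/Wan_2.1_ComfyUI_repackaged",
     "split_files/text_encoders/umt5_xxl_fp8_e4m3fn_scaled.safetensors",
     "text_encoders"),
    ("lightx2v/Wan2.2-Lightning",
     "Wan2.2-I2V-A14B-4steps-lora-rank64-Seko-V1/high_noise_model.safetensors",
     "loras/Wan2.2-I2V-A14B-4steps-lora-rank64-Seko-V1"),
    ("lightx2v/Wan2.2-Lightning",
     "Wan2.2-I2V-A14B-4steps-lora-rank64-Seko-V1/low_noise_model.safetensors",
     "loras/Wan2.2-I2V-A14B-4steps-lora-rank64-Seko-V1"),
]

for repo_id, repo_path, subdir in FILES:
    cached = hf_hub_download(repo_id=repo_id, filename=repo_path)  # downloads into the HF cache
    target_dir = COMFYUI_ROOT / "models" / subdir
    target_dir.mkdir(parents=True, exist_ok=True)
    target = target_dir / Path(repo_path).name
    if not target.exists():
        shutil.copy(cached, target)  # place it where the workflow's loader nodes expect it
    print(f"{target} ready")
```

Running this once before loading the workflow should leave the files under the exact names the UNETLoader, CLIPLoader, VAELoader, and LoraLoaderModelOnly nodes reference.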