Wan2.2-Lightning / Wan2.2-T2V-A14B-4steps-lora-rank64-Seko-V1 / Wan2.2-T2V-A14B-4steps-lora-rank64-Seko-V1-NativeComfy.json
{
"id": "ec7da562-7e21-4dac-a0d2-f4441e1efd3b",
"revision": 0,
"last_node_id": 69,
"last_link_id": 136,
"nodes": [
{
"id": 39,
"type": "VAELoader",
"pos": [
40,
350
],
"size": [
344.731689453125,
59.98149108886719
],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "VAE",
"type": "VAE",
"slot_index": 0,
"links": [
76
]
}
],
"properties": {
"Node name for S&R": "VAELoader",
"cnr_id": "comfy-core",
"ver": "0.3.45",
"models": [
{
"name": "wan_2.1_vae.safetensors",
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/vae/wan_2.1_vae.safetensors",
"directory": "vae"
}
]
},
"widgets_values": [
"wan_2.1_vae.safetensors"
]
},
{
"id": 7,
"type": "CLIPTextEncode",
"pos": [
426.6974792480469,
722.106201171875
],
"size": [
425.27801513671875,
180.6060791015625
],
"flags": {},
"order": 11,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 75
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"slot_index": 0,
"links": [
129,
130
]
}
],
"title": "CLIP Text Encode (Negative Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode",
"cnr_id": "comfy-core",
"ver": "0.3.45"
},
"widgets_values": [
""
],
"color": "#322",
"bgcolor": "#533"
},
{
"id": 63,
"type": "Note",
"pos": [
20,
720
],
"size": [
370,
120
],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Video Size",
"properties": {},
"widgets_values": [
"By default, we set the video to a smaller size for users with low VRAM. If you have enough VRAM, you can change the size"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 60,
"type": "CreateVideo",
"pos": [
1653.5601806640625,
79.06910705566406
],
"size": [
270,
78
],
"flags": {},
"order": 17,
"mode": 0,
"inputs": [
{
"name": "images",
"type": "IMAGE",
"link": 131
},
{
"name": "audio",
"shape": 7,
"type": "AUDIO",
"link": null
}
],
"outputs": [
{
"name": "VIDEO",
"type": "VIDEO",
"links": [
132
]
}
],
"properties": {
"Node name for S&R": "CreateVideo",
"cnr_id": "comfy-core",
"ver": "0.3.45"
},
"widgets_values": [
16
]
},
{
"id": 8,
"type": "VAEDecode",
"pos": [
1657.0252685546875,
-42.49197006225586
],
"size": [
210,
46
],
"flags": {},
"order": 16,
"mode": 0,
"inputs": [
{
"name": "samples",
"type": "LATENT",
"link": 124
},
{
"name": "vae",
"type": "VAE",
"link": 76
}
],
"outputs": [
{
"name": "IMAGE",
"type": "IMAGE",
"slot_index": 0,
"links": [
131
]
}
],
"properties": {
"Node name for S&R": "VAEDecode",
"cnr_id": "comfy-core",
"ver": "0.3.45"
},
"widgets_values": []
},
{
"id": 69,
"type": "Note",
"pos": [
1086.6795654296875,
359.7047424316406
],
"size": [
379.72747802734375,
135.40545654296875
],
"flags": {},
"order": 2,
"mode": 0,
"inputs": [],
"outputs": [],
"properties": {},
"widgets_values": [
"Set steps to 4, split step to 2.\n\nShift to 5\n\nPlease let me know if you run into any issues.\n\nhttps://github.com/ModelTC/Wan2.2-Lightning/issues/3\nhttps://github.com/ModelTC/Wan2.2-Lightning"
],
"color": "#432",
"bgcolor": "#653"
},
{
"id": 37,
"type": "UNETLoader",
"pos": [
34.225364685058594,
-71.77911376953125
],
"size": [
346.7470703125,
82
],
"flags": {},
"order": 3,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"slot_index": 0,
"links": [
133
]
}
],
"properties": {
"Node name for S&R": "UNETLoader",
"cnr_id": "comfy-core",
"ver": "0.3.45",
"models": [
{
"name": "wan2.2_t2v_high_noise_14B_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_high_noise_14B_fp8_scaled.safetensors",
"directory": "diffusion_models"
}
]
},
"widgets_values": [
"wan2.2_t2v_high_noise_14B_fp16.safetensors",
"default"
]
},
{
"id": 55,
"type": "ModelSamplingSD3",
"pos": [
438.16534423828125,
350.7990417480469
],
"size": [
210,
58
],
"flags": {},
"order": 13,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 136
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"slot_index": 0,
"links": [
123
]
}
],
"properties": {
"Node name for S&R": "ModelSamplingSD3",
"cnr_id": "comfy-core",
"ver": "0.3.45"
},
"widgets_values": [
5
]
},
{
"id": 54,
"type": "ModelSamplingSD3",
"pos": [
632.8981323242188,
286.01043701171875
],
"size": [
210,
60
],
"flags": {},
"order": 12,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 134
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"slot_index": 0,
"links": [
125
]
}
],
"properties": {
"Node name for S&R": "ModelSamplingSD3",
"cnr_id": "comfy-core",
"ver": "0.3.45"
},
"widgets_values": [
5
]
},
{
"id": 56,
"type": "UNETLoader",
"pos": [
34.70310592651367,
57.175601959228516
],
"size": [
346.7470703125,
82
],
"flags": {},
"order": 4,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"slot_index": 0,
"links": [
135
]
}
],
"properties": {
"Node name for S&R": "UNETLoader",
"cnr_id": "comfy-core",
"ver": "0.3.45",
"models": [
{
"name": "wan2.2_t2v_low_noise_14B_fp8_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_low_noise_14B_fp8_scaled.safetensors",
"directory": "diffusion_models"
}
]
},
"widgets_values": [
"wan2.2_t2v_low_noise_14B_fp16.safetensors",
"default"
]
},
{
"id": 57,
"type": "KSamplerAdvanced",
"pos": [
947.5302124023438,
-68.99319458007812
],
"size": [
304.748046875,
334
],
"flags": {},
"order": 14,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 125
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 127
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 129
},
{
"name": "latent_image",
"type": "LATENT",
"link": 126
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
113
]
}
],
"properties": {
"Node name for S&R": "KSamplerAdvanced",
"cnr_id": "comfy-core",
"ver": "0.3.45"
},
"widgets_values": [
"enable",
42,
"fixed",
4,
1,
"euler",
"simple",
0,
2,
"enable"
]
},
{
"id": 58,
"type": "KSamplerAdvanced",
"pos": [
1302.703857421875,
-70.93173217773438
],
"size": [
296.40533447265625,
334.2197265625
],
"flags": {},
"order": 15,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 123
},
{
"name": "positive",
"type": "CONDITIONING",
"link": 128
},
{
"name": "negative",
"type": "CONDITIONING",
"link": 130
},
{
"name": "latent_image",
"type": "LATENT",
"link": 113
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [
124
]
}
],
"properties": {
"Node name for S&R": "KSamplerAdvanced",
"cnr_id": "comfy-core",
"ver": "0.3.45"
},
"widgets_values": [
"disable",
42,
"fixed",
4,
1,
"euler",
"simple",
2,
4,
"disable"
]
},
{
"id": 38,
"type": "CLIPLoader",
"pos": [
38.86457061767578,
189.26513671875
],
"size": [
346.391845703125,
106
],
"flags": {},
"order": 5,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "CLIP",
"type": "CLIP",
"slot_index": 0,
"links": [
74,
75
]
}
],
"properties": {
"Node name for S&R": "CLIPLoader",
"cnr_id": "comfy-core",
"ver": "0.3.45",
"models": [
{
"name": "umt5_xxl_fp8_e4m3fn_scaled.safetensors",
"url": "https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/text_encoders/umt5_xxl_fp8_e4m3fn_scaled.safetensors",
"directory": "text_encoders"
}
]
},
"widgets_values": [
"umt5_xxl_fp8_e4m3fn_scaled.safetensors",
"wan",
"default"
]
},
{
"id": 59,
"type": "EmptyHunyuanLatentVideo",
"pos": [
35.60546875,
508.12109375
],
"size": [
315,
130
],
"flags": {},
"order": 6,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"slot_index": 0,
"links": [
126
]
}
],
"properties": {
"Node name for S&R": "EmptyHunyuanLatentVideo",
"cnr_id": "comfy-core",
"ver": "0.3.45"
},
"widgets_values": [
1280,
720,
81,
1
]
},
{
"id": 61,
"type": "SaveVideo",
"pos": [
1658.5662841796875,
215.62408447265625
],
"size": [
695.2254028320312,
820
],
"flags": {},
"order": 18,
"mode": 0,
"inputs": [
{
"name": "video",
"type": "VIDEO",
"link": 132
}
],
"outputs": [],
"properties": {
"Node name for S&R": "SaveVideo",
"cnr_id": "comfy-core",
"ver": "0.3.45"
},
"widgets_values": [
"WanVideo2_2_T2V_Lightning",
"mp4",
"h264",
""
]
},
{
"id": 6,
"type": "CLIPTextEncode",
"pos": [
426.6974792480469,
522.106201171875
],
"size": [
422.84503173828125,
164.31304931640625
],
"flags": {},
"order": 10,
"mode": 0,
"inputs": [
{
"name": "clip",
"type": "CLIP",
"link": 74
}
],
"outputs": [
{
"name": "CONDITIONING",
"type": "CONDITIONING",
"slot_index": 0,
"links": [
127,
128
]
}
],
"title": "CLIP Text Encode (Positive Prompt)",
"properties": {
"Node name for S&R": "CLIPTextEncode",
"cnr_id": "comfy-core",
"ver": "0.3.45"
},
"widgets_values": [
"Static camera shot, wide shot, sunrise time, side lighting, warm colors. A dinosaur runs swiftly through a savanna, sunlight casting long shadows on the grassy terrain. The lions cower and scatter as the dinosaur approaches, bushes and trees swaying slightly in the breeze. The sky is a beautiful blend of orange and pink hues, highlighting the dramatic scene."
],
"color": "#232",
"bgcolor": "#353"
},
{
"id": 67,
"type": "LoraLoaderModelOnly",
"pos": [
491.8816833496094,
-57.566314697265625
],
"size": [
315,
82
],
"flags": {},
"order": 8,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 133
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
134
]
}
],
"properties": {
"Node name for S&R": "LoraLoaderModelOnly"
},
"widgets_values": [
"Wan2.2-T2V-A14B-4steps-lora-rank64-Seko-V1/high_noise_model.safetensors",
1
]
},
{
"id": 68,
"type": "LoraLoaderModelOnly",
"pos": [
487.37158203125,
72.34300994873047
],
"size": [
315,
82
],
"flags": {},
"order": 9,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": 135
}
],
"outputs": [
{
"name": "MODEL",
"type": "MODEL",
"links": [
136
]
}
],
"properties": {
"Node name for S&R": "LoraLoaderModelOnly"
},
"widgets_values": [
"Wan2.2-T2V-A14B-4steps-lora-rank64-Seko-V1/low_noise_model.safetensors",
1
]
},
{
"id": 62,
"type": "MarkdownNote",
"pos": [
-480,
-120
],
"size": [
476.29986572265625,
437.32293701171875
],
"flags": {},
"order": 7,
"mode": 0,
"inputs": [],
"outputs": [],
"title": "Model Links",
"properties": {},
"widgets_values": [
"**Diffusion Model**\n- [wan2.2_t2v_high_noise_14B_fp16.safetensors](https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_high_noise_14B_fp16.safetensors)\n- [wan2.2_t2v_low_noise_14B_fp16.safetensors](https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_low_noise_14B_fp16.safetensors)\n\n**VAE**\n- [wan_2.1_vae.safetensors](https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/vae/wan_2.1_vae.safetensors)\n\n**Text Encoder** \n- [umt5_xxl_fp8_e4m3fn_scaled.safetensors](https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/text_encoders/umt5_xxl_fp8_e4m3fn_scaled.safetensors)\n\n**Loras** \n- [Wan2.2-T2V-A14B-4steps-lora-rank64-Seko-V1](https://huggingface.co/lightx2v/Wan2.2-Lightning/tree/main/Wan2.2-T2V-A14B-4steps-lora-rank64-Seko-V1)\n\n\nFile save location\n\n```\nComfyUI/\n├───📂 models/\n│ ├───📂 diffusion_models/\n│ │ ├─── wan2.2_t2v_low_noise_14B_fp16.safetensors\n│ │ └─── wan2.2_t2v_high_noise_14B_fp16.safetensors\n│ ├───📂 text_encoders/\n│ │ └─── umt5_xxl_fp8_e4m3fn_scaled.safetensors \n│ ├───📂 loras/\n│ ├─────📂 Wan2.2-T2V-A14B-4steps-lora-rank64-Seko-V1/\n│ │ ├────── high_noise_model.safetensors\n│ │ └────── low_noise_model.safetensors \n│ └───📂 vae/\n│ └── wan_2.1_vae.safetensors\n\n```\n"
],
"color": "#432",
"bgcolor": "#653"
}
],
"links": [
[
74,
38,
0,
6,
0,
"CLIP"
],
[
75,
38,
0,
7,
0,
"CLIP"
],
[
76,
39,
0,
8,
1,
"VAE"
],
[
113,
57,
0,
58,
3,
"LATENT"
],
[
123,
55,
0,
58,
0,
"MODEL"
],
[
124,
58,
0,
8,
0,
"LATENT"
],
[
125,
54,
0,
57,
0,
"MODEL"
],
[
126,
59,
0,
57,
3,
"LATENT"
],
[
127,
6,
0,
57,
1,
"CONDITIONING"
],
[
128,
6,
0,
58,
1,
"CONDITIONING"
],
[
129,
7,
0,
57,
2,
"CONDITIONING"
],
[
130,
7,
0,
58,
2,
"CONDITIONING"
],
[
131,
8,
0,
60,
0,
"IMAGE"
],
[
132,
60,
0,
61,
0,
"VIDEO"
],
[
133,
37,
0,
67,
0,
"MODEL"
],
[
134,
67,
0,
54,
0,
"MODEL"
],
[
135,
56,
0,
68,
0,
"MODEL"
],
[
136,
68,
0,
55,
0,
"MODEL"
]
],
"groups": [
{
"id": 1,
"title": "Step1 - Load models",
"bounding": [
20,
-150,
822.4443359375,
578.7847900390625
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 2,
"title": "Step2 - Video size",
"bounding": [
20,
450,
367.5572814941406,
222.82713317871094
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
},
{
"id": 3,
"title": "Step3 Prompt",
"bounding": [
416.6974792480469,
452.1065673828125,
445.27801513671875,
464.2060852050781
],
"color": "#3f789e",
"font_size": 24,
"flags": {}
}
],
"config": {},
"extra": {
"ds": {
"scale": 0.6934334949441332,
"offset": [
-280.98502813910534,
197.5228024674855
]
},
"frontendVersion": "1.25.3",
"VHS_latentpreview": false,
"VHS_latentpreviewrate": 0,
"VHS_MetadataImage": true,
"VHS_KeepIntermediate": true
},
"version": 0.4
}
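
The workflow above only references the model files by name, so they must already be in place before it will run. Below is a minimal, hypothetical download helper (not part of the workflow JSON) that fetches the files listed in the Model Links note into the documented ComfyUI/models/ layout. It assumes the script is run from the directory containing ComfyUI/ and that the Hugging Face URLs in the note are reachable; the two LoRA URLs are inferred from the lightx2v/Wan2.2-Lightning tree link and the file names used by the LoraLoaderModelOnly nodes, so adjust them if that repo's layout differs.

```python
"""Hypothetical helper: fetch the models referenced by this workflow into ComfyUI/models/."""
import urllib.request
from pathlib import Path

# (URL, destination) pairs taken from the Model Links note above.
# The two LoRA URLs are assumptions constructed from the repo tree link.
MODELS = [
    ("https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_high_noise_14B_fp16.safetensors",
     "ComfyUI/models/diffusion_models/wan2.2_t2v_high_noise_14B_fp16.safetensors"),
    ("https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/diffusion_models/wan2.2_t2v_low_noise_14B_fp16.safetensors",
     "ComfyUI/models/diffusion_models/wan2.2_t2v_low_noise_14B_fp16.safetensors"),
    ("https://huggingface.co/Comfy-Org/Wan_2.1_ComfyUI_repackaged/resolve/main/split_files/text_encoders/umt5_xxl_fp8_e4m3fn_scaled.safetensors",
     "ComfyUI/models/text_encoders/umt5_xxl_fp8_e4m3fn_scaled.safetensors"),
    ("https://huggingface.co/Comfy-Org/Wan_2.2_ComfyUI_Repackaged/resolve/main/split_files/vae/wan_2.1_vae.safetensors",
     "ComfyUI/models/vae/wan_2.1_vae.safetensors"),
    ("https://huggingface.co/lightx2v/Wan2.2-Lightning/resolve/main/Wan2.2-T2V-A14B-4steps-lora-rank64-Seko-V1/high_noise_model.safetensors",
     "ComfyUI/models/loras/Wan2.2-T2V-A14B-4steps-lora-rank64-Seko-V1/high_noise_model.safetensors"),
    ("https://huggingface.co/lightx2v/Wan2.2-Lightning/resolve/main/Wan2.2-T2V-A14B-4steps-lora-rank64-Seko-V1/low_noise_model.safetensors",
     "ComfyUI/models/loras/Wan2.2-T2V-A14B-4steps-lora-rank64-Seko-V1/low_noise_model.safetensors"),
]

for url, target in MODELS:
    dest = Path(target)
    dest.parent.mkdir(parents=True, exist_ok=True)  # create the ComfyUI/models/... folders
    if dest.exists():
        print(f"skip (already present): {dest}")
        continue
    print(f"downloading {url}\n        -> {dest}")
    urllib.request.urlretrieve(url, str(dest))  # note: the fp16 diffusion models are very large
```

In practice a downloader with resume support (for example the official Hugging Face CLI) may be preferable for files this size; the sketch above is mainly meant to make the expected file layout concrete.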