update app
app.py CHANGED
@@ -113,9 +113,9 @@ pipe.load_lora_weights("dx8152/Qwen-Image-Edit-2509-Fusion",
                         weight_name="溶图.safetensors",
                         adapter_name="fusion")
 
-pipe.load_lora_weights("
-                        weight_name="
-                        adapter_name="
+pipe.load_lora_weights("ostris/qwen_image_edit_2509_shirt_design",
+                        weight_name="qwen_image_edit_2509_shirt_design.safetensors",
+                        adapter_name="shirt_design")
 
 try:
     pipe.transformer.set_attn_processor(QwenDoubleStreamAttnProcessorFA3())
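For context, this hunk registers a third LoRA alongside the existing "fusion" (and "texture") adapters. The sketch below shows the general diffusers multi-adapter pattern it relies on: each `load_lora_weights` call tags a LoRA with an `adapter_name`, and `set_adapters` later decides which one is active. Only the two LoRA repos visible in the diff are confirmed; the base checkpoint ID, dtype, and the final `set_adapters` call are assumptions about the rest of app.py, not code from it.

```python
# Minimal sketch of the multi-adapter pattern in diffusers (PEFT backend).
# Assumed: base checkpoint ID and dtype; only the two LoRA repos below appear in the diff.
import torch
from diffusers import DiffusionPipeline

pipe = DiffusionPipeline.from_pretrained(
    "Qwen/Qwen-Image-Edit-2509",   # assumed base checkpoint
    torch_dtype=torch.bfloat16,
).to("cuda")

# Each LoRA is registered under its own adapter name so it can be toggled later.
pipe.load_lora_weights("dx8152/Qwen-Image-Edit-2509-Fusion",
                       weight_name="溶图.safetensors",
                       adapter_name="fusion")
pipe.load_lora_weights("ostris/qwen_image_edit_2509_shirt_design",
                       weight_name="qwen_image_edit_2509_shirt_design.safetensors",
                       adapter_name="shirt_design")

# At request time, exactly one adapter is switched on; the others stay loaded but inactive.
pipe.set_adapters(["shirt_design"], adapter_weights=[1.0])
```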
@@ -162,8 +162,8 @@ def infer(
         raise gr.Error("Please upload both images for Fusion/Texture/FaceSwap tasks.")
 
     if not prompt:
-        if lora_adapter == "
-            prompt = "
+        if lora_adapter == "Cloth-Design-Fuse":
+            prompt = "Put this design on their shirt."
         elif lora_adapter == "Texture Edit":
             prompt = "Apply texture to object."
         elif lora_adapter == "Fuse-Objects":
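This hunk gives the new task a canned prompt when the user leaves the prompt box empty. The if/elif chain can be read as a lookup; the helper below is a hypothetical restatement for clarity, not code from app.py.

```python
# Hypothetical restatement of the default-prompt branch as a dict lookup.
DEFAULT_PROMPTS = {
    "Cloth-Design-Fuse": "Put this design on their shirt.",
    "Texture Edit": "Apply texture to object.",
    # "Fuse-Objects" has its own default further down the original chain.
}

def resolve_prompt(prompt: str, lora_adapter: str) -> str:
    """Fall back to the task's canned prompt when the user typed nothing."""
    if not prompt:
        return DEFAULT_PROMPTS.get(lora_adapter, prompt)
    return prompt
```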
@@ -172,7 +172,7 @@ def infer(
     adapters_map = {
         "Texture Edit": "texture",
         "Fuse-Objects": "fusion",
-        "
+        "Cloth-Design-Fuse": "shirt_design",
     }
 
     active_adapter = adapters_map.get(lora_adapter)
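The UI label "Cloth-Design-Fuse" now maps to the "shirt_design" adapter name registered above. A plausible way the mapped name gets applied, assuming the app activates one adapter per request via diffusers' `set_adapters` (the helper name is hypothetical):

```python
# Hypothetical helper showing how the mapped name would activate a single LoRA.
ADAPTERS_MAP = {
    "Texture Edit": "texture",
    "Fuse-Objects": "fusion",
    "Cloth-Design-Fuse": "shirt_design",
}

def activate_adapter(pipe, lora_adapter: str) -> None:
    active_adapter = ADAPTERS_MAP.get(lora_adapter)
    if active_adapter is None:
        raise ValueError(f"Unknown task: {lora_adapter!r}")
    # Enable only the selected LoRA; the others remain loaded but contribute nothing.
    pipe.set_adapters([active_adapter], adapter_weights=[1.0])
```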
@@ -269,7 +269,7 @@ with gr.Blocks(css=css, theme=steel_blue_theme) as demo:
         examples=[
             ["examples/mug.jpg", "examples/wood.jpg", "Apply wood texture to the mug.", "Texture Edit"],
             ["examples/room.jpg", "examples/chair.jpg", "Put the chair in the room naturally.", "Fuse-Objects"],
-            ["examples/body.jpg", "examples/face.jpg", "
+            ["examples/body.jpg", "examples/face.jpg", "Put this design on their shirt.", "Cloth-Design-Fuse"],
         ],
         inputs=[image_1, image_2, prompt, lora_adapter],
         outputs=[output_image, seed],
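The last hunk swaps the third example row so it exercises the new task. The sketch below shows how such rows typically plug into `gr.Examples`; the component definitions and the stub `infer` are assumptions standing in for the rest of app.py, with only the example rows, input/output lists, and task names taken from the diff.

```python
# Sketch of the gr.Examples wiring, under assumed component definitions.
import gradio as gr

def infer(image_1, image_2, prompt, lora_adapter):
    # Stub standing in for the real inference function in app.py.
    return image_1, 0

with gr.Blocks() as demo:
    image_1 = gr.Image(label="Image 1", type="pil")
    image_2 = gr.Image(label="Image 2", type="pil")
    prompt = gr.Textbox(label="Prompt")
    lora_adapter = gr.Dropdown(
        ["Texture Edit", "Fuse-Objects", "Cloth-Design-Fuse"], label="Task"
    )
    output_image = gr.Image(label="Result")
    seed = gr.Number(label="Seed")

    gr.Examples(
        examples=[
            ["examples/mug.jpg", "examples/wood.jpg", "Apply wood texture to the mug.", "Texture Edit"],
            ["examples/room.jpg", "examples/chair.jpg", "Put the chair in the room naturally.", "Fuse-Objects"],
            ["examples/body.jpg", "examples/face.jpg", "Put this design on their shirt.", "Cloth-Design-Fuse"],
        ],
        inputs=[image_1, image_2, prompt, lora_adapter],
        outputs=[output_image, seed],
        fn=infer,
        cache_examples=False,
    )
```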