Commit 1ad30ed
Parent(s): 5b21912
update

Files changed:
- app.py +9 -10
- requirements.txt +1 -1
app.py CHANGED

@@ -42,17 +42,16 @@ def install_dependencies(enable_optimization=False):
         except (ImportError, ModuleNotFoundError):
             return False
 
-
-
-
-
-
-
-
-
-
+    # flash attention is needed no matter optimization is enabled or not
+    # because Hugging Face transformers detects flash_attn is a dependency in STDiT
+    # thus, we need to install it no matter what
+    if not _is_package_available("flash_attn"):
+        subprocess.run(
+            f"{sys.executable} -m pip install flash-attn --no-build-isolation",
+            env={"FLASH_ATTENTION_SKIP_CUDA_BUILD": "TRUE"},
+            shell=True,
+        )
 
-    if enable_optimization:
         # install apex for fused layernorm
         if not _is_package_available("apex"):
             subprocess.run(
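The substance of the app.py change: the flash-attn install is pulled out from behind the enable_optimization flag because, per the new comments, Hugging Face transformers detects flash_attn as a dependency when it loads STDiT, so the package has to be importable either way. Below is a minimal, self-contained sketch of this runtime-install pattern. It assumes the same helper name _is_package_available as in the diff, and the caveat comments are editorial notes, not part of the commit.

import importlib
import subprocess
import sys

def _is_package_available(name: str) -> bool:
    # Probe importability in the running interpreter, which is the same
    # check that a later "import flash_attn" will effectively perform.
    try:
        importlib.import_module(name)
        return True
    except (ImportError, ModuleNotFoundError):
        return False

if not _is_package_available("flash_attn"):
    # FLASH_ATTENTION_SKIP_CUDA_BUILD=TRUE tells flash-attn's setup to skip
    # compiling the CUDA extension during install.
    subprocess.run(
        f"{sys.executable} -m pip install flash-attn --no-build-isolation",
        # Caveat: env= replaces the child's entire environment. To keep PATH,
        # HOME, proxies, etc., merge instead:
        # env={**os.environ, "FLASH_ATTENTION_SKIP_CUDA_BUILD": "TRUE"}
        env={"FLASH_ATTENTION_SKIP_CUDA_BUILD": "TRUE"},
        shell=True,
    )

Probing with importlib rather than pip metadata means the check reflects what the running interpreter can actually import; the shell command still works with the stripped-down env= because both /bin/sh and sys.executable are invoked by absolute path.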
requirements.txt CHANGED

@@ -1,3 +1,3 @@
 transformers
 xformers
-git+https://github.com/hpcaitech/Open-Sora.git
+git+https://github.com/hpcaitech/Open-Sora.git
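On the requirements.txt side, the removed and added line 3 read as the same git URL here. Requirements files accept pip VCS URLs like this one, and a specific branch, tag, or commit can be pinned with an @ suffix; the ref in the sketch below is purely illustrative, not something this commit specifies.

transformers
xformers
# Hypothetical ref pin (illustrative only; not part of this commit):
git+https://github.com/hpcaitech/Open-Sora.git@main

Pinning a commit hash rather than a branch is the stricter choice when reproducible builds matter, since a branch ref like @main moves over time.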