carlex3321 committed on
Commit
e20add1
·
verified ·
1 Parent(s): ed6879d

Update Dockerfile

Browse files
Files changed (1) hide show
  1. Dockerfile +24 -4
Dockerfile CHANGED
@@ -83,9 +83,29 @@ RUN pip install --index-url https://download.pytorch.org/whl/cu128 \
83
  # ---------------- Toolchain, Triton, FA2 (sem bnb) ----------------
84
  RUN pip install packaging ninja cmake pybind11 scikit-build cython hf_transfer numpy==1.24.4
85
 
86
- # Triton 3.x (sem triton.ops)
 
 
 
 
 
 
 
87
  RUN pip uninstall -y triton || true && \
88
- pip install -v --no-build-isolation triton==3.4.0
 
 
 
 
 
 
 
 
 
 
 
 
 
89
 
90
  # FlashAttention 2.8.x
91
  RUN pip install flash-attn==2.8.3 --no-build-isolation || \
@@ -94,10 +114,10 @@ RUN pip install flash-attn==2.8.3 --no-build-isolation || \
94
  pip install flash-attn==2.8.0.post2 --no-build-isolation
95
 
96
  # Diffusers/Transformers estáveis (sem dev)
97
- RUN pip install --no-cache-dir diffusers==0.31.0 transformers==4.44.2 accelerate==0.34.2 omegaconf==2.3.0
98
 
99
  # Opcional: seu fork de otimizações
100
- # RUN pip install -U git+https://github.com/carlex22/diffusers-aduc-sdr
101
 
102
  # ---------------- Repositórios auxiliares ----------------
103
  RUN git clone https://github.com/bytedance-seed/VINCIE.git && \
 
83
  # ---------------- Toolchain, Triton, FA2 (sem bnb) ----------------
84
  RUN pip install packaging ninja cmake pybind11 scikit-build cython hf_transfer numpy==1.24.4
85
 
86
+
87
+
88
+ # ---------------- Toolchain base ----------------
89
+ RUN pip install packaging ninja cmake pybind11 scikit-build cython hf_transfer numpy==1.24.4
90
+
91
+ # ---------------- Instalação via wheels (HF Hub) ----------------
92
+ # Triton, Apex e Q8 pré-compilados (Python 3.10, manylinux x86_64)
93
+ # Use sempre --no-cache-dir e --no-build-isolation para evitar recompilar
94
  RUN pip uninstall -y triton || true && \
95
+ pip install --no-cache-dir --no-build-isolation \
96
+ https://huggingface.co/carlex3321/aduc-sdr/resolve/main/triton-3.5.0%2Bgit51021fb2-cp310-cp310-linux_x86_64.whl && \
97
+ pip install --no-cache-dir --no-build-isolation \
98
+ https://huggingface.co/carlex3321/aduc-sdr/resolve/main/apex-0.1-cp310-cp310-linux_x86_64.whl && \
99
+ pip install --no-cache-dir --no-build-isolation \
100
+ https://huggingface.co/carlex3321/aduc-sdr/resolve/main/q8_kernels-0.0.5-cp310-cp310-linux_x86_64.whl
101
+
102
+ # ---------------- FlashAttention 2.8.x ----------------
103
+ # Mantém fallback em cascata caso a versão mais nova falhe
104
+ RUN pip install --no-cache-dir flash-attn==2.8.3 --no-build-isolation || \
105
+ pip install --no-cache-dir flash-attn==2.8.2 --no-build-isolation || \
106
+ pip install --no-cache-dir flash-attn==2.8.1 --no-build-isolation || \
107
+ pip install --no-cache-dir flash-attn==2.8.0.post2 --no-build-isolation
108
+
109
 
110
  # FlashAttention 2.8.x
111
  RUN pip install flash-attn==2.8.3 --no-build-isolation || \
 
114
  pip install flash-attn==2.8.0.post2 --no-build-isolation
115
 
116
  # Diffusers/Transformers estáveis (sem dev)
117
+ RUN pip install --no-cache-dir transformers==4.44.2 accelerate==0.34.2 omegaconf==2.3.0
118
 
119
  # Opcional: seu fork de otimizações
120
+ RUN pip install -U git+https://github.com/carlex22/diffusers-aduc-sdr
121
 
122
  # ---------------- Repositórios auxiliares ----------------
123
  RUN git clone https://github.com/bytedance-seed/VINCIE.git && \