remiai3 committed
Commit 84e9994 · verified · 1 Parent(s): bc3d566

Update app.py

Files changed (1): app.py (+6 -7)
app.py CHANGED
@@ -6,11 +6,12 @@ import os
 from PIL import Image
 import base64
 import time
-from accelerate import Accelerator
 import logging
 
 # Disable GPU detection
 os.environ["CUDA_VISIBLE_DEVICES"] = ""
+os.environ["CUDA_DEVICE_ORDER"] = ""
+os.environ["TORCH_CUDA_ARCH_LIST"] = ""
 torch.set_default_device("cpu")
 
 app = Flask(__name__, static_folder='static')
@@ -20,8 +21,8 @@ CORS(app)
 logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
 logger = logging.getLogger(__name__)
 
-# Initialize Accelerator for CPU
-accelerator = Accelerator(device_placement=False)
+# Log device in use
+logger.info(f"Device in use: {torch.device('cpu')}")
 
 # Model cache
 model_cache = {}
@@ -49,10 +50,8 @@ def load_model(model_id):
             low_cpu_mem_usage=True
         )
         pipe.scheduler = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config)
-        pipe = accelerator.prepare(pipe)
         pipe.enable_attention_slicing()
-        pipe.enable_sequential_cpu_offload()
-        pipe.to("cpu")
+        pipe.to(torch.device("cpu"))
         model_cache[model_id] = pipe
         logger.info(f"Model {model_id} loaded successfully")
     except Exception as e:
@@ -90,7 +89,7 @@ def generate():
 
     width, height = ratio_to_dims.get(ratio, (256, 256))
     pipe = load_model(model_id)
-    pipe.to("cpu")
+    pipe.to(torch.device("cpu"))
 
     images = []
     num_inference_steps = 20 if model_id == 'ssd-1b' else 30
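
Taken together, the commit moves the app from accelerate-managed placement to plain, explicit CPU execution. Below is a minimal sketch of the loading path it converges on; the DiffusionPipeline class, fp32 dtype, example repo id, and prompt are assumptions, since the diff shows only the env vars, the scheduler swap, attention slicing, the explicit pipe.to(torch.device("cpu")), and the 20-vs-30 step split.

# Minimal sketch of the CPU-only path after this commit (assumptions noted above).
import os

os.environ["CUDA_VISIBLE_DEVICES"] = ""    # hide GPUs before torch initializes CUDA
os.environ["CUDA_DEVICE_ORDER"] = ""
os.environ["TORCH_CUDA_ARCH_LIST"] = ""

import torch
from diffusers import DiffusionPipeline, DPMSolverMultistepScheduler

torch.set_default_device("cpu")

model_cache = {}

def load_model(model_id):
    """Load a pipeline once, pin it to CPU, and cache it."""
    if model_id in model_cache:
        return model_cache[model_id]
    pipe = DiffusionPipeline.from_pretrained(
        model_id,
        torch_dtype=torch.float32,  # assumption: fp32 as the safe CPU default
        low_cpu_mem_usage=True,
    )
    pipe.scheduler = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config)
    pipe.enable_attention_slicing()   # lower peak memory at some speed cost
    pipe.to(torch.device("cpu"))      # explicit placement, as in the commit
    model_cache[model_id] = pipe
    return pipe

if __name__ == "__main__":
    model_id = "segmind/SSD-1B"       # example repo id, not taken from the diff
    pipe = load_model(model_id)
    steps = 20 if model_id.lower().endswith("ssd-1b") else 30  # mirrors the diff's split
    image = pipe("a lighthouse at dusk",
                 num_inference_steps=steps, width=256, height=256).images[0]
    image.save("out.png")

Dropping accelerator.prepare(pipe) and enable_sequential_cpu_offload() fits this setup: accelerate's prepare() targets models, optimizers, and dataloaders rather than whole diffusers pipelines, and sequential offload exists to shuttle weights between an accelerator and host memory, so neither has work to do once no GPU is visible.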