manoskary committed on
Commit eb50b30 · 1 Parent(s): 3f9798c

Remove CUDA environment variable setting and enhance GPU handling in run_inference function

Files changed (1)
  1. app.py +3 -5
app.py CHANGED
@@ -25,7 +25,6 @@ CHECKPOINT_EXPECTED = [
 ]
 
 os.makedirs(OUTPUT_ROOT, exist_ok=True)
-os.environ.setdefault("CUDA_VISIBLE_DEVICES", str(DEFAULT_CONFIG.get("GPU_id", "0")))
 
 
 def ensure_checkpoints() -> None:
@@ -138,9 +137,7 @@ def run_inference(
     musical_mask_start: float,
     musical_mask_end: float,
     seed: Optional[float],
-):
-    if not torch.cuda.is_available():
-        raise gr.Error("This Space has no GPU attached. Please run locally with a GPU or duplicate to a GPU Space.")
+):
 
     condition_type = _validate_condition_choices(condition_type)
     config = _build_base_config()
@@ -181,7 +178,8 @@ def run_inference(
 
     try:
         models = model_cache.get(config)
-        pipe = models["pipe"]
+        pipe = models["pipe"].to("cuda")
+        pipe.enable_attention_slicing()
         pipe.scheduler.config.sigma_min = config["sigma_min"]
         pipe.scheduler.config.sigma_max = config["sigma_max"]
         prompt_for_model = "" if config["no_text"] else (prompt_text or "")
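Setting `CUDA_VISIBLE_DEVICES` at import time pins which physical GPU the process can see before torch initializes; this commit drops that in favor of moving the cached pipeline to the default CUDA device when `run_inference` runs, and enables attention slicing to reduce peak VRAM. As a rough illustration, here is a minimal sketch of the resulting setup step, assuming `models["pipe"]` is a diffusers-style pipeline (`.to(...)` and `enable_attention_slicing()` are standard torch/diffusers calls); the `prepare_pipeline` helper name and the CPU fallback are illustrative assumptions, not part of the commit:

```python
import torch

def prepare_pipeline(models: dict, config: dict):
    """Hypothetical helper mirroring the updated try-block in run_inference.

    The commit itself moves the pipeline straight to "cuda"; the CPU
    fallback here is an assumption for GPU-less local runs.
    """
    device = "cuda" if torch.cuda.is_available() else "cpu"
    pipe = models["pipe"].to(device)
    if device == "cuda":
        # Attention slicing computes attention in smaller chunks, trading a
        # little throughput for a lower peak-memory footprint on shared GPUs.
        pipe.enable_attention_slicing()
    pipe.scheduler.config.sigma_min = config["sigma_min"]
    pipe.scheduler.config.sigma_max = config["sigma_max"]
    return pipe
```

Note that with the `torch.cuda.is_available()` guard removed, the hard-coded `.to("cuda")` will raise on CPU-only hosts, so the Space still needs GPU hardware attached.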