leoeric committed
Commit dc2ca3c · Parent: 91ce5f5

Disable FSDP for single GPU inference to ensure GPU usage

Files changed (2)
  1. app.py +3 -0
  2. sample.py +2 -1
app.py CHANGED
@@ -110,8 +110,11 @@ if torch.cuda.is_available():
     print(f"✅ CUDA available! Device: {torch.cuda.get_device_name(0)}")
     print(f" CUDA Version: {torch.version.cuda}")
     print(f" PyTorch Version: {torch.__version__}")
+    print(f" GPU Count: {torch.cuda.device_count()}")
+    print(f" Current Device: {torch.cuda.current_device()}")
 else:
     print("⚠️ CUDA not available. Make sure GPU hardware is selected in Space settings.")
+    print(f" PyTorch Version: {torch.__version__}")
 
 def generate_image(prompt, aspect_ratio, cfg, seed, checkpoint_file, config_path):
     """Generate image from text prompt."""
sample.py CHANGED
@@ -205,7 +205,8 @@ def main(args: argparse.Namespace) -> None:
     args.context_length = args.local_attn_window - 1
 
     # Override some settings for sampling
-    args.fsdp = 1  # sampling using FSDP if available.
+    # Disable FSDP for single GPU inference (FSDP can cause CPU fallback)
+    args.fsdp = 0  # Disable FSDP for single GPU - use regular GPU inference
     if args.use_pretrained_lm is not None:
         args.text = args.use_pretrained_lm
 
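
If sample.py ever needs to serve both single-GPU Spaces and multi-GPU runs, one option is to key the override on the visible device count rather than hard-coding it. A minimal sketch, assuming an argparse-style args object with an fsdp flag as in sample.py; the gpu_count > 1 policy is an assumption, not what this commit does:

import argparse
import torch

def configure_fsdp(args: argparse.Namespace) -> argparse.Namespace:
    """Enable FSDP only when more than one CUDA device is visible (assumed policy)."""
    gpu_count = torch.cuda.device_count() if torch.cuda.is_available() else 0
    # With a single GPU (or CPU only), plain inference avoids FSDP's sharding
    # overhead and the CPU-fallback behavior the commit message describes.
    args.fsdp = 1 if gpu_count > 1 else 0
    return args

# Illustrative usage:
args = configure_fsdp(argparse.Namespace(fsdp=1))
print(f"fsdp={args.fsdp}")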