rishi2025 committed on
Commit
5c9010c
·
verified ·
1 Parent(s): 280ba17

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -20,7 +20,7 @@ from io import BytesIO
20
  from PIL import Image
21
  from PIL import Image
22
 
23
- subprocess.check_call([sys.executable, "-m", "pip", "install", "spaces==0.43.0"])
24
 
25
  dtype = torch.bfloat16
26
  device = "cuda" if torch.cuda.is_available() else "cpu"
@@ -184,7 +184,7 @@ def get_duration(prompt_embeds, image_list, width, height, num_inference_steps,
184
  return max(65, num_inference_steps * step_duration + 10)
185
 
186
  @spaces.GPU(duration=65)
187
- def generate_image(prompt_embeds, image_list, width, height, num_inference_steps, guidance_scale, seed, use_turbo, progress=gr.Progress(track_tqdm=True)):
188
  # Move embeddings to GPU only when inside the GPU decorated function
189
  prompt_embeds = prompt_embeds.to(device)
190
 
 
20
  from PIL import Image
21
  from PIL import Image
22
 
23
+ # subprocess.check_call([sys.executable, "-m", "pip", "install", "spaces==0.43.0"])
24
 
25
  dtype = torch.bfloat16
26
  device = "cuda" if torch.cuda.is_available() else "cpu"
 
184
  return max(65, num_inference_steps * step_duration + 10)
185
 
186
  @spaces.GPU(duration=65)
187
+ def generate_image(prompt_embeds, image_list, width, height, num_inference_steps, guidance_scale, seed, use_turbo):
188
  # Move embeddings to GPU only when inside the GPU decorated function
189
  prompt_embeds = prompt_embeds.to(device)
190