mike23415 committed on
Commit c012aad · verified · 1 Parent(s): 45eaa91

Update app.py

Files changed (1): app.py (+7 -4)
app.py CHANGED
@@ -2,7 +2,7 @@ import io
 import base64
 import torch
 from flask import Flask, request, jsonify, send_file
-from diffusers import DiffusionPipeline
+from diffusers import Zero123PlusPipeline
 from PIL import Image
 import logging
 
@@ -14,14 +14,15 @@ app = Flask(__name__)
 # Load the model once at startup (on CPU)
 try:
     logger.info("Loading Zero123Plus pipeline...")
-    pipe = DiffusionPipeline.from_pretrained(
+    pipe = Zero123PlusPipeline.from_pretrained(
         "sudo-ai/zero123plus-v1.2",
         torch_dtype=torch.float32,  # CPU needs float32
+        cache_dir="/tmp/hf_home",
     )
     pipe.to("cpu")
     logger.info("=== Application Startup at CPU mode =====")
 except Exception as e:
-    logger.error(f"Error loading model: {e}")
+    logger.error(f"Error loading model: {e}", exc_info=True)
     pipe = None
 
 def pil_to_base64(image):
@@ -50,13 +51,15 @@ def generate():
 
         image = Image.open(io.BytesIO(base64.b64decode(image_data))).convert("RGB")
 
+        logger.info("Processing image with pipeline...")
         result = pipe(image)
         output_image = result.images[0]
+        logger.info("Image processed successfully")
 
         return jsonify({"image": f"data:image/png;base64,{pil_to_base64(output_image)}"})
 
     except Exception as e:
-        logger.error(f"Error generating image: {e}")
+        logger.error(f"Error generating image: {e}", exc_info=True)
         return jsonify({"error": str(e)}), 500
 
 if __name__ == "__main__":
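
For context, a minimal client-side sketch of how the generate() endpoint above could be exercised once this change is deployed. The route path ("/generate"), the request JSON field name ("image"), and the host/port are assumptions for illustration and are not confirmed by this diff; only the response shape ({"image": "data:image/png;base64,..."}) is taken from the code shown.

# Hedged usage sketch: route "/generate", request field "image", and
# localhost:5000 are assumptions not shown in this diff.
import base64
import requests

with open("input.png", "rb") as f:
    payload = {"image": base64.b64encode(f.read()).decode("utf-8")}

resp = requests.post("http://localhost:5000/generate", json=payload, timeout=600)
resp.raise_for_status()

# The endpoint returns {"image": "data:image/png;base64,<...>"} per the
# return jsonify(...) line in the diff; strip the data-URL prefix to get PNG bytes.
data_url = resp.json()["image"]
png_bytes = base64.b64decode(data_url.split(",", 1)[1])

with open("output.png", "wb") as f:
    f.write(png_bytes)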