mike23415 committed on
Commit
c774bcd
·
verified ·
1 Parent(s): 5c36e3a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -4
app.py CHANGED
@@ -1,7 +1,11 @@
1
  import os
2
  import logging
3
  from flask import Flask, request, jsonify, send_file
4
- from diffusers import DiffusionPipeline
 
 
 
 
5
  import torch
6
  from PIL import Image
7
  import io
@@ -16,14 +20,13 @@ logger = logging.getLogger(__name__)
16
 
17
  # Set Hugging Face cache directory to a writable path
18
  # Make sure to set this BEFORE importing or initializing any models
19
- os.environ['TRANSFORMERS_CACHE'] = '/tmp/transformers_cache'
20
  os.environ['HF_HOME'] = '/tmp/hf_home'
21
  os.environ['XDG_CACHE_HOME'] = '/tmp/cache'
22
 
23
  # Create cache directories if they don't exist
24
- os.makedirs('/tmp/transformers_cache', exist_ok=True)
25
  os.makedirs('/tmp/hf_home', exist_ok=True)
26
  os.makedirs('/tmp/cache', exist_ok=True)
 
27
 
28
  # Global variable for the model
29
  pipe = None
@@ -33,10 +36,13 @@ def load_model():
33
  global pipe
34
  try:
35
  logger.info("Loading Zero123Plus model...")
 
36
  pipe = DiffusionPipeline.from_pretrained(
37
  "sudo-ai/zero123plus-v1.2",
38
  torch_dtype=torch.float32,
39
- cache_dir="/tmp/diffusers_cache"
 
 
40
  )
41
  pipe.to("cpu")
42
  logger.info("Model loaded successfully")
 
1
  import os
2
  import logging
3
  from flask import Flask, request, jsonify, send_file
4
+ from diffusers.pipelines import DiffusionPipeline
5
+ try:
6
+ from diffusers.pipelines.zero123plus.pipeline_zero123plus import Zero123PlusPipeline
7
+ except ImportError:
8
+ print("Zero123PlusPipeline not found in current diffusers version")
9
  import torch
10
  from PIL import Image
11
  import io
 
20
 
21
  # Set Hugging Face cache directory to a writable path
22
  # Make sure to set this BEFORE importing or initializing any models
 
23
  os.environ['HF_HOME'] = '/tmp/hf_home'
24
  os.environ['XDG_CACHE_HOME'] = '/tmp/cache'
25
 
26
  # Create cache directories if they don't exist
 
27
  os.makedirs('/tmp/hf_home', exist_ok=True)
28
  os.makedirs('/tmp/cache', exist_ok=True)
29
+ os.makedirs('/tmp/diffusers_cache', exist_ok=True)
30
 
31
  # Global variable for the model
32
  pipe = None
 
36
  global pipe
37
  try:
38
  logger.info("Loading Zero123Plus model...")
39
+ # Use auto_mapping to let diffusers figure out the correct pipeline class
40
  pipe = DiffusionPipeline.from_pretrained(
41
  "sudo-ai/zero123plus-v1.2",
42
  torch_dtype=torch.float32,
43
+ cache_dir="/tmp/diffusers_cache",
44
+ local_files_only=False,
45
+ resume_download=True
46
  )
47
  pipe.to("cpu")
48
  logger.info("Model loaded successfully")