Spaces:
Running
Running
File size: 1,586 Bytes
5b38336 d90d6a6 5b38336 d90d6a6 2011e87 d90d6a6 2011e87 d90d6a6 5b38336 d90d6a6 2011e87 d90d6a6 2011e87 5b38336 d90d6a6 5b38336 d90d6a6 5b38336 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 |
import bridges
from huggingface_hub import InferenceClient
import gradio_client
import io
import globales
def genera_platillo_gpu(platillo):
    """Generate a dish image by calling a GPU-backed Gradio Space.

    Builds the final prompt from the shared prefix ``globales.previo`` plus
    the dish name, then invokes the Space's ``/infer`` endpoint.

    Args:
        platillo: Dish name appended to the prompt prefix.

    Returns:
        The first element of the Space's prediction result (presumably the
        generated image path/URL — TODO confirm against the Space's API),
        or ``None`` if the remote call raised.
    """
    client = gradio_client.Client(globales.espacio, hf_token=globales.llave)
    prompt = globales.previo + platillo
    print("Eso es el prompt final:", prompt)

    kwargs = {
        "prompt": prompt,
        "api_name": "/infer",
    }
    try:
        result = client.predict(**kwargs)
        return result[0]
    except Exception as e:
        # Best-effort: log the failure and return None explicitly so the
        # caller can distinguish "no image" without an unhandled crash.
        print("Excepción es: ", e)
        return None
def genera_platillo_inference(platillo):
    """Generate a dish image via the Hugging Face Inference API.

    Builds the final prompt from the shared prefix ``globales.previo`` plus
    the dish name, requests a text-to-image generation from the configured
    provider/model, and serializes the result to PNG in memory.

    Args:
        platillo: Dish name appended to the prompt prefix.

    Returns:
        An ``io.BytesIO`` containing the PNG bytes, rewound to position 0
        and ready to be read/streamed, or ``None`` if generation failed.
    """
    client = InferenceClient(
        provider=globales.proveedor,
        api_key=globales.llave,
    )
    prompt = globales.previo + platillo
    try:
        image = client.text_to_image(
            prompt,
            model=globales.inferencia,
        )
        # Serialize to an in-memory PNG and rewind so the caller can read
        # from the start of the buffer immediately.
        img_io = io.BytesIO()
        image.save(img_io, "PNG")
        img_io.seek(0)
        return img_io
    except Exception as e:
        # Best-effort: log the failure and return None explicitly so the
        # caller can distinguish "no image" without an unhandled crash.
        print("Excepción es: ", e)
        return None