import gradio as gr
import spaces
from sentence_transformers import SentenceTransformer
from sentence_transformers.util import cos_sim
from sentence_transformers.quantization import quantize_embeddings
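
# Load the embedding model once at startup. mxbai-embed-large-v1 supports
# Matryoshka truncation, so truncate_dim shortens each embedding to `dimensions`.
# (cos_sim and quantize_embeddings are imported above but not used yet.)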
print("Loading embedding model");
dimensions = 768
model = SentenceTransformer("mixedbread-ai/mxbai-embed-large-v1", truncate_dim=dimensions)
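
# @spaces.GPU asks Hugging Face Spaces (ZeroGPU) for a GPU only while this
# function runs; prompt_name="query" applies the model's built-in query prompt.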
@spaces.GPU
def embed(text):
    query_embedding = model.encode(text, prompt_name="query")
    return query_embedding.tolist()
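
# Minimal UI: type text, click "embed", and the raw embedding vector appears in
# "results". The "Script to search" box is declared but not wired to a handler yet.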
with gr.Blocks() as demo:
    txtEmbed = gr.Text(label="Text to embed")
    btnEmbed = gr.Button("embed")
    search = gr.Text(label="Script to search")
    results = gr.Text(label="results")

    btnEmbed.click(embed, [txtEmbed], [results])
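
# Bind to all interfaces so the app is reachable inside the Space/container on port 7860.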
if __name__ == "__main__":
    demo.launch(
        share=False,
        debug=False,
        server_port=7860,
        server_name="0.0.0.0",
        allowed_paths=[]
    )