# chatbot_v2/app.py
import requests
import os

# Print the current working directory (handy when debugging relative paths in the Space).
print(os.getcwd())
# The GGUF file was previously downloaded manually with requests; kept here,
# commented out, for reference.
# url = "https://huggingface.co/ngxson/DeepSeek-R1-Distill-Qwen-7B-abliterated-GGUF/resolve/main/DeepSeek-R1-Distill-Qwen-7B-abliterated-Q4_K_M.gguf?download=true"
# file_name = "/chatbot_v2/models/DeepSeek-R1-Distill-Qwen-7B-abliterated-Q4_K_M.gguf"
# response = requests.get(url, stream=True)
# if response.status_code == 200:
#     with open(file_name, "wb") as file:
#         for chunk in response.iter_content(chunk_size=8192):
#             file.write(chunk)
#     print("Download complete:", file_name)
# else:
#     print("Failed to download. Status code:", response.status_code)
from llama_cpp import Llama

# Llama() cannot load a model from a URL directly; Llama.from_pretrained
# (requires huggingface_hub) downloads the GGUF from the Hub and loads it.
llm = Llama.from_pretrained(
    repo_id="ngxson/DeepSeek-R1-Distill-Qwen-7B-abliterated-GGUF",
    filename="DeepSeek-R1-Distill-Qwen-7B-abliterated-Q4_K_M.gguf",
)
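# Tuning note (a sketch, not in the original script): from_pretrained forwards
# extra keyword arguments to the Llama constructor, e.g. n_ctx=2048 to widen
# the context window, or n_gpu_layers to offload layers when a GPU build of
# llama.cpp is installed.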
# Generate a completion for a simple prompt; max_tokens caps the reply length
# (the library default is only 16 tokens).
output = llm("What is AI?", max_tokens=256)
print(output)
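
# Chat-style alternative (a sketch, not in the original script): it assumes the
# GGUF metadata carries a chat template that llama_cpp can apply, and uses
# create_chat_completion, llama-cpp-python's chat API.
chat = llm.create_chat_completion(
    messages=[{"role": "user", "content": "What is AI?"}],
    max_tokens=256,
)
print(chat["choices"][0]["message"]["content"])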