from llama_cpp import Llama
from telegram import Update
from telegram.ext import Application, CommandHandler, MessageHandler, filters, CallbackContext
# ✅ Hugging Face model path
model_path = "/app/models"  # your actual model directory
model_file = "model.gguf"   # model file name
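# If the GGUF file is not already present under /app/models, loading the model
# below will fail. As a hedged sketch (not part of the original script), the
# file could be fetched from the Hugging Face Hub at startup; the repo id
# "your-username/your-gguf-repo" is a placeholder, replace it with the repo
# that actually hosts your model.
#
# from huggingface_hub import hf_hub_download
# hf_hub_download(
#     repo_id="your-username/your-gguf-repo",  # hypothetical repo id
#     filename=model_file,                     # the .gguf file in that repo
#     local_dir=model_path,                    # download into /app/models
# )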
# ✅ Load the model
model = Llama(model_path=f"{model_path}/{model_file}", n_ctx=4096)
# ✅ Telegram bot token (hard-coded here; reading it from an environment
# variable or a Space secret would be safer)
TELEGRAM_BOT_TOKEN = "7881901341:AAEaE5gndeORmCuyzSwOyf2ELFLXHneCpiw"
def chat(prompt):
    # Generate a completion for the user's prompt and return the text
    output = model(
        prompt=prompt,
        max_tokens=200,
        temperature=0.7,
        top_p=0.9
    )
    return output["choices"][0]["text"]
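# A quick local sanity check (my suggestion, not part of the original flow):
# calling chat() directly confirms the model loads and generates text before
# the Telegram bot is wired up.
# print(chat("Say hello in one short sentence."))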
# ✅ Telegram commands
async def start(update: Update, context: CallbackContext) -> None:
    # /start handler: greet the user
    await update.message.reply_text("Assalamu Alaikum! I am your Begum! 🥰 Come on, talk to me!")

async def handle_message(update: Update, context: CallbackContext) -> None:
    # Forward any non-command text message to the model and reply with its output
    user_input = update.message.text
    response = chat(user_input)
    await update.message.reply_text(response)
# ✅ Telegram bot setup
def main():
    application = Application.builder().token(TELEGRAM_BOT_TOKEN).build()
    application.add_handler(CommandHandler("start", start))
    application.add_handler(MessageHandler(filters.TEXT & ~filters.COMMAND, handle_message))
    application.run_polling()
if __name__ == "__main__":
    main()
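# Dependencies: a minimal requirements.txt for this script (an assumption about
# the setup, not taken from the original) might list:
#     llama-cpp-python
#     python-telegram-bot
# plus huggingface_hub if the download sketch near the top is used.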