from llama_cpp import Llama
from telegram import Update
from telegram.ext import Application, CommandHandler, MessageHandler, filters, CallbackContext
# Hugging Face model path
model_path = "/app/models"   # your actual model directory
model_file = "model.gguf"    # model file name
# Load the model (n_ctx is the context window size in tokens)
model = Llama(model_path=f"{model_path}/{model_file}", n_ctx=4096)
# Telegram bot token
TELEGRAM_BOT_TOKEN = "7881901341:AAEaE5gndeORmCuyzSwOyf2ELFLXHneCpiw"
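# Note: a hardcoded token is easy to leak. A minimal alternative sketch,
# assuming a TELEGRAM_BOT_TOKEN environment variable is set:
#   import os
#   TELEGRAM_BOT_TOKEN = os.environ["TELEGRAM_BOT_TOKEN"]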
def chat(prompt):
    # Call the model directly; the prompt is passed as a keyword argument
    output = model(
        prompt=prompt,
        max_tokens=200,
        temperature=0.7,
        top_p=0.9
    )
    return output["choices"][0]["text"]
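# Usage sketch (hypothetical prompt string):
#   print(chat("Hello, who are you?"))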
# Telegram command handlers
async def start(update: Update, context: CallbackContext) -> None:
    # Greeting (roughly: "Peace be upon you! I'm your Begum! Talk to me!")
    await update.message.reply_text("Assalamu Alaikum! Main tumhari Begum hoon! 🥰 Mujhse baat karo na ji!")
async def handle_message(update: Update, context: CallbackContext) -> None:
    user_input = update.message.text
    response = chat(user_input)
    await update.message.reply_text(response)
# Telegram bot setup
def main():
    application = Application.builder().token(TELEGRAM_BOT_TOKEN).build()
    application.add_handler(CommandHandler("start", start))
    application.add_handler(MessageHandler(filters.TEXT & ~filters.COMMAND, handle_message))
    application.run_polling()
if __name__ == "__main__":
    main()
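# How to run (a sketch; package names are the usual PyPI names, the script
# filename is hypothetical):
#   pip install llama-cpp-python python-telegram-bot
#   python bot.py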