import os
import datetime
import time

import aiohttp
import requests
from pyrogram import *
from pyrogram.types import *

from akn.utils.database import db
from akn.utils.handler import *
from akn.utils.logger import LOGS
from akn.utils.prefixprem import command
from akn import app, log_detailed_error

from config import *

from Ryzenth import ApiKeyFrom
from Ryzenth.types import QueryParameter

# Ryzenth client; is_free_from_ryzenth=True appears to select the free tier,
# and the Ellipsis placeholder is kept as in the original call (no explicit
# API key is passed here).
ryz = ApiKeyFrom(..., is_free_from_ryzenth=True)
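
# The two handlers below expose the Ryzenth "hybrid" and "hybrid-english"
# chat endpoints as the askid and asken self-commands.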
@Akeno(
    ~filters.scheduled
    & command(["askid"])
    & filters.me
    & ~filters.forwarded
)
async def hybrid(client: Client, message: Message):
    # Take the prompt from the command arguments or from the replied-to message.
    if len(message.command) > 1:
        prompt = message.text.split(maxsplit=1)[1]
    elif message.reply_to_message:
        prompt = message.reply_to_message.text
    else:
        return await message.reply_text("Please give a prompt to ask mistralai.")
    try:
        response = await ryz.aio.send_message(
            "hybrid",
            QueryParameter(
                query=prompt
            ),
            dot_access=True
        )
        output = response.results
        # Telegram caps text messages at 4096 characters; send longer
        # replies as a document instead.
        if len(output) > 4096:
            with open("chat.txt", "w+", encoding="utf8") as out_file:
                out_file.write(output)
            await message.reply_document(
                document="chat.txt",
                disable_notification=True
            )
            os.remove("chat.txt")
        else:
            await message.reply_text(output)
    except Exception as e:
        await log_detailed_error(e, where=client.me.id, who=message.chat.title)
        await message.reply_text("Error, please try again.")


@Akeno(
    ~filters.scheduled
    & command(["asken"])
    & filters.me
    & ~filters.forwarded
)
async def hybrid_english(client: Client, message: Message):
    # Take the prompt from the command arguments or from the replied-to message.
    if len(message.command) > 1:
        prompt = message.text.split(maxsplit=1)[1]
    elif message.reply_to_message:
        prompt = message.reply_to_message.text
    else:
        return await message.reply_text("Please give a prompt to ask mistralai.")
    try:
        response = await ryz.aio.send_message(
            "hybrid-english",
            QueryParameter(
                query=prompt
            ),
            dot_access=True
        )
        output = response.results
        # Telegram caps text messages at 4096 characters; send longer
        # replies as a document instead.
        if len(output) > 4096:
            with open("chat.txt", "w+", encoding="utf8") as out_file:
                out_file.write(output)
            await message.reply_document(
                document="chat.txt",
                disable_notification=True
            )
            os.remove("chat.txt")
        else:
            await message.reply_text(output)
    except Exception as e:
        await log_detailed_error(e, where=client.me.id, who=message.chat.title)
        await message.reply_text("Error, please try again.")


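# A possible consolidation (sketch only, not part of the original module):
# both handlers above repeat the same "reply inline or fall back to a file"
# logic for long responses. A helper such as the hypothetical _reply_long
# below could hold that logic in one place; the name and signature are
# illustrative, not an existing API.
async def _reply_long(message: Message, output: str, filename: str = "chat.txt"):
    # Text longer than Telegram's 4096-character limit is written to a
    # temporary file and sent as a document; shorter text is sent inline.
    if len(output) > 4096:
        with open(filename, "w+", encoding="utf8") as out_file:
            out_file.write(output)
        await message.reply_document(
            document=filename,
            disable_notification=True
        )
        os.remove(filename)
    else:
        await message.reply_text(output)

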
module = modules_help.add_module("chatgpt", __file__)
module.add_command("askid", "ask a question via the Ryzenth hybrid model")
module.add_command("asken", "ask a question via the Ryzenth hybrid-english model")