randydev committed on
Commit 78477bf · verified
1 Parent(s): 39c425f

Create Intelligence.py

Files changed (1)
  1. akn/AllDownloaderBot/Intelligence.py +104 -0
akn/AllDownloaderBot/Intelligence.py ADDED
@@ -0,0 +1,104 @@
+ #!/usr/bin/env python
+ # -*- coding: utf-8 -*-
+ # Copyright 2020-2024 (c) Randy W @xtdevs, @xtsea
+ #
+ # from : https://github.com/TeamKillerX
+ # Channel : @RendyProjects
+ # This program is free software: you can redistribute it and/or modify
+ # it under the terms of the GNU Affero General Public License as published by
+ # the Free Software Foundation, either version 3 of the License, or
+ # (at your option) any later version.
+ #
+ # This program is distributed in the hope that it will be useful,
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ # GNU Affero General Public License for more details.
+ #
+ # You should have received a copy of the GNU Affero General Public License
+ # along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+
+ import time
+ import os
+ from pyrogram import *
+ from pyrogram.types import *
+ from pyrogram.errors import *
+
+ from akn.utils.handler import *
+ from akn.utils.logger import LOGS
+ from akn.utils.scripts import progress
+ from akenoai import *
+ from akenoai.types import DifferentAPIDefault
+
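+ # Shared AkenoX API client used by both command handlers below.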
+ js = AkenoXJs(DifferentAPIDefault()).connect()
+
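+ # /hybrid: send a prompt (from the command arguments or a replied-to message)
+ # to the AkenoX-1.9-Hybrid chat model and return the answer.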
+ @Client.on_message(
+     ~filters.scheduled
+     & filters.command(["hybrid"])
+     & ~filters.forwarded
+ )
+ async def hybridai(client, message):
+     if len(message.command) > 1:
+         prompt = message.text.split(maxsplit=1)[1]
+     elif message.reply_to_message:
+         prompt = message.reply_to_message.text
+     else:
+         return await message.reply_text("Please provide a prompt or reply to a message.")
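+     # Query the hosted chat model; the API key comes from the AKENOX_AI_PREM environment variable.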
+     try:
+         response = await js.chat.create(
+             "akenox/AkenoX-1.9-Hybrid",
+             api_key=os.environ["AKENOX_AI_PREM"],
+             params_data={"query": prompt},
+             is_obj=True
+         )
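+         # Telegram messages are limited to 4096 characters; longer replies are sent as a text file.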
+         if len(response.results) > 4096:
+             with open("chat.txt", "w+", encoding="utf8") as out_file:
+                 out_file.write(response.results)
+             await message.reply_document(
+                 document="chat.txt",
+                 disable_notification=True
+             )
+             os.remove("chat.txt")
+         else:
+             await message.reply_text(response.results)
+     except Exception as e:
+         LOGS.info(f"Error: hybridai {str(e)}")
+         await message.reply_text("Error, please try again.")
+
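+ # /fluxai: generate an image with black-forest-labs/flux-1-schnell and send it as a photo.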
+ @Client.on_message(
+     ~filters.scheduled
+     & filters.command(["fluxai"])
+     & ~filters.forwarded
+ )
+ async def aztimgfluxai_(client: Client, message: Message):
+     question = message.text.split(" ", 1)[1] if len(message.command) > 1 else None
+     if not question:
+         return await message.reply_text("Please provide a question for Flux.")
+     pro = await message.reply_text("Generating image, please wait...")
+     try:
+         response = await js.image.create(
+             "black-forest-labs/flux-1-schnell",
+             image_read=True,
+             params_data={"query": question},
+         )
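+         # The response here is the raw image bytes (image_read=True); write them to a temporary file for upload.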
+         file_path = "randydev.jpg"
+         with open(file_path, "wb") as f:
+             f.write(response)
+         await pro.edit_text("Uploading image...")
+         await message.reply_photo(
+             file_path,
+             progress=progress,
+             progress_args=(
+                 pro,
+                 time.time(),
+                 "Uploading image..."
+             )
+         )
+         await pro.delete()
+         if os.path.exists(file_path):
+             os.remove(file_path)
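+     # ChatSendPhotosForbidden is raised by Pyrogram when the chat does not allow photo messages.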
+     except ChatSendPhotosForbidden:
+         return await pro.edit_text("You can't send photos in this chat.")
+     except Exception as e:
+         LOGS.error(f"Error: aztimgfluxai_ {str(e)}")
+         await pro.edit_text("Error generating the image.")