# This script contains use cases for simple bots
# import requirements
import os
import asyncio
import PIL.Image
import google.generativeai as genai
from pathlib import Path
from pyrogram import Client, filters, enums
from pyrogram.types import Message
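# Generation settings for the recipe (aicook) model; the relatively low
# temperature keeps the recipe output more focused and deterministic.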
generation_config_cook = {
    "temperature": 0.35,
    "top_p": 0.95,
    "top_k": 40,
    "max_output_tokens": 1024,
}
# API KEYS
# Gemini Ai API KEY
API_KEY = os.environ['API_KEY']
# Telegram Auth API ID
API_ID = os.environ['API_ID']
# Telegram Auth API HASH
API_HASH = os.environ['API_HASH']
# Telegram Bot API TOKEN generated from @botfather
BOT_TOKEN = os.environ['BOT_TOKEN']
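# Example of exporting the required environment variables before running the
# script (shell syntax; the values below are placeholders, not real credentials):
#   export API_KEY="your-gemini-api-key"
#   export API_ID="1234567"
#   export API_HASH="your-telegram-api-hash"
#   export BOT_TOKEN="1234567:bot-token-from-BotFather"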
# configure API KEY for gemini
genai.configure(api_key=API_KEY)
# Setup models
model = genai.GenerativeModel("gemini-pro-vision")
model_text = genai.GenerativeModel("gemini-pro")
model_cook = genai.GenerativeModel(model_name="gemini-pro-vision",
                                   generation_config=generation_config_cook)
# configure pyrogram client
app = Client("gemini_ai", api_id=API_ID, api_hash=API_HASH, bot_token=BOT_TOKEN)
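# /askai in groups: answer a text prompt (taken from the command arguments or
# from a replied-to message) using the gemini-pro text model.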
@app.on_message(filters.command("askai") & filters.group)
async def say(_, message: Message):
try:
i = await message.reply_text("<code>Please Wait...</code>")
if len(message.command) > 1:
prompt = message.text.split(maxsplit=1)[1]
elif message.reply_to_message:
prompt = message.reply_to_message.text
else:
await i.delete()
await message.reply_text(
f"<b>Usage: </b><code>/askai [prompt/reply to message]</code>"
)
return
chat = model_text.start_chat()
response = chat.send_message(prompt)
await i.delete()
await message.reply_text(f"**Answer:** {response.text}", parse_mode=enums.ParseMode.MARKDOWN)
except Exception as e:
await i.delete()
await message.reply_text(f"An error occurred: {str(e)}")
@app.on_message(filters.text & filters.private)
async def private_chat(_, message: Message):
    try:
        await message.reply_chat_action(enums.ChatAction.TYPING)
        prompt = message.text
        chat = model_text.start_chat()
        response = chat.send_message(prompt)
        await message.reply_text(f"{response.text}", parse_mode=enums.ParseMode.MARKDOWN)
    except Exception as e:
        await message.reply_text(f"An error occurred: {str(e)}")
@app.on_message(filters.command("getai") & filters.group)
async def say(_, message: Message):
try:
i = await message.reply_text("<code>Please Wait...</code>")
base_img = await message.reply_to_message.download()
img = PIL.Image.open(base_img)
response = model.generate_content(img)
await i.delete()
await message.reply_text(
f"**Detail Of Image:** {response.parts[0].text}", parse_mode=enums.ParseMode.MARKDOWN
)
os.remove(base_img)
except Exception as e:
await i.delete()
await message.reply_text(str(e))
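# /aicook in groups: identify the baked good in the replied-to image and
# generate a matching recipe with the tuned gemini-pro-vision model.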
@app.on_message(filters.command("aicook") & filters.group)
async def say(_, message: Message):
try:
i = await message.reply_text("<code>Cooking...</code>")
base_img = await message.reply_to_message.download()
img = PIL.Image.open(base_img)
cook_img = [
"Accurately identify the baked good in the image and provide an appropriate and recipe consistent with your analysis. ",
img,
]
response = model_cook.generate_content(cook_img)
await i.delete()
await message.reply_text(
f"{response.text}", parse_mode=enums.ParseMode.MARKDOWN
)
os.remove(base_img)
except Exception as e:
await i.delete()
await message.reply_text(str(e))
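# /aiseller in groups: write a marketing description for the product image in
# the replied-to message, tailored to the target audience given as an argument.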
@app.on_message(filters.command("aiseller") & filters.group)
async def say(_, message: Message):
try:
i = await message.reply_text("<code>Generating...</code>")
if len(message.command) > 1:
taud = message.text.split(maxsplit=1)[1]
else:
await i.delete()
await message.reply_text(
f"<b>Usage: </b><code>/aiseller [target audience] [reply to product image]</code>"
)
return
base_img = await message.reply_to_message.download()
img = PIL.Image.open(base_img)
sell_img = [
"Given an image of a product and its target audience, write an engaging marketing description",
"Product Image: ",
img,
"Target Audience: ",
taud
]
response = model.generate_content(sell_img)
await i.delete()
await message.reply_text(
f"{response.text}", parse_mode=enums.ParseMode.MARKDOWN
)
os.remove(base_img)
except Exception as e:
await i.delete()
await message.reply_text(f"<b>Usage: </b><code>/aiseller [target audience] [reply to product image]</code>")
# Run the bot
if __name__ == "__main__":
    app.run()