import gradio as gr
from telegram import Update
from telegram.ext import CommandHandler, MessageHandler, Filters, Updater, CallbackContext
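# Assumed dependencies: gradio and python-telegram-bot v13.x
# (the Updater/Filters API used below was removed in python-telegram-bot v20+)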
# Telegram bot token (use your own; never commit a real token to source control)
TOKEN = "YOUR_TELEGRAM_BOT_TOKEN"
# Define a Gradio interface for your machine learning model
def predict_text(text):
    # Replace this with your model's prediction logic
    prediction = "Your model's prediction: " + text
    return prediction
iface = gr.Interface(fn=predict_text, inputs="text", outputs="text")
# Define a command handler for the Telegram bot
def start(update: Update, context: CallbackContext):
    update.message.reply_text("Welcome to your bot! Send me text for predictions.")
# Define a message handler for the Telegram bot
def handle_message(update: Update, context: CallbackContext):
    user_text = update.message.text
    prediction = predict_text(user_text)
    update.message.reply_text(prediction)
# Initialize the Telegram bot updater
updater = Updater(token=TOKEN, use_context=True)
dispatcher = updater.dispatcher
# Register command and message handlers
dispatcher.add_handler(CommandHandler("start", start))
dispatcher.add_handler(MessageHandler(Filters.text & ~Filters.command, handle_message))
# Start the Gradio interface without blocking, then run the Telegram bot
# (a plain launch() would block here and the bot would never start polling)
iface.launch(share=True, prevent_thread_lock=True)
updater.start_polling()
updater.idle()
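# Quick check (assumes a valid bot token and both packages installed):
# 1. Run this script; Gradio prints a local and a public share URL for the web demo.
# 2. Send /start to the bot in Telegram, then any text message; the bot replies
#    with the placeholder prediction from predict_text().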