import gradio as gr
import openai
import time
from transformers import pipeline
from gtts import gTTS
import os

# ✅ Read the OpenAI API key from the environment; never hardcode secrets in source
openai.api_key = os.getenv("OPENAI_API_KEY")  # e.g. export OPENAI_API_KEY=sk-... before running

# Load pipelines
sentiment_pipeline = pipeline("sentiment-analysis", model="distilbert-base-uncased-finetuned-sst-2-english")
summarization_pipeline = pipeline("summarization", model="facebook/bart-large-cnn")
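# Note: on first run, each pipeline downloads its model weights from the Hugging Face Hub
# and caches them locally; later runs load from the cache.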

# Chatbot class
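# The chat history is a list of [user, assistant] pairs (the gr.Chatbot "tuples" format),
# e.g. [["Hi", "Hello!"], ["What's 2+2?", "4"]]. stream_chat appends the new turn and
# yields the growing history so the UI updates as tokens arrive.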
class OpenAIChatbot:
    def __init__(self, model="gpt-3.5-turbo"):
        self.model = model

    def set_model(self, model_name):
        self.model = model_name
        return f"Model set to {model_name}"

    def stream_chat(self, message, history, system_prompt=""):
        if not message.strip():
            yield history
            return

        messages = [{"role": "system", "content": system_prompt}] if system_prompt else []
        for user, bot in history:
            messages += [{"role": "user", "content": user}, {"role": "assistant", "content": bot}]
        messages.append({"role": "user", "content": message})

        history.append([message, ""])

        try:
            response = openai.chat.completions.create(
                model=self.model,
                messages=messages,
                stream=True,
                temperature=0.7,
                max_tokens=1000
            )
            bot_reply = ""
            for chunk in response:
                delta = chunk.choices[0].delta
                if delta and delta.content:
                    bot_reply += delta.content
                    history[-1][1] = bot_reply
                    yield history
                    time.sleep(0.02)
        except Exception as e:
            history[-1][1] = f"Error: {str(e)}"
            yield history

chatbot = OpenAIChatbot()

# Multi-task handler: returns the text result plus a single update for the audio player,
# so each output component is listed only once in the click() wiring below.
def perform_task(task, text):
    if not text.strip():
        return "⚠️ Please enter some text.", gr.update(visible=False)

    if task == "Sentiment Analysis":
        result = sentiment_pipeline(text)[0]
        return f"Label: {result['label']} | Confidence: {round(result['score'], 3)}", gr.update(visible=False)

    elif task == "Summarization":
        result = summarization_pipeline(text, max_length=100, min_length=30, do_sample=False)
        return result[0]['summary_text'], gr.update(visible=False)

    elif task == "Text-to-Speech":
        tts = gTTS(text)
        file_path = "tts_output.mp3"
        tts.save(file_path)
        return "Audio generated successfully.", gr.update(visible=True, value=file_path)

# Interface
with gr.Blocks() as demo:
    gr.Markdown("# 🤖 Multi-Task AI Assistant + OpenAI Chatbot")

    with gr.Tab("AI Tasks"):
        task = gr.Dropdown(["Sentiment Analysis", "Summarization", "Text-to-Speech"], value="Sentiment Analysis")
        input_text = gr.Textbox(lines=6, label="Input")
        run_btn = gr.Button("Run")
        output = gr.Textbox(label="Result")
        audio = gr.Audio(type="filepath", visible=False)

        run_btn.click(perform_task, [task, input_text], [output, audio])

    with gr.Tab("Chatbot"):
        model_select = gr.Dropdown(["gpt-3.5-turbo", "gpt-4"], value="gpt-3.5-turbo", label="Model")
        system_prompt = gr.Textbox(label="System Prompt", placeholder="You are a helpful assistant...")
        chat_ui = gr.Chatbot(label="Chat", height=400)
        message_input = gr.Textbox(placeholder="Type your message...")
        send_btn = gr.Button("Send")
        clear_btn = gr.Button("Clear")

        model_select.change(chatbot.set_model, inputs=[model_select], outputs=[])

        # Must itself be a generator function (yield from) so Gradio streams partial replies
        def handle_chat(msg, hist, sys_prompt):
            yield from chatbot.stream_chat(msg, hist, sys_prompt)

        send_btn.click(handle_chat, [message_input, chat_ui, system_prompt], [chat_ui])
        message_input.submit(handle_chat, [message_input, chat_ui, system_prompt], [chat_ui])
        clear_btn.click(lambda: [], outputs=[chat_ui])

demo.launch(share=True)
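
# Quick start (sketch; assumes a PyTorch backend for the transformers pipelines
# and that this file is saved as app.py):
#   pip install gradio openai transformers torch gtts
#   export OPENAI_API_KEY=sk-...
#   python app.py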