# Hugging Face Spaces page header captured during extraction:
# Spaces: Paused, Paused
import random | |
import gradio as gr | |
import openai | |
import torch | |
import os | |
def predict(message, history, api_key=None):
    """Stream a GPT-3.5-turbo chat completion for *message*.

    Parameters
    ----------
    message : str
        The latest user message.
    history : list[tuple[str, str]]
        Prior (user, assistant) message pairs as provided by gr.Chatbot.
    api_key : str, optional
        Unused; kept for backward compatibility. Credentials are read from
        the environment instead. It now defaults to None because
        gr.ChatInterface invokes the handler as fn(message, history), which
        previously raised TypeError for the missing third argument.

    Yields
    ------
    str
        The assistant reply accumulated so far, once per streamed chunk.
    """
    # Credentials and endpoint come from the environment, not the parameter.
    openai.api_key = os.environ.get("api_key")
    openai.api_base = os.environ.get("api_base")

    # Convert Gradio's (user, assistant) pair history into OpenAI's
    # role/content message format.
    history_openai_format = []
    for human, assistant in history:
        history_openai_format.append({"role": "user", "content": human})
        history_openai_format.append({"role": "assistant", "content": assistant})
    history_openai_format.append({"role": "user", "content": message})

    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=history_openai_format,
        temperature=0.7,
        max_tokens=5000,
        top_p=0.95,
        frequency_penalty=1,
        presence_penalty=1,
        stream=True,
    )

    # With stream=True the response is an iterator of chunks. Accumulate the
    # delta text and yield the running message after each content chunk.
    partial_message = ""
    for chunk in response:
        # Use .get: the first chunk's delta may hold only {"role": ...} and
        # the final chunk an empty delta — indexing ['content'] raised KeyError.
        content = chunk["choices"][0]["delta"].get("content")
        if content:
            partial_message += content
            yield partial_message
    # The original ended with `return response["choices"][0]["message"]["content"]`,
    # which is invalid for a streamed response (an exhausted iterator is not
    # subscriptable) and meaningless as a generator return value; removed.
title = "π Chat with Pigeon" | |
description = \ | |
""" | |
π¬ This space is powered by **Huggingface Hosting**. | |
π This space runs **very fast** even on **CPU**. | |
π You get totally unique and creative answers. | |
π PigeonChat is available worldwide in over **160 languages**. | |
π PigeonChat is powered by **open source** and is completely **private**. | |
π₯οΈοΈ This demo is by **Evgeniy Hristoforu** (**OpenSkyML**). | |
<h2></h2> | |
""" | |
# Prompt suggestions shown as clickable example chips in the chat UI.
examples = [
    "Hello there! How are you doing?",
    "Can you explain briefly to me what is the Python programming language?",
    "Explain the plot of Cinderella in a sentence.",
    "How many hours does it take a man to eat a Helicopter?",
    "Write a 100-word article on 'Benefits of Open-Source in AI research'",
]
# Chat history widget; avatar images are local files shipped with the Space.
chatbot = gr.Chatbot(
    label="PigeonChat",
    avatar_images=('user.png', 'bot.png'),
    min_width=260,
    show_share_button=False,
    show_copy_button=True,
    bubble_full_width=False,
)

# BUG FIX: the original passed the undefined name `engine` as the chat
# handler, which raises NameError at import time. The handler defined in
# this file is `predict`.
gr.ChatInterface(
    predict,
    chatbot=chatbot,
    title=title,
    description=description,
    examples=examples,
).launch(share=False, debug=False, show_error=False, show_api=False)