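"""Gradio entry point for the Ask Candid chat assistant."""
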
from typing import List, Tuple, Dict, TypedDict, Optional, Any
import os
import gradio as gr
from langchain_openai.chat_models import ChatOpenAI
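
# Support running both as a package module and as a standalone script.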
try:
    from utils import format_chat_ag_response
    from retrieval.config import ALL_INDICES
    from static.css import css_chat
    from chat import run_chat
except ImportError:
    from .utils import format_chat_ag_response
    from .retrieval.config import ALL_INDICES
    from .static.css import css_chat
    from .chat import run_chat

ROOT = os.path.dirname(os.path.abspath(__file__))


class LoggedComponents(TypedDict):
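    """Gradio components whose values are collected for logging and user feedback."""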
    context: List[gr.components.Component]
    found_helpful: gr.components.Component
    will_recommend: gr.components.Component
    comments: gr.components.Component
    email: gr.components.Component


def execute(
    thread_id: str,
    user_input: Dict[str, Any],
    chatbot: List[Dict],
    max_new_tokens: int,
    indices: Optional[List[str]] = None,
):
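    """Instantiate a streaming GPT-4o chat model and run one chat turn through run_chat."""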
    llm = ChatOpenAI(
        model_name="gpt-4o",
        max_tokens=max_new_tokens,
        api_key=os.getenv("OPENAI_API_KEY"),
        temperature=0.0,
        streaming=True
    )
    return run_chat(
        thread_id=thread_id,
        user_input=user_input,
        chatbot=chatbot,
        llm=llm,
        indices=indices
    )


def build_chat() -> Tuple[LoggedComponents, gr.Blocks]:
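    """Assemble the Ask Candid Gradio UI and return the components to log along with the Blocks app."""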
    with gr.Blocks(theme=gr.themes.Soft(), title="Ask Candid", css=css_chat) as demo:
        gr.Markdown(
            """
            <h1>Ask Candid</h1>
            <p>
            Please read the <a
              href='https://info.candid.org/chatbot-reference-guide'
              target="_blank"
              rel="noopener noreferrer"
            >guide</a> to get started.
            </p>
            <hr>
            """
        )
        with gr.Accordion(label="Advanced settings", open=False):
            es_indices = gr.CheckboxGroup(
                choices=list(ALL_INDICES),
                value=list(ALL_INDICES),
                label="Sources to include",
                interactive=True
            )
            max_new_tokens = gr.Slider(
                value=256 * 3, minimum=128, maximum=2048, step=128,
                label="Max new tokens", interactive=True
            )
        with gr.Column():
            chatbot = gr.Chatbot(
                label="Candid Assistant",
                elem_id="chatbot",
                bubble_full_width=False,
                avatar_images=(
                    None,
                    os.path.join(ROOT, "static", "candid_logo_yellow.png")
                ),
                height="45vh",
                type="messages",
                show_label=False,
                show_copy_button=True,
                show_share_button=True,
                show_copy_all_button=True
            )
            msg = gr.MultimodalTextbox(label="Your message", interactive=True)
            thread_id = gr.Text(visible=False, value="", label="thread_id")
            gr.ClearButton(components=[msg, chatbot, thread_id], size="sm")

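        # On submit, run the chat turn and then post-process the assistant's answer for display.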
        # pylint: disable=no-member
        chat_msg = msg.submit(
            fn=execute,
            inputs=[thread_id, msg, chatbot, max_new_tokens, es_indices],
            outputs=[msg, chatbot, thread_id]
        )
        chat_msg.then(format_chat_ag_response, chatbot, chatbot, api_name="bot_response")

    logged = LoggedComponents(
        context=[thread_id, chatbot]
    )
    return logged, demo


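# Launch the app locally; login credentials are read from environment variables.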
if __name__ == '__main__':
    _, app = build_chat()
    app.queue(max_size=5).launch(
        show_api=False,
        auth=[
            (os.getenv("APP_USERNAME"), os.getenv("APP_PASSWORD")),
            (os.getenv("APP_PUBLIC_USERNAME"), os.getenv("APP_PUBLIC_PASSWORD")),
        ],
        auth_message="Login to Candid's AI assistant",
        ssr_mode=False
    )