File size: 4,572 Bytes
ca7e9c6
 
 
 
 
 
 
 
 
 
3de54ca
f57b8d4
ca7e9c6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7a726ff
ca7e9c6
 
7a726ff
ca7e9c6
 
7a726ff
ca7e9c6
 
 
 
 
7a726ff
a7f3b3b
ca7e9c6
 
 
 
3de54ca
ca7e9c6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
f3e6036
ca7e9c6
f57b8d4
 
 
 
ca7e9c6
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
##############################################################################
# Main script that builds the UI & connects the logic for an LLM-driven
# query frontend to a "Global Commerce" demo app.
#
# @philmui
# Mon May 1 18:34:45 PDT 2023
##############################################################################


import streamlit as st
from pprint import pprint
from agents import agentController , salesAgent, chinookAgent, chatAgent

##############################################################################

# Page chrome: wide layout, shopping-cart favicon, and the app title header.
# set_page_config must be the first Streamlit call in the script.
st.set_page_config(page_title="Global", 
                   page_icon=":cart:", 
                   layout="wide")
st.header("πŸ“¦ Global πŸ›οΈ")

# Two equal-width columns: left picks the LLM backend, right picks how the
# question is routed (see the dispatch on option_mode below).
col1, col2 = st.columns([1,1])

with col1:
    # Model identifier passed through to the agents; mixes OpenAI completion
    # models, OpenAI chat models, and HuggingFace model ids.
    option_llm = st.selectbox(
        "Model",
        ('text-davinci-003', 
         'text-babbage-001', 
         'text-curie-001', 
         'text-ada-001',
         'gpt-4',
         'gpt-3.5-turbo',
         'google/flan-t5-xl',
         'databricks/dolly-v2-3b',
         'bigscience/bloom-1b7')
    )
with col2:
    # Routing mode: "Internal-*" and "Chat" bypass the agentController;
    # everything else goes through it (the default demo path).
    option_mode = st.selectbox(
        "LLM mode",
        ("Instruct (all)",
         "Chat (high temperature)",
         "Wolfram-Alpha",
         "Internal-Sales",
         "Internal-Merchant"
         )
    )

def get_question():
    """Render the main question input box and return its current text.

    The widget's visible label is collapsed; the text is keyed under
    "question_text" in Streamlit's session state.
    """
    return st.text_area(label="Your question ...",
                        placeholder="Ask me anything ...",
                        key="question_text",
                        label_visibility="collapsed")

question_text = get_question()
if question_text and len(question_text) > 1:
    output = ""
    if option_mode == "Internal-Sales":
        # Bypasses the agentController: query the sales agent directly.
        output = salesAgent(question_text)
    elif option_mode == "Internal-Merchant":
        # Bypasses the agentController; chinookAgent also needs the model id.
        output = chinookAgent(question_text, option_llm)
    elif option_mode.startswith("Chat"):
        # Bypasses the agentController: direct chat completion.
        response = chatAgent(question_text)
        if response and getattr(response, "content", None):
            output = response.content
        else:
            # Fall back to the string form of the raw response so the
            # len()/text_area code below never sees a non-string object.
            output = str(response)
    else:  # DEFAULT DEMO: route through the agentController
        output = agentController(question_text, option_llm)

    # Defensive: agents are expected to return text — TODO confirm; coerce
    # so the height computation and text_area below cannot fail.
    output = str(output)
    # Grow the answer box with the answer, clamped to Streamlit's minimum
    # text_area height (68px) and a 280px ceiling. The previous code could
    # pass height=0 for an empty answer, which st.text_area rejects.
    height = max(68, min(2 * len(output), 280))
    st.text_area(label="In response ...", 
                 value=output, height=height)
    # st.code(pprint(output), language="markdown")

##############################################################################

# Inject CSS that restyles the three example text_areas below: any textarea
# whose aria-label starts with "ex" (labels "ex1"/"ex2"/"ex3") is rendered
# smaller, in Arial, and in gray so examples read as hints, not answers.
st.markdown(
    """
    <style>
    textarea[aria-label^="ex"] {
            font-size: 0.8em !important;
            font-family: Arial, sans-serif !important;
            color: gray !important;
    }
    </style>
    """,
    unsafe_allow_html=True,
)

# Three read-only example panels, one per reasoning mode, rendered as
# collapsed-label text_areas (styled gray/small by the CSS injected above).
st.markdown("#### 3 types of reasoning:")
col1, col2, col3 = st.columns([1,1,1])

with col1:
    # General-knowledge questions answerable by the base LLM alone.
    st.markdown("__Common sense reasoning__")
    st.text_area(label="ex1", label_visibility="collapsed", height=120,
                 value="πŸ”Ή Why is the sky blue?\n" +
                       "πŸ”Ή How to avoid touching a hot stove?\n" +
                       "πŸ”Ή Please give tips to win a 3200m track race?\n" +
                       "πŸ”Ή Please advise on how best to prepare for retirement?"
                       )

with col2:
    # Questions served from local/internal data (sales DB, Chinook DB, docs).
    st.markdown("__Local ('secure') reasoning__")
    st.text_area(label="ex2", label_visibility="collapsed", height=120,
                 value="πŸ”Ή For my company, what is the total sales " +
                       "broken down by month, labeled by months?\n" +
                       "πŸ”Ή How many total artists are there in each "+
                       "genres in our digital media database?\n" +
                       "πŸ”Ή How to best govern a city? (The Prince)\n" +
                       "πŸ”Ή How to win a war? (Art of War)",
                       )

with col3:
    # Multi-step questions that need external tools (search, Wolfram-Alpha).
    st.markdown("__Enhanced reasoning__ [🎡](https://www.youtube.com/watch?v=hTTUaImgCyU&t=62s)")
    st.text_area(label="ex3", label_visibility="collapsed", height=120,
                 value="πŸ”Ή Who is the president of South Korea?  " +
                       "What is his favorite song? How old is he? " +
                       "What is the smallest prime greater than his age?\n" +
                       "πŸ”Ή What is the derivative of f(x)=3*log(x)*sin(x)?")

# Static footer illustrations; paths are relative to the app's working dir.
st.image(image="images/plugins.png", width=700, caption="salesforce.com")
st.image(image="images/chinook.png", width=420, caption="Digital Media Schema")

##############################################################################