File size: 3,415 Bytes
bba9405 5c89db0 bba9405 c286fe6 1bad60f 37c2451 bba9405 84a3d3e 2f6e0aa 02dd561 bba9405 bdbd09e bba9405 0780bf7 bba9405 93819b7 32dcc75 268f446 32dcc75 93819b7 ecc8d69 b186db2 efab0e6 bba9405 0780bf7 5d4f956 bba9405 0780bf7 d950efc bba9405 bdbd09e 0780bf7 bba9405 155bf94 bba9405 268f446 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 |
import streamlit as st
from langchain.llms import OpenAI
from langchain.prompts import PromptTemplate
from langchain import FewShotPromptTemplate
from langchain.prompts.example_selector import LengthBasedExampleSelector
from dotenv import load_dotenv
load_dotenv() # load environment variables (e.g. OPENAI_API_KEY) from a local .env file
def getLLMResponse(query, age_option, tasktype_option):
    """Generate one piece of marketing copy for *query* via the OpenAI LLM.

    Args:
        query: Name of the product/service to promote (user's text input).
        age_option: Target audience age group, e.g. 'age 18-45'.
        tasktype_option: Kind of copy to draft, e.g. 'Draft a Twitter post'.

    Returns:
        The raw text completion returned by the model.

    NOTE(review): this function reads the module-level global
    ``numberOfWords`` defined in the UI section below. That only works
    because Streamlit assigns it before the button handler calls this
    function — consider passing it as a parameter instead.
    """
    # No few-shot examples are supplied yet; the selector will emit none.
    examples = []

    # High temperature (0.9) for more creative copywriting output.
    llm = OpenAI(temperature=.9, model="gpt-3.5-turbo-instruct")

    # Layout used to render each few-shot example (if any are added later).
    # String content intentionally unindented: it is part of the prompt.
    example_template = """
Question: {query}
Response: {answer}
"""

    example_prompt = PromptTemplate(
        input_variables=["query", "answer"],
        template=example_template
    )

    prefix = """You are a {template_ageoption}, and you are going to {template_tasktype_option} ,
you give one answer for each query. it is strictly limited to 1 answer only, and the answer MUST be LESS THAN 200 words.
For a tweet, you SHOULD NOT give more than 280 characters. If it is not to write for a tweet, DO NOT give a tweet suggestion in your answer.
"""

    suffix = """
Question: {template_userInput}
Response: """

    # Drops examples that would push the assembled prompt past the budget.
    example_selector = LengthBasedExampleSelector(
        examples=examples,
        example_prompt=example_prompt,
        max_length=numberOfWords  # module-level global set by the UI below
    )

    new_prompt_template = FewShotPromptTemplate(
        example_selector=example_selector,  # select examples dynamically instead of a fixed list
        example_prompt=example_prompt,
        prefix=prefix,
        suffix=suffix,
        input_variables=["template_userInput", "template_ageoption", "template_tasktype_option"],
        example_separator="\n"
    )

    # Format the prompt once and reuse it (the original formatted it twice:
    # once for the debug print and again for the LLM call).
    prompt_text = new_prompt_template.format(
        template_userInput=query,
        template_ageoption=age_option,
        template_tasktype_option=tasktype_option
    )
    print(prompt_text)  # debug: show the fully assembled prompt

    response = llm(prompt_text)
    print(response)  # debug: show the raw model output
    return response
# UI starts here: page chrome and title/byline header.
st.set_page_config(page_title="PitchPal: Your Friendly Copy Assistant",
                   page_icon='💻',
                   layout='centered',
                   initial_sidebar_state='collapsed')

st.markdown("<h1 style='text-align: center'>PitchPal</h1>", unsafe_allow_html=True)
# Fix: the subtitle opened with <h3> but closed with </h2>; tags now match.
st.markdown("<h3 style='text-align: center'>Your Efficient Sales Copy Assistant</h3>", unsafe_allow_html=True)
st.markdown("<p style='text-align: right'>By <a href='https://entzyeung.github.io/portfolio/index.html'>Lorentz Yeung</a></p>", unsafe_allow_html=True)
# Input widgets for the sales-copy generator form.
form_input = st.text_area('Enter the name of the product or service you want to promote: ', 'PlayStation 6', height=100)

tasktype_option = st.selectbox(
    'Choose the type of marketing copy you want to generate: ',
    ('Draft a Twitter post', 'Draft a sales copy', 'Draft a product description'), key=1)

age_option = st.selectbox(
    'Select the age group of your intended audience: ',
    ('below age 18', 'age 18-45', 'age 46-65', 'age > 65'), key=2)

# numberOfWords= st.slider('Words limit', 1, 200, 25)
numberOfWords = 40  # fixed prompt-length budget; the instruct model doesn't support a user-set limit

submit = st.button("Generate Your Sales Copy")

if submit:
    # BUG FIX: arguments were previously passed as
    # (form_input, tasktype_option, age_option), but the function signature is
    # getLLMResponse(query, age_option, tasktype_option) — the audience and
    # task type were silently swapped in the prompt. Pass them in order.
    st.write(getLLMResponse(form_input, age_option, tasktype_option))