import streamlit as st
from dotenv import load_dotenv
from langchain.llms import OpenAI
from langchain.prompts import PromptTemplate, FewShotPromptTemplate
from langchain.prompts.example_selector import LengthBasedExampleSelector


def getLLMResponse(query, age_option, tasktype_option):
    """Build a few-shot prompt for the selected audience and copy type, then query the LLM."""
    examples = []  # few-shot examples; empty here, so the prompt falls back to prefix + suffix only
    llm = OpenAI(temperature=0.9, model="gpt-3.5-turbo-instruct")
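    # Illustrative few-shot examples (hypothetical placeholders, not part of the original app).
    # Uncomment and adapt them if you want the length-based selector below to have material
    # to choose from; the dict keys must match the example_prompt input variables.
    # examples = [
    #     {"query": "What is a smartwatch?",
    #      "answer": "A tiny assistant on your wrist that keeps time, tracks steps, and nudges you when it matters."},
    #     {"query": "Why buy noise-cancelling headphones?",
    #      "answer": "They turn a noisy commute into your own quiet listening room."},
    # ]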


    example_template = """
    Question: {query}
    Response: {answer}
    """

    example_prompt = PromptTemplate(
        input_variables=["query", "answer"],
        template=example_template
    )


    prefix = """You are a {template_ageoption}, and you are going to {template_tasktype_option} , 
    you give one answer for each query. it is strictly limited to 1 answer only, and the answer MUST be LESS THAN 200 words. 
    For a tweet, you SHOULD NOT give more than 280 characters. If it is not to write for a tweet, DO NOT give a tweet suggestion in your answer.
    """

    suffix = """
    Question: {template_userInput}
    Response: """

    example_selector = LengthBasedExampleSelector(
        examples=examples,
        example_prompt=example_prompt,
        # Cap the combined length (word count) of the selected examples; 200 is an
        # assumed value chosen to match the word limit stated in the prefix above.
        max_length=200
    )


    new_prompt_template = FewShotPromptTemplate(
        example_selector=example_selector,  # use example_selector instead of examples
        example_prompt=example_prompt,
        prefix=prefix,
        suffix=suffix,
        input_variables=["template_userInput","template_ageoption","template_tasktype_option"],
        example_separator="\n"
    )
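    # At format time the template assembles: prefix + any selected examples + suffix,
    # substituting the user's query and the two UI selections into the placeholders.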

  
    prompt_text = new_prompt_template.format(template_userInput=query, template_ageoption=age_option, template_tasktype_option=tasktype_option)
    print(prompt_text)  # log the assembled prompt for debugging
    response = llm(prompt_text)
    print(response)

    return response

# UI starts here
# Load environment variables
load_dotenv()  # Make sure your .env file path is correct
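# The LangChain OpenAI wrapper reads the API key from the environment; a typical .env
# entry (an assumption about your setup, adjust as needed) would be:
#   OPENAI_API_KEY=<your-openai-api-key>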

# Streamlit App Configuration
st.set_page_config(page_title="PitchPal: Your Friendly Copy Assistant",
                   page_icon="💻",
                   layout="wide",
                   initial_sidebar_state="collapsed")

# Custom CSS for styling
st.markdown(
    """
    <style>
    .big-font {
        font-size:20px !important;
    }
    .title-font {
        font-size:30px !important;
        font-weight: bold;
    }
    .streamlit-container {
        margin-top: 2rem;
    }
    </style>
    """, unsafe_allow_html=True)

# Header Section
st.markdown("<h1 style='text-align: center; color: #1144aa'>PitchPal: Your Efficient Sales Copy Assistant</h1>", unsafe_allow_html=True)
st.markdown("<h3 style='text-align: center; color: #333'>Craft compelling sales copy with ease</h3>", unsafe_allow_html=True)
st.markdown("<p style='text-align: right; font-size:14px;'>By <a href='https://entzyeung.github.io/portfolio/index.html'>Lorentz Yeung</a></p>", unsafe_allow_html=True)

# User Input Section with Improved Layout
col1, col2 = st.columns(2)

with col1:
    form_input = st.text_area('Enter the product or service:', 'PlayStation 6', height=150)

with col2:
    tasktype_option = st.selectbox(
        'Marketing copy type:',
        ('Twitter post', 'Sales copy', 'Product description'),
        index=1)
    age_option = st.selectbox(
        'Audience age group:',
        ('Below 18', '18-45', '46-65', '> 65'),
        index=1)

# Submit Button for Generating Sales Copy
submit = st.button("Generate Sales Copy")

if submit:
    response = getLLMResponse(form_input, age_option, tasktype_option)
    st.markdown("## Generated Sales Copy")
    st.write(response)  # Display the LLM response
