lorentz committed
Commit 1848bf7 · verified · 1 Parent(s): 053628c

Update app.py

Files changed (1)
  1. app.py +62 -8
app.py CHANGED
@@ -5,10 +5,67 @@ from langchain import FewShotPromptTemplate
 from langchain.prompts.example_selector import LengthBasedExampleSelector
 from dotenv import load_dotenv
 
-# Function Definitions (Assuming unchanged, add your actual logic here)
-def getLLMResponse(query, age_option, tasktype_option):
-    # Placeholder for your function's logic. Ensure you replace it with your actual code.
-    return "Response from LLM based on the query and options provided."
+load_dotenv()  # load environment variables from the .env file
+
+def getLLMResponse(query, age_option, tasktype_option):
+    examples = []
+    llm = OpenAI(temperature=0.9, model="gpt-3.5-turbo-instruct")
+
+
+    example_template = """
+    Question: {query}
+    Response: {answer}
+    """
+
+    example_prompt = PromptTemplate(
+        input_variables=["query", "answer"],
+        template=example_template
+    )
+
+
+    prefix = """You are a {template_ageoption}, and you are going to {template_tasktype_option}.
+    You give one answer for each query. It is strictly limited to 1 answer only, and the answer MUST be LESS THAN 200 words.
+    For a tweet, you SHOULD NOT give more than 280 characters. If it is not to write for a tweet, DO NOT give a tweet suggestion in your answer.
+    """
+
+    suffix = """
+    Question: {template_userInput}
+    Response: """
+
+    example_selector = LengthBasedExampleSelector(
+        examples=examples,
+        example_prompt=example_prompt,
+        max_length=numberOfWords
+    )
+
+
+    new_prompt_template = FewShotPromptTemplate(
+        example_selector=example_selector,  # use example_selector instead of examples
+        example_prompt=example_prompt,
+        prefix=prefix,
+        suffix=suffix,
+        input_variables=["template_userInput", "template_ageoption", "template_tasktype_option"],
+        example_separator="\n"
+    )
+
+
+    print(new_prompt_template.format(template_userInput=query, template_ageoption=age_option, template_tasktype_option=tasktype_option))
+    response = llm(new_prompt_template.format(template_userInput=query, template_ageoption=age_option, template_tasktype_option=tasktype_option))
+    print(response)
+
+    return response
+
+# UI starts here
+
+st.set_page_config(page_title="PitchPal: Your Friendly Copy Assistant",
+                   page_icon='💻',
+                   layout='centered',
+                   initial_sidebar_state='collapsed')
+
+
+st.markdown("<h1 style='text-align: center'>PitchPal</h1>", unsafe_allow_html=True)
+st.markdown("<h3 style='text-align: center'>Your Efficient Sales Copy Assistant</h3>", unsafe_allow_html=True)
+st.markdown("<p style='text-align: right'>By <a href='https://entzyeung.github.io/portfolio/index.html'>Lorentz Yeung</a></p>", unsafe_allow_html=True)
 
 # Load environment variables
 load_dotenv()  # Make sure your .env file path is correct
@@ -63,7 +120,4 @@ submit = st.button("Generate Sales Copy")
 if submit:
     response = getLLMResponse(form_input, age_option, tasktype_option)
     st.markdown("## Generated Sales Copy")
-    st.write(response)  # Display the LLM response
-
-    # Note: Ensure that all functions and logic related to LLM response generation are correctly implemented
-    # and replace placeholder texts and functions with your actual application code.
+    st.write(response)  # Display the LLM response
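
Note that the hunk begins at line 5 of app.py, so the imports the new code relies on (streamlit, the OpenAI LLM wrapper, PromptTemplate) sit outside the diff, and numberOfWords, which getLLMResponse passes to LengthBasedExampleSelector, is never defined in the lines shown. The following is a minimal sketch of what the unshown top of the file would need to provide for this hunk to run; the concrete value of numberOfWords is a placeholder, not part of the commit.

# Presumed top of app.py (lines 1-4 are outside this hunk, so these are assumptions)
import streamlit as st                # `st` is used by the UI code added in this commit
from langchain.llms import OpenAI     # provides the OpenAI(...) LLM wrapper called in getLLMResponse
from langchain.prompts import PromptTemplate

# numberOfWords is read inside getLLMResponse but not defined anywhere in this diff;
# it presumably comes from elsewhere in app.py (e.g. a Streamlit input). The value
# below is purely a stand-in so the sketch is self-contained.
numberOfWords = 200

# The .env file read by load_dotenv() is also not part of the commit; the OpenAI
# wrapper expects it to supply OPENAI_API_KEY.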
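
Because examples is initialized to an empty list, the LengthBasedExampleSelector has nothing to select, so the prompt that reaches the model is effectively the formatted prefix followed by the suffix. Below is a small standalone sketch of that composition, runnable without an API key; the input strings are made-up stand-ins for the app's widget values, not values taken from the commit.

from langchain import FewShotPromptTemplate
from langchain.prompts import PromptTemplate
from langchain.prompts.example_selector import LengthBasedExampleSelector

example_prompt = PromptTemplate(
    input_variables=["query", "answer"],
    template="\nQuestion: {query}\nResponse: {answer}\n",
)

# Mirrors `examples = []` in the commit: the selector has nothing to choose from.
selector = LengthBasedExampleSelector(
    examples=[],
    example_prompt=example_prompt,
    max_length=200,  # stand-in for numberOfWords
)

prompt = FewShotPromptTemplate(
    example_selector=selector,
    example_prompt=example_prompt,
    prefix="You are a {template_ageoption}, and you are going to {template_tasktype_option}.",
    suffix="\nQuestion: {template_userInput}\nResponse: ",
    input_variables=["template_userInput", "template_ageoption", "template_tasktype_option"],
    example_separator="\n",
)

print(prompt.format(
    template_userInput="Pitch a reusable smart water bottle",       # hypothetical form_input
    template_ageoption="senior copywriter",                         # hypothetical age_option
    template_tasktype_option="write a product description",         # hypothetical tasktype_option
))
# The printed prompt is the prefix, a blank line, then the suffix with the query
# filled in; no few-shot examples appear because the selector had none.

If examples are later added to the list, LengthBasedExampleSelector keeps including them in order until the rendered examples would exceed max_length (measured roughly in words by default), which is presumably why the commit wires numberOfWords into it.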