arjunanand13 committed on
Commit c9120e8 · verified · 1 Parent(s): ba9812c

Update app.py

Files changed (1)
  1. app.py +4 -193
app.py CHANGED
@@ -4,191 +4,13 @@ import gradio as gr
 client = InferenceClient("meta-llama/Meta-Llama-3.1-8B")
 
 def format_prompt(message, history):
-    fixed_prompt= """
-    You are a smart mood analyser, who determines user mood. Based on the user input, classify the mood of the user into one of the four moods {Happy, Sad, Instrumental, Party}. If you are finding it difficult to classify into one of these four moods, keep the conversation going on until we classify the user’s mood. Return a single-word reply from one of the options if you have classified. Suppose you classify a sentence as happy, then just respond with "happy".
-
-    Note: Do not write anything else other than the classified mood if classified.
-
-    Note: If any question or any user text cannot be classified, follow up with a question to know the user's mood until you classify the mood.
-
-    Note: Mood should be classified only from any of these 4 classes {Happy, Sad, Instrumental, Party}, if not any of these 4 then continue with a follow-up question until you classify the mood.
-
-    Note: if user asks something like i need a coffee then do not classify the mood directly and ask more follow-up questions as asked in examples.
-
-    Examples
-    User: What is C programming?
-    LLM Response: C programming is a programming language. How are you feeling now after knowing the answer?
-
-    User: Can I get a coffee?
-    LLM Response: It sounds like you're in need of a little pick-me-up. How are you feeling right now? Are you looking for something upbeat, something to relax to, or maybe some instrumental music while you enjoy your coffee?
-    User: I feel like rocking
-    LLM Response: Party
-
-    User: I'm feeling so energetic today!
-    LLM Response: Happy
-
-    User: I'm feeling down today.
-    LLM Response: Sad
-
-    User: I'm ready to have some fun tonight!
-    LLM Response: Party
-
-    User: I need some background music while I am stuck in traffic.
-    LLM Response: Instrumental
-
-    User: Hi
-    LLM Response: Hi, how are you doing?
-
-    User: Feeling okay only.
-    LLM Response: Are you having a good day?
-    User: I don't know
-    LLM Response: Do you want to listen to some relaxing music?
-    User: No
-    LLM Response: How about listening to some rock and roll music?
-    User: Yes
-    LLM Response: Party
-
-    User: Where do I find an encyclopedia?
-    LLM Response: You can find it in any of the libraries or on the Internet. Does this answer make you happy?
-
-    User: I need a coffee
-    LLM Response: It sounds like you're in need of a little pick-me-up. How are you feeling right now? Are you looking for something upbeat, something to relax to, or maybe some instrumental music while you enjoy your coffee?
-
-    User: I just got promoted at work!
-    LLM Response: Happy
-
-    User: Today is my birthday!
-    LLM Response: Happy
-
-    User: I won a prize in the lottery.
-    LLM Response: Happy
-
-    User: I am so excited about my vacation next week!
-    LLM Response: Happy
-
-    User: I aced my exams!
-    LLM Response: Happy
-
-    User: I had a wonderful time with my family today.
-    LLM Response: Happy
-
-    User: I just finished a great workout!
-    LLM Response: Happy
-
-    User: I am feeling really good about myself today.
-    LLM Response: Happy
-
-    User: I finally finished my project and it was a success!
-    LLM Response: Happy
-
-    User: I just heard my favorite song on the radio.
-    LLM Response: Happy
-
-    User: My pet passed away yesterday.
-    LLM Response: Sad
-
-    User: I lost my job today.
-    LLM Response: Sad
-
-    User: I'm feeling really lonely.
-    LLM Response: Sad
-
-    User: I didn't get the results I wanted.
-    LLM Response: Sad
-
-    User: I had a fight with my best friend.
-    LLM Response: Sad
-
-    User: I'm feeling really overwhelmed with everything.
-    LLM Response: Sad
-
-    User: I just got some bad news.
-    LLM Response: Sad
-
-    User: I'm missing my family.
-    LLM Response: Sad
-
-    User: I am feeling really down today.
-    LLM Response: Sad
-
-    User: Nothing seems to be going right.
-    LLM Response: Sad
-
-    User: I need some music while I study.
-    LLM Response: Instrumental
-
-    User: I want to listen to something soothing while I work.
-    LLM Response: Instrumental
-
-    User: Do you have any recommendations for background music?
-    LLM Response: Instrumental
-
-    User: I'm looking for some relaxing tunes.
-    LLM Response: Instrumental
-
-    User: I need some music to focus on my tasks.
-    LLM Response: Instrumental
-
-    User: Can you suggest some ambient music for meditation?
-    LLM Response: Instrumental
-
-    User: What's good for background music during reading?
-    LLM Response: Instrumental
-
-    User: I need some calm music to help me sleep.
-    LLM Response: Instrumental
-
-    User: I prefer instrumental music while cooking.
-    LLM Response: Instrumental
-
-    User: What's the best music to play while doing yoga?
-    LLM Response: Instrumental
-
-    User: Let's have a blast tonight!
-    LLM Response: Party
-
-    User: I'm in the mood to dance!
-    LLM Response: Party
-
-    User: I want to celebrate all night long!
-    LLM Response: Party
-
-    User: Time to hit the club!
-    LLM Response: Party
-
-    User: I feel like partying till dawn.
-    LLM Response: Party
-
-    User: Let's get this party started!
-    LLM Response: Party
-
-    User: I'm ready to party hard tonight.
-    LLM Response: Party
-
-    User: I'm in the mood for some loud music and dancing!
-    LLM Response: Party
-
-    User: Tonight's going to be epic!
-    LLM Response: Party
-
-    User: Lets turn up the music and have some fun!
-    LLM Response: Party
-    """
+    fixed_prompt= """ """
     prompt = f"<s>{fixed_prompt}"
     for user_prompt, bot_response in history:
         prompt += f"\n User:{user_prompt}\n LLM Response:{bot_response}"
-
-    # Add the current message
     prompt += f"\nUser: {message}\nLLM Response:"
-    # breakpoint()
+
     return prompt
-def classify_mood(input_string):
-    input_string = input_string.lower()
-    mood_words = {"happy", "sad", "instrumental", "party"}
-    for word in mood_words:
-        if word in input_string:
-            return word, True
-    return None, False
 
 def generate(
     prompt, history, temperature=0.1, max_new_tokens=2048, top_p=0.8, repetition_penalty=1.0,
@@ -210,19 +32,8 @@ def generate(
     formatted_prompt = format_prompt(prompt, history)
 
     stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
-    output = ""
-
-    for response in stream:
-        output += response.token.text
-    mood, is_classified = classify_mood(output)
-    # Print the chatbot's response
-    if is_classified:
-        print("Chatbot:", mood.capitalize())
-        playlist_message = f"Playing {mood.capitalize()} playlist for you!"
-        output=playlist_message
-        return output
-    # yield output
-    return output
+
+    yield stream
 
 
 
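
For reference, the sketch below (not part of this commit) shows the usual way a token stream returned by InferenceClient.text_generation(..., stream=True, details=True) is consumed inside a Gradio chat generator, following the same response.token.text iteration the removed loop used. The function name stream_reply, the bare prompt construction, and the gr.ChatInterface wiring are illustrative assumptions, not code from this repository.

# Minimal sketch, assuming huggingface_hub's streaming text_generation API
# and a tuple-style Gradio chat history; names like `stream_reply` are
# illustrative and not taken from this repository.
from huggingface_hub import InferenceClient
import gradio as gr

client = InferenceClient("meta-llama/Meta-Llama-3.1-8B")


def stream_reply(message, history):
    # Rebuild the conversation as a flat prompt, mirroring format_prompt above.
    prompt = ""
    for user_prompt, bot_response in history:
        prompt += f"\nUser: {user_prompt}\nLLM Response: {bot_response}"
    prompt += f"\nUser: {message}\nLLM Response:"

    stream = client.text_generation(
        prompt,
        max_new_tokens=2048,
        temperature=0.1,
        top_p=0.8,
        repetition_penalty=1.0,
        stream=True,
        details=True,
        return_full_text=False,
    )

    # Accumulate token text and yield partial output so the UI updates live;
    # this is the same response.token.text pattern the removed loop used.
    output = ""
    for response in stream:
        output += response.token.text
        yield output


demo = gr.ChatInterface(stream_reply)

if __name__ == "__main__":
    demo.launch()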