import streamlit as st
import random
from transformers import pipeline

# Load the text-generation model
# generator = pipeline('text-generation', model='gpt2')
# BLOOM-560M is published under the 'bigscience' namespace on the Hugging Face Hub
generator = pipeline('text-generation', model='bigscience/bloom-560m')
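# Note: Streamlit re-runs this script on every widget interaction, which would
# reload the model each time. A minimal sketch of caching the pipeline instead,
# assuming Streamlit >= 1.18 (which provides st.cache_resource):
#
# @st.cache_resource
# def load_generator():
#     return pipeline('text-generation', model='bigscience/bloom-560m')
#
# generator = load_generator()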

# Set the maximum length (in tokens) of the generated text
max_length = 50

# Define the prompts and solutions
prompts = {
    "Difficulty sleeping": [
        "Try keeping a consistent sleep schedule and avoid caffeine before bedtime.",
        "Make your bedroom a comfortable and calming environment.",
        "Avoid using electronic devices before bedtime.",
        "Try relaxation techniques like deep breathing or meditation.",
        "Consider talking to a healthcare provider if sleep problems persist."
    ],
    "Time management": [
        "Use a planner or time-tracking app to prioritize tasks and stay on schedule.",
        "Break down large tasks into smaller ones.",
        "Limit multitasking and focus on one task at a time.",
        "Delegate tasks to others when possible.",
        "Take regular breaks and avoid overworking yourself."
    ],
    "Stress management": [
        "Practice mindfulness techniques such as deep breathing or meditation.",
        "Get regular exercise to reduce stress and improve mood.",
        "Get enough sleep and practice good sleep habits.",
        "Take breaks throughout the day to reduce stress levels.",
        "Try to identify the sources of stress in your life and develop strategies to manage them."
    ]
}

# Define the function to generate the prompts and solutions
def generate_prompt(prompt):
    # Get a random solution for the prompt
    solution = random.choice(prompts[prompt])
    # Build the prompt text from the selected problem
    prompt_text = f"What can I do about {prompt.lower()}? "
    # Run the model on the prompt (early_stopping only takes effect with beam search)
    output = generator(prompt_text, max_length=max_length, num_return_sequences=1,
                       no_repeat_ngram_size=2, early_stopping=True)
    # Strip the prompt text from the generated output
    output_text = output[0]['generated_text'][len(prompt_text):].strip()
    # Return the generated prompt and solution
    return output_text, solution
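
# Example (hypothetical) standalone call, outside the Streamlit UI:
#   text, tip = generate_prompt("Stress management")
#   print(text)
#   print(tip)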

# Set the app title
st.title('ICL-LM Interface')
# Get the user input
option = st.selectbox('Select a problem:', list(prompts.keys()))

if st.button('Generate Prompt and Solution'):
    # Generate the prompt and solution
    prompt, solution = generate_prompt(option)
    # Display the prompt
    st.write('Prompt:', prompt)
    # Display the solution
    st.write('Solution:', solution)
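
# To launch the app locally (assuming this file is saved as app.py and both
# streamlit and transformers are installed):
#   streamlit run app.py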