Update app.py
app.py CHANGED
@@ -1,15 +1,11 @@
 import streamlit as st
 import random
 from transformers import pipeline
+import pandas as pd
 
-# Load the GPT2 model
 generator = pipeline('text-generation', model='gpt2')
-#generator = pipeline('text-generation', model='bloom-560m')
-
-# Set the maximum length of the generated prompt
 max_length = 50
 
-# Define the prompts and solutions
 prompts = {
     "Difficulty sleeping": [
         "Try keeping a consistent sleep schedule and avoid caffeine before bedtime.",
@@ -34,28 +30,25 @@ prompts = {
     ]
 }
 
-# Define the function to generate the prompts and solutions
 def generate_prompt(prompt):
-    # Get a random solution for the prompt
     solution = random.choice(prompts[prompt])
-    # Generate the prompt text
     prompt_text = f"What can I do to {prompt.lower()}? "
-    # Generate the prompt output
     output = generator(prompt_text, max_length=max_length, num_return_sequences=1, no_repeat_ngram_size=2, early_stopping=True)
-    # Decode the prompt output
     output_text = output[0]['generated_text'][len(prompt_text):].strip()
-
-    return output_text, solution
+    return prompt_text, output_text, solution
 
-# Set the app title
 st.title('ICL-LM Interface')
-# Get the user input
 option = st.selectbox('Select a problem:', list(prompts.keys()))
 
 if st.button('Generate Prompt and Solution'):
-
-
-
-
-
-
+    results = []
+    for _ in range(3):
+        prompt_text, prompt, solution = generate_prompt(option)
+        results.append([prompt_text, prompt, solution])
+
+    with open('results.txt', 'w') as f:
+        for result in results:
+            f.write(f"{result[0]}\t{result[1]}\t{result[2]}\n")
+
+    df = pd.read_csv('results.txt', sep='\t', header=None, names=['Input', 'Prompt', 'Solution'])
+    st.write(df)
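
Note: the updated button handler round-trips the three generated rows through results.txt before reading them back with pandas. A minimal in-memory sketch of the same table, assuming the generate_prompt function and option variable defined in app.py above and the same 'Input' / 'Prompt' / 'Solution' column labels, could look like this:

# Sketch only: build the DataFrame directly instead of writing results.txt first.
import pandas as pd
import streamlit as st

if st.button('Generate Prompt and Solution'):
    rows = []
    for _ in range(3):
        # generate_prompt and option come from app.py above (assumed available here)
        prompt_text, output_text, solution = generate_prompt(option)
        rows.append({'Input': prompt_text, 'Prompt': output_text, 'Solution': solution})
    st.write(pd.DataFrame(rows))

Writing the tab-separated results.txt, as the commit does, additionally keeps a copy of the generated rows on disk; the in-memory version above skips that file when persistence is not needed.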