import os

import gradio as gr
import requests

# GPT-J-6B Inference API
API_URL = "https://api-inference.huggingface.co/models/EleutherAI/gpt-j-6B"
# Read the API token from the environment instead of hard-coding a secret
headers = {"Authorization": f"Bearer {os.environ.get('HF_TOKEN')}"}
prompt = """Oh, my life
is changing every day
Every possible way
And oh, my dreams,
it's never quite as it seems
Never quite as it seems"""
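# The lyric above is the few-shot priming context that poem_generate prepends
# to the user's word before calling the model.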
#examples = [["mind"], ["memory"], ["sleep"],["wellness"],["nutrition"]]
def poem2_generate(word):
    p = word.lower() + "\n" + "poem using word: "
    print(f"*****Inside poem2_generate - Prompt is: {p}")
    json_ = {
        "inputs": p,
        "parameters": {
            "top_p": 0.9,
            "temperature": 1.1,
            "max_new_tokens": 50,
            "return_full_text": False,
        },
    }
    response = requests.post(API_URL, headers=headers, json=json_)
    output = response.json()
    print(f"If there was an error, the reason is: {output}")
    output_tmp = output[0]['generated_text']
    print(f"GPT-J response without splits is: {output_tmp}")
    # Keep only the first stanza: cut at the first blank line if there is one,
    # otherwise at the first sentence, falling back to the last line break.
    if "\n\n" not in output_tmp:
        if output_tmp.find('.') != -1:
            idx = output_tmp.find('.')
            poem = output_tmp[:idx + 1]
        else:
            idx = output_tmp.rfind('\n')
            poem = output_tmp[:idx]
    else:
        poem = output_tmp.split("\n\n")[0]
    poem = poem.replace('?', '')
    print(f"Poem being returned is: {poem}")
    return poem
def poem_generate(word):
    p = prompt + word.lower() + "\n" + "poem using word: "
    print(f"*****Inside poem_generate - Prompt is: {p}")
    json_ = {
        "inputs": p,
        "parameters": {
            "top_p": 0.9,
            "temperature": 1.1,
            "max_new_tokens": 50,
            "return_full_text": False,
        },
    }
    response = requests.post(API_URL, headers=headers, json=json_)
    output = response.json()
    print(f"If there was an error, the reason is: {output}")
    output_tmp = output[0]['generated_text']
    print(f"GPT-J response without splits is: {output_tmp}")
    # Same stanza-trimming logic as poem2_generate above.
    if "\n\n" not in output_tmp:
        if output_tmp.find('.') != -1:
            idx = output_tmp.find('.')
            poem = output_tmp[:idx + 1]
        else:
            idx = output_tmp.rfind('\n')
            poem = output_tmp[:idx]
    else:
        poem = output_tmp.split("\n\n")[0]
    poem = poem.replace('?', '')
    print(f"Poem being returned is: {poem}")
    return poem
def poem_to_image(poem):
    print("*****Inside poem_to_image")
    # Flatten the poem into a single line and style it as a painting prompt
    poem = " ".join(poem.split('\n'))
    poem = poem + " oil on canvas."
    steps, width, height, images, diversity = '50', '256', '256', '1', 15
    # Load the latent-diffusion Space as a callable and take its first output
    img = gr.Interface.load("spaces/multimodalart/latentdiffusion")(
        poem, steps, width, height, images, diversity
    )[0]
    return img
def set_example(example: list) -> dict:
    return gr.Textbox.update(value=example[0])
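# set_example is only referenced by the commented-out example Dataset in the
# UI below; when wired up, it fills the input textbox with a clicked sample.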
demo = gr.Blocks()

with demo:
    gr.Markdown("<h1><center>Few Shot Learning for Text - Word Image Search</center></h1>")
    gr.Markdown(
        "https://huggingface.co/blog/few-shot-learning-gpt-neo-and-inference-api, https://github.com/EleutherAI/the-pile"
    )
    with gr.Row():
        input_word = gr.Textbox(lines=7, value=prompt)
        #examples=[["living, loving, feeling good"], ["I want to live. I want to give."],["Ive been to Hollywood. Ive been to Redwood"]]
        #example_text = gr.Dataset(components=[input_word], samples=examples)
        #example_text.click(fn=set_example, inputs=example_text, outputs=example_text.components)
        poem_txt = gr.Textbox(lines=7)
        output_image = gr.Image(type="filepath", shape=(256, 256))

    b1 = gr.Button("Generate Text")
    b2 = gr.Button("Generate Image")

    b1.click(poem2_generate, input_word, poem_txt)
    b2.click(poem_to_image, poem_txt, output_image)

demo.launch(enable_queue=True, debug=True)
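# Minimal local smoke test (hypothetical usage; assumes a valid token is
# exported as HF_TOKEN and that this file is saved as app.py):
#   $ HF_TOKEN=hf_xxx python -c "from app import poem2_generate; print(poem2_generate('memory'))"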