import os

import gradio as gr
import requests

# GPT-J-6B Inference API
API_URL = "https://api-inference.huggingface.co/models/EleutherAI/gpt-j-6B"
# Read the access token from the environment rather than hard-coding it in the
# source (the variable name HF_TOKEN is an editorial choice).
headers = {"Authorization": f"Bearer {os.environ['HF_TOKEN']}"}
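
# Hedged sketch, not part of the original Space: the hosted Inference API
# returns a JSON object with an "error" key (and usually an "estimated_time")
# while the model is still loading, so a small retry wrapper can make calls
# like the ones below more robust.
import time

def query_with_retry(payload, retries=3, wait=10.0):
    for _ in range(retries):
        output = requests.post(API_URL, headers=headers, json=payload).json()
        if isinstance(output, list):  # a list of generations means success
            return output
        time.sleep(wait)  # likely still loading; wait and try again
    return output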
prompt = """AI: I am using AI to solve cognitive memory
Human: I love you AI
---
AI: Today I want to tell you that you matter to me
Human: Thank you AI, you are a great friend to me.
---
AI: Today I will teach you to code!
Human:"""
examples = [["river"], ["night"], ["trees"], ["table"], ["laughs"]]
def poem2_generate(word):
    p = word.lower() + "\n" + "poem using word: "
    print(f"*****Inside poem2_generate - Prompt is: {p}")
    json_ = {"inputs": p,
             "parameters":
             {
                 "top_p": 0.9,
                 "temperature": 1.1,
                 "max_new_tokens": 50,
                 "return_full_text": False
             }}
    response = requests.post(API_URL, headers=headers, json=json_)
    output = response.json()
    print(f"If there was an error, the reason is: {output}")
    output_tmp = output[0]['generated_text']
    print(f"GPTJ response without splits is: {output_tmp}")
    if "\n\n" not in output_tmp:
        # No blank-line break in the generation: cut at the first period if
        # there is one, otherwise at the last newline.
        if output_tmp.find('.') != -1:
            idx = output_tmp.find('.')
            poem = output_tmp[:idx + 1]
        else:
            idx = output_tmp.rfind('\n')
            poem = output_tmp[:idx]
    else:
        # Keep only the first paragraph of the generation.
        poem = output_tmp.split("\n\n")[0]
    poem = poem.replace('?', '')
    print(f"Poem being returned is: {poem}")
    return poem
def poem_generate(word):
    p = prompt + word.lower() + "\n" + "poem using word: "
    print(f"*****Inside poem_generate - Prompt is: {p}")
    json_ = {"inputs": p,
             "parameters":
             {
                 "top_p": 0.9,
                 "temperature": 1.1,
                 "max_new_tokens": 50,
                 "return_full_text": False
             }}
    response = requests.post(API_URL, headers=headers, json=json_)
    output = response.json()
    print(f"If there was an error, the reason is: {output}")
    output_tmp = output[0]['generated_text']
    print(f"GPTJ response without splits is: {output_tmp}")
    if "\n\n" not in output_tmp:
        # No blank-line break in the generation: cut at the first period if
        # there is one, otherwise at the last newline.
        if output_tmp.find('.') != -1:
            idx = output_tmp.find('.')
            poem = output_tmp[:idx + 1]
        else:
            idx = output_tmp.rfind('\n')
            poem = output_tmp[:idx]
    else:
        # Keep only the first paragraph of the generation.
        poem = output_tmp.split("\n\n")[0]
    poem = poem.replace('?', '')
    print(f"Poem being returned is: {poem}")
    return poem
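
# Editorial sketch, not part of the original app: poem2_generate and
# poem_generate differ only in how the prompt is built, so the shared
# request-and-trim logic could be factored into one helper like this.
def generate_poem_from_prompt(p):
    json_ = {"inputs": p,
             "parameters": {"top_p": 0.9,
                            "temperature": 1.1,
                            "max_new_tokens": 50,
                            "return_full_text": False}}
    output = requests.post(API_URL, headers=headers, json=json_).json()
    text = output[0]['generated_text']
    if "\n\n" in text:
        poem = text.split("\n\n")[0]
    elif '.' in text:
        poem = text[:text.find('.') + 1]
    else:
        poem = text[:text.rfind('\n')]
    return poem.replace('?', '')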
def poem_to_image(poem):
    print("*****Inside poem_to_image")
    # Flatten the poem to a single line and add a style hint for the image model.
    poem = " ".join(poem.split('\n'))
    poem = poem + " oil on canvas."
    steps, width, height, images, diversity = '50', '256', '256', '1', 15
    # Delegate image generation to the multimodalart/latentdiffusion Space.
    img = gr.Interface.load("spaces/multimodalart/latentdiffusion")(poem, steps, width, height, images, diversity)[0]
    return img
demo = gr.Blocks()

with demo:
    gr.Markdown("<h1><center>Few Shot Learning for Text - Word Image Search</center></h1>")
    gr.Markdown(
        "<div>This example uses prompt engineering to query the EleutherAI "
        "GPT-J-6B large language model, following the few-shot learning "
        "pattern: you supply 1) a task description, 2) a set of examples, and "
        "3) a prompt, and the model completes an answer that follows the "
        "pattern of the examples. More on how this works: "
        "https://huggingface.co/blog/few-shot-learning-gpt-neo-and-inference-api "
        "The model was trained on a text corpus called The Pile, documented on "
        "its GitHub; review it to see what kinds of language patterns the "
        "model can generate: https://github.com/EleutherAI/the-pile</div>"
    )
    with gr.Row():
        input_word = gr.Textbox(lines=7, value=prompt)
        poem_txt = gr.Textbox(lines=7)
        output_image = gr.Image(type="filepath", shape=(256, 256))
    b1 = gr.Button("Generate Text")
    b2 = gr.Button("Generate Image")
    b1.click(poem2_generate, input_word, poem_txt)
    b2.click(poem_to_image, poem_txt, output_image)
    # examples=examples

demo.launch(enable_queue=True, debug=True)