import gradio as gr
import requests
import os
import re
import random
from words import *  # expected to provide the `monsters` collection used by the fallback response

# GPT-J-6B Inference API endpoint
API_URL = "https://api-inference.huggingface.co/models/EleutherAI/gpt-j-6B"
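# Note (assumption, not part of the original app): anonymous calls to the hosted
# Inference API are rate-limited and can return an {"error": ...} payload while
# the model loads. If you have a Hugging Face token, it could be passed like:
#
#   headers = {"Authorization": f"Bearer {os.environ.get('HF_TOKEN', '')}"}
#   requests.post(API_URL, headers=headers, json=json_)
#
# The HF_TOKEN environment variable name is only an example; the original Space
# does not define it.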
basePrompt = """
The following session was recorded from a text adventure game.
----
"""

story = """
Computer: You approach the entrance of the dungeon.
"""

def fallbackResponse():
    """Return a canned response when the model call fails."""
    return "You are attacked by a {monster}!".format(monster=random.choice(monsters))

def continue_story(prompt):
    global story
    # Assemble the full prompt: framing text + story so far + the player's input.
    p = basePrompt + story + "player:" + str(prompt)
    print(f"*****Inside continue_story - Prompt is: {p}")
json_ = {"inputs": p, | |
"parameters": | |
{ | |
"top_p": 0.9, | |
"temperature": 1.1, | |
"max_new_tokens": 50, | |
"return_full_text": False, | |
}} | |
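    # Parameter notes (descriptive only; values unchanged from the original):
    # top_p and temperature control sampling randomness, max_new_tokens caps the
    # length of the continuation, and return_full_text=False asks the API to
    # return only the generated continuation rather than prompt + continuation.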
    # response = requests.post(API_URL, headers=headers, json=json_)
    response = requests.post(API_URL, json=json_)
    output = response.json()
    print(f"If there was an error, the reason is: {output}")

    # Error handling: fall back to a canned line if the API returned an error.
    if "error" in output:
        print("using fallback description method!")
        output_tmp = fallbackResponse()
    else:
        output_tmp = output[0]["generated_text"]
    # Truncate the response at the first newline.
    if "\n" in output_tmp:
        idx = output_tmp.find("\n")
        output_tmp = output_tmp[:idx]

    # If the response has no speaker tag (no ":"), prefix it with "computer:".
    if ":" not in output_tmp:
        output_tmp = "computer:" + output_tmp
    # Keep only the last 6 lines of the story as rolling context.
    story_tmp = story.split("\n")
    if len(story_tmp) > 6:
        story_tmp = story_tmp[-6:]
    story = "\n".join(story_tmp)

    # Append the player's input and the model's reply, then return the story.
    story = story + "player:" + prompt + "\n" + output_tmp + "\n"
    return story

demo = gr.Blocks()

with demo:
    gr.Markdown("<h1><center>NPC Generator</center></h1>")
    gr.Markdown(
        "Based on the <a href=https://huggingface.co/spaces/Gradio-Blocks/GPTJ6B_Poetry_LatentDiff_Illustration>Gradio poetry generator</a>."
        "<div>First, input a name, race and class (or generate them randomly).</div>"
        "<div>Next, use GPT-J to generate a short description.</div>"
        "<div>Finally, generate an illustration 🎨 provided by the <a href=https://huggingface.co/spaces/multimodalart/latentdiffusion>latent diffusion model</a>.</div>"
    )
    with gr.Row():
        output_story = gr.Textbox(value=story, label="story", lines=7)
    with gr.Row():
        input_command = gr.Textbox(label="input", placeholder="look around")
    with gr.Row():
        b0 = gr.Button("Submit")
        b0.click(continue_story, inputs=[input_command], outputs=[output_story])
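        # Optional addition (not in the original): Gradio Textbox components also
        # expose a .submit() event, so pressing Enter in the input box could
        # trigger the same handler:
        # input_command.submit(continue_story, inputs=[input_command], outputs=[output_story])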
demo.launch(enable_queue=True, debug=True)