# liteDungeon / app.py
import gradio as gr
import requests
import os
import re
import random
from words import *
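
# words.py is not shown here; it is assumed to provide at least a `monsters`
# list, which fallbackResponse() below draws from.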
basePrompt="""
The following session was recorded from a text adventure game.
----
"""
story="""
Computer: You approach the entrance of the dungeon.
"""
def fallbackResponse():
    # pick a random monster from the list defined in words.py
    return "You are attacked by a {monster}!".format(monster=random.choice(monsters))
def continue_story(prompt):
    global story
    p = basePrompt + story + "player:" + prompt
    print(f"*****Inside continue_story - prompt is: {p}")
    json_ = {"inputs": p,
             "parameters":
             {
                 "top_p": 0.9,
                 "temperature": 1.1,
                 "max_new_tokens": 50,
                 "return_full_text": False,
             }}
    #response = requests.post(API_URL, headers=headers, json=json_)
    response = requests.post(API_URL, json=json_)
    output = response.json()
    print(f"If there was an error, the reason is: {output}")
    #error handling
    if "error" in output:
        print("using fallback description method!")
        #fallback method
        output_tmp = fallbackResponse()
    else:
        output_tmp = output[0]['generated_text']
    #truncate the response at the first newline
    if "\n" in output_tmp:
        idx = output_tmp.find('\n')
        output_tmp = output_tmp[:idx]
    #check if the response already has a speaker tag; if not, prepend "computer:"
    if ":" not in output_tmp:
        output_tmp = "computer:" + output_tmp
    #truncate the story to its last 6 lines
    story_tmp = story.split("\n")
    if len(story_tmp) > 6:
        story_tmp = story_tmp[-6:]
    story = "\n".join(story_tmp)
    #append the player's command and the computer's response, then return the story
    story = story + "player:" + prompt + "\n" + output_tmp + "\n"
    return story
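
# For reference: a successful Inference API text-generation call typically returns a
# list like [{"generated_text": "..."}], while failures return a dict like
# {"error": "..."}, which is why continue_story checks for an "error" key first.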
demo = gr.Blocks()
with demo:
gr.Markdown("<h1><center>NPC Generator</center></h1>")
gr.Markdown(
"based on <a href=https://huggingface.co/spaces/Gradio-Blocks/GPTJ6B_Poetry_LatentDiff_Illustration> Gradio poetry generator</a>."
"<div>first input name, race and class (or generate them randomly)</div>"
"<div>Next, use GPT-J to generate a short description</div>"
"<div>Finally, Generate an illustration 🎨 provided by <a href=https://huggingface.co/spaces/multimodalart/latentdiffusion>Latent Diffusion model</a>.</div>"
)
    with gr.Row():
        b0 = gr.Button("Submit")
    with gr.Row():
        input_command = gr.Textbox(label="input", placeholder="look around")
    with gr.Row():
        # use a different variable name than the global `story` string so the
        # running text state is not overwritten by the Textbox component
        story_box = gr.Textbox(label="description", lines=7)
    b0.click(continue_story, inputs=[input_command], outputs=[story_box])
    #examples=examples

demo.launch(enable_queue=True, debug=True)