Update app.py
app.py CHANGED
@@ -1,163 +1,78 @@
Old version (removed lines are prefixed with "-"; Korean strings appear here in English translation, and a few lines are cut off or unreadable in the source and are left as they appear):

 import gradio as gr
 from share_btn import community_icon_html, loading_icon_html, share_js
 import re
-import os
-#hf_token = os.environ.get('HF_TOKEN')
 import openai
 OPENAI_API_KEY = os.environ.get('OPENAI_API_KEY')
 from gradio_client import Client
-#client = Client("https://fffiloni-test-llama-api-debug.hf.space/", hf_token=hf_token)
 clipi_client = Client("https://fffiloni-clip-interrogator-2.hf.space/")

-def extract_text_after_colon(input_text):
-
-    colon_index = input_text.find(":")
-
-    # Check if ":" exists in the input_text
-    if colon_index != -1:
-        # Extract the text after the colon
-        result_text = input_text[colon_index + 1:].strip()
-        return result_text
-    else:
-        # Return the original text if ":" is not found
-        return input_text

 def infer(image_input, audience, keyword, protagonist):
     gr.Info('Calling CLIP Interrogator, interpreting the image...')
-    clipi_result = clipi_client.predict(
-        image_input,  # str (filepath or URL to image) in 'parameter_3' Image component
-        "best",       # str in 'Select mode' Radio component
-        4,            # int | float (numeric value between 2 and 24) in 'best mode max flavors' Slider component
-        api_name="/clipi2"
-    )
-    print(clipi_result)

    [... the removed lines that opened the llama_q prompt f-string are unreadable in the source ...]
     Keyword: {keyword}
     Protagonist: {protagonist}
     Answer in Korean.
-    """
-    gr.Info('Calling ChatGPT, creating the story...')
-    #result = client.predict(
-    #                llama_q, # str in 'Message' Textbox component
-    #                "I2S",
-    #                api_name="/predict"
-    #)
-    chat_completion = openai.ChatCompletion.create(model="gpt-3.5-turbo-16k", messages=[{"role": "user", "content": llama_q}])
-    result = chat_completion.choices[0].message.content

-    print(f"Llama2 result: {result}")

    [... the removed lines that split the result into paragraphs are unreadable in the source ...]
-    # Join the paragraphs back with an extra empty line between each paragraph
-    formatted_text = '\n'.join(paragraphs)

     return formatted_text, gr.Group.update(visible=True)

-css="""
-#
-a {text-decoration-line: underline; font-weight: 600;}
-a {text-decoration-line: underline; font-weight: 600;}
-.animate-spin {
-    animation: spin 1s linear infinite;
-}
-@keyframes spin {
-    from {
-        transform: rotate(0deg);
-    }
-    to {
-        transform: rotate(360deg);
-    }
-}
-#share-btn-container {
-    display: flex;
-    padding-left: 0.5rem !important;
-    padding-right: 0.5rem !important;
-    background-color: #000000;
-    justify-content: center;
-    align-items: center;
-    border-radius: 9999px !important;
-    max-width: 13rem;
-}
-div#share-btn-container > div {
-    flex-direction: row;
-    background: black;
-    align-items: center;
-}
-#share-btn-container:hover {
-    background-color: #060606;
-}
-#share-btn {
-    all: initial;
-    color: #ffffff;
-    font-weight: 600;
-    cursor: pointer;
-    font-family: 'IBM Plex Sans', sans-serif;
-    margin-left: 0.5rem !important;
-    padding-top: 0.5rem !important;
-    padding-bottom: 0.5rem !important;
-    right: 0;
-}
-#share-btn * {
-    all: unset;
-}
-#share-btn-container div:nth-child(-n+2){
-    width: auto !important;
-    min-height: 0px !important;
-}
-#share-btn-container .wrap {
-    display: none !important;
-}
-#share-btn-container.hidden {
-    display: none !important;
-}
 """

 with gr.Blocks(css=css) as demo:
     with gr.Column(elem_id="col-container"):
         gr.Markdown(
             """
-            <h1 style="text-align: center">
-            <p style="text-align: center">Upload an image, and through ChatGPT, in Korean
-            <p style="text-align: center">This takes the original https://huggingface.co/spaces/fffiloni/Image-to-Story, has it generate the text in Korean, and replaces Llama with ChatGPT.</p>
-            <p style="text-align: center">ChatGPT responses are sometimes slow or blocked by usage limits.</p>
             """
         )
-
         with gr.Row():
             with gr.Column():
                 image_in = gr.Image(label="Image input", type="filepath", elem_id="image-in", height=420)
                 audience = gr.Radio(label="Audience", choices=["Children", "Adult"], value="Children")
-                keyword_in = gr.Textbox(label="Key keyword")
-                protagonist_in = gr.Textbox(label="Protagonist")
-                submit_btn = gr.Button('
             with gr.Column():
-
-                story = gr.Textbox(label="Generated story", elem_id="story")
-
                 with gr.Group(elem_id="share-btn-container", visible=False) as share_group:
                     community_icon = gr.HTML(community_icon_html)
                     loading_icon = gr.HTML(loading_icon_html)
                     share_button = gr.Button("Share to community", elem_id="share-btn")
-
-        gr.Examples(examples=[["./examples/crabby.png", "Children"],["./examples/hopper.jpeg", "Adult"]],
                     fn=infer,
-                    inputs=[image_in, audience],
                     outputs=[story, share_group],
                     cache_examples=True
                     )

     submit_btn.click(fn=infer, inputs=[image_in, audience, keyword_in, protagonist_in], outputs=[story, share_group])
-    # submit_btn.click(fn=infer, inputs=[image_in, audience], outputs=[story, share_group])
     share_button.click(None, [], [], _js=share_js)

 demo.queue(max_size=12).launch()
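One detail in the removed post-processing: the comment promises to rejoin the paragraphs "with an extra empty line between each paragraph", but '\n'.join(paragraphs) inserts only a single newline, so no blank line ever appears. A one-line sketch of what the comment actually describes (not part of the file):

    # Joining with a double newline leaves an empty line between paragraphs.
    formatted_text = '\n\n'.join(paragraphs)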
New version (added lines are prefixed with "+"; Korean strings again shown in English translation):

 import gradio as gr
 from share_btn import community_icon_html, loading_icon_html, share_js
 import re
+import os
 import openai
 OPENAI_API_KEY = os.environ.get('OPENAI_API_KEY')
 from gradio_client import Client
 clipi_client = Client("https://fffiloni-clip-interrogator-2.hf.space/")

+def generate_image_url(keywords):
+    return f"https://image.pollinations.ai/prompt/{keywords}"

 def infer(image_input, audience, keyword, protagonist):
     gr.Info('Calling CLIP Interrogator, interpreting the image...')
+    clipi_result = clipi_client.predict(image_input, "best", 4, api_name="/clipi2")[0]

+    story_intro = f"""
+    # Illustrated Tales
+    ## Created by [Sigkawat Pengnoo](https://flowgpt.com/prompt/qzv2D3OvHkzkfSE4rQCqv) at FlowGPT
+
     Keyword: {keyword}
     Protagonist: {protagonist}
     Answer in Korean.

+    STORY : "{{ {clipi_result} }}"

+    Let's begin with Chapter 1!
+    """

+    chapters = []
+    current_image_url = generate_image_url(clipi_result)
+    for chapter_num in range(1, 4):  # generate three chapters
+        chapter_prompt = f"{story_intro}\n\n\n\nWrite the content of Chapter {chapter_num}."
+        chat_completion = openai.ChatCompletion.create(model="gpt-3.5-turbo-16k", messages=[{"role": "user", "content": chapter_prompt}])
+        chapter_story = chat_completion.choices[0].message.content
+        chapters.append(chapter_story)
+        current_image_url = generate_image_url(chapter_story)  # image URL for the next chapter

+    formatted_text = '\n'.join(chapters)
     return formatted_text, gr.Group.update(visible=True)

+css = """
+# Put the previously defined CSS code here.
 """

 with gr.Blocks(css=css) as demo:
     with gr.Column(elem_id="col-container"):
         gr.Markdown(
             """
+            <h1 style="text-align: center">Illustrated Tales - Korean</h1>
+            <p style="text-align: center">Upload an image, and ChatGPT will create a story and pictures in Korean!</p>
             """
         )
         with gr.Row():
             with gr.Column():
                 image_in = gr.Image(label="Image input", type="filepath", elem_id="image-in", height=420)
                 audience = gr.Radio(label="Audience", choices=["Children", "Adult"], value="Children")
+                keyword_in = gr.Textbox(label="Key keyword")
+                protagonist_in = gr.Textbox(label="Protagonist")
+                submit_btn = gr.Button('Create a story and pictures')
             with gr.Column():
+                story = gr.Textbox(label="Generated story and pictures", elem_id="story")
                 with gr.Group(elem_id="share-btn-container", visible=False) as share_group:
                     community_icon = gr.HTML(community_icon_html)
                     loading_icon = gr.HTML(loading_icon_html)
                     share_button = gr.Button("Share to community", elem_id="share-btn")
+
+        gr.Examples(examples=[["./examples/crabby.png", "Children", "An adventure at the beach", "A crab protagonist"], ["./examples/hopper.jpeg", "Adult", "A trip in the mountains", "A rabbit protagonist"]],
                     fn=infer,
+                    inputs=[image_in, audience, keyword_in, protagonist_in],
                     outputs=[story, share_group],
                     cache_examples=True
                     )

     submit_btn.click(fn=infer, inputs=[image_in, audience, keyword_in, protagonist_in], outputs=[story, share_group])
     share_button.click(None, [], [], _js=share_js)

 demo.queue(max_size=12).launch()
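Notes on the new version:

The Pollinations endpoint used by generate_image_url takes the prompt as a path segment of the URL, and both the CLIP Interrogator caption and a full chapter of story text contain spaces, commas and newlines. A minimal hardening sketch, keeping the same helper name and using only the standard library (an assumption, not part of the commit):

    from urllib.parse import quote

    def generate_image_url(keywords):
        # Percent-encode the prompt so spaces, punctuation and newlines survive in the URL path.
        return f"https://image.pollinations.ai/prompt/{quote(keywords)}"

As committed, current_image_url is recomputed on every loop iteration but never read, so the generated picture URLs never reach the story textbox. Assuming the intent was one picture per chapter, one way to surface them is to append each URL to its chapter before joining (again a sketch, not the author's code):

    # Inside the for-loop, after chapter_story has been generated:
    chapter_image_url = generate_image_url(chapter_story)
    chapters.append(f"{chapter_story}\n\nChapter {chapter_num} image: {chapter_image_url}")

Finally, openai.ChatCompletion.create is the pre-1.0 interface of the openai Python package; on openai>=1.0 the call fails with an "APIRemovedInV1" error. If the Space ends up with a newer package, the equivalent call looks roughly like this (the chat() helper is illustrative, not from the file, and assumes OPENAI_API_KEY is set in the Space secrets):

    from openai import OpenAI

    client = OpenAI()  # reads OPENAI_API_KEY from the environment

    def chat(prompt):
        # Same model and message shape as the committed code, written against the 1.x client.
        response = client.chat.completions.create(
            model="gpt-3.5-turbo-16k",
            messages=[{"role": "user", "content": prompt}],
        )
        return response.choices[0].message.content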