Spaces:
Runtime error
Runtime error
Commit
·
a972e32
1
Parent(s):
b913503
add documentation
Browse files- app.py +22 -0
- explanation.py +1 -1
app.py
CHANGED
|
@@ -271,6 +271,28 @@ def main():
|
|
| 271 |
if not ('input_image' in st.session_state and st.session_state['input_image'] is not None):
|
| 272 |
print("Image not present")
|
| 273 |
st.success("Upload an image to start")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 274 |
else:
|
| 275 |
make_prompt_row()
|
| 276 |
|
|
|
|
| 271 |
if not ('input_image' in st.session_state and st.session_state['input_image'] is not None):
|
| 272 |
print("Image not present")
|
| 273 |
st.success("Upload an image to start")
|
| 274 |
+
st.write("Welcome to the interior design controlnet demo! "
|
| 275 |
+
"You can start by uploading a picture of your room, after which you will see "
|
| 276 |
+
"a good variety of options to edit your current room to generate the room of your dreams! "
|
| 277 |
+
"You can choose between inpainting, segmentation conditioning and re-generating objects, which "
|
| 278 |
+
"use our custom trained controlnet model."
|
| 279 |
+
)
|
| 280 |
+
st.write("### About the dataset")
|
| 281 |
+
st.write("To make this demo as good as possible, our team spent a lot of time training a custom model. "
|
| 282 |
+
"We used the LAION5B dataset to build our custom dataset, which contains 130k images of 15 types of rooms "
|
| 283 |
+
"in almost 30 design styles. After fetching all these images, we started adding metadata such as "
|
| 284 |
+
"captions (from the BLIP captioning model) and segmentation maps (from the HuggingFace UperNetForSemanticSegmentation model). "
|
| 285 |
+
)
|
| 286 |
+
st.write("### About the model")
|
| 287 |
+
st.write(
|
| 288 |
+
"These were then used to train the controlnet model to generate quality interior design images by using "
|
| 289 |
+
"the segmentation maps and prompts as conditioning information for the model. "
|
| 290 |
+
"By training on segmentation maps, the end user has very fine-grained control over which objects they "
|
| 291 |
+
"want to place in their room. "
|
| 292 |
+
"The resulting model is then used in a community pipeline that supports image2image and inpainting, "
|
| 293 |
+
"so the user can keep elements of their room and change specific parts of the image."
|
| 294 |
+
""
|
| 295 |
+
)
|
| 296 |
else:
|
| 297 |
make_prompt_row()
|
| 298 |
|
explanation.py
CHANGED
|
@@ -27,4 +27,4 @@ def make_regeneration_explanation():
|
|
| 27 |
st.image("content/regen_example.png", caption="Room where all concepts except for 'bed', 'lamp', 'table' are regenerated")
|
| 28 |
|
| 29 |
def make_segmentation_explanation():
|
| 30 |
-
pass
|
|
|
|
| 27 |
st.image("content/regen_example.png", caption="Room where all concepts except for 'bed', 'lamp', 'table' are regenerated")
|
| 28 |
|
| 29 |
def make_segmentation_explanation():
|
| 30 |
+
pass
|