Update app.py
app.py CHANGED
@@ -1,9 +1,17 @@
+import sys
+sys.path.append('../')
+
 import os
 import gradio as gr
 import requests
 import json
+import cv2
+import numpy as np
 from PIL import Image
 
+from fr.engine.header import *
+from fl.engine.header import *
+
 css = """
 .example-image img{
     display: flex; /* Use flexbox to align items */
@@ -76,6 +84,48 @@ css = """
 
 """
 
+file_path = os.path.abspath(__file__)
+gradio_path = os.path.dirname(file_path)
+root_path = os.path.dirname(gradio_path)
+
+g_fr_activation_result = -1
+g_fl_activation_result = -1
+MATCH_THRESHOLD = 0.67
+
+def activate_fr_sdk():
+    fr_key = os.environ.get("FR_LICENSE_KEY")
+    fr_dict_path = os.path.join(root_path, "fr/engine/bin")
+
+    ret = -1
+    if fr_key is None:
+        print_warning("Recognition online license key not found!")
+    else:
+        ret = fr.engine.header.init_sdk(fr_dict_path.encode('utf-8'), fr_key.encode('utf-8'))
+
+    if ret == 0:
+        print_log("Successfully init FR SDK!")
+    else:
+        print_error(f"Failed to init FR SDK, Error code {ret}")
+
+    return ret
+
+def activate_fl_sdk():
+    fl_key = os.environ.get("FL_LICENSE_KEY")
+    fl_dict_path = os.path.join(root_path, "fl/engine/bin")
+
+    ret = -1
+    if fl_key is None:
+        print_warning("Liveness Detection online license key not found!")
+    else:
+        ret = fl.engine.header.init_sdk(fl_dict_path.encode('utf-8'), fl_key.encode('utf-8'))
+
+    if ret == 0:
+        print_log("Successfully init FL SDK!")
+    else:
+        print_error(f"Failed to init FL SDK, Error code {ret}")
+
+    return ret
+
 def convert_fun(input_str):
     # Remove line breaks and extra whitespaces
     return ' '.join(input_str.split())
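The two activation helpers above read their license keys from the environment and return the SDK status code (0 on success). A minimal smoke-test sketch of that flow outside Gradio; the placeholder key values and the assumption that this file is importable as `app` are mine, not part of the commit:

```python
# Sketch: exercise the activation helpers added in this commit.
# FR_LICENSE_KEY / FL_LICENSE_KEY are assumed to be provided, e.g. as Space secrets.
import os

os.environ.setdefault("FR_LICENSE_KEY", "<your-fr-license-key>")  # hypothetical placeholder
os.environ.setdefault("FL_LICENSE_KEY", "<your-fl-license-key>")  # hypothetical placeholder

from app import activate_fr_sdk, activate_fl_sdk  # assumes app.py is importable as `app`

if activate_fr_sdk() != 0 or activate_fl_sdk() != 0:
    raise SystemExit("SDK activation failed; check the license keys")
print("FR and FL SDKs activated")
```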
@@ -251,7 +301,6 @@ def get_attributes(frame):
     return face_crop, one_line_attribute
 
 def check_liveness(frame):
-
     url = "https://recognito-faceliveness.p.rapidapi.com/api/check_liveness"
     try:
        files = {'image': open(frame, 'rb')}
@@ -313,6 +362,7 @@ def analyze_face(frame):
 
 
 def compare_face(frame1, frame2):
+    """
     url = "https://recognito.p.rapidapi.com/api/compare_face"
     try:
         files = {'image1': open(frame1, 'rb'), 'image2': open(frame2, 'rb')}
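The next hunk swaps the RapidAPI call inside `compare_face` for the on-device engine: each uploaded file is read as raw bytes and decoded into an OpenCV BGR matrix before comparison. A minimal sketch of just that decode step; the helper name and example path are illustrative, not from the commit:

```python
# Sketch of the np.frombuffer + cv2.imdecode pattern used by the rewritten compare_face.
import cv2
import numpy as np

def load_bgr(path: str) -> np.ndarray:
    """Read an image file and decode it into an OpenCV BGR matrix (H x W x 3)."""
    with open(path, 'rb') as f:
        buf = np.frombuffer(f.read(), np.uint8)
    mat = cv2.imdecode(buf, cv2.IMREAD_COLOR)
    if mat is None:
        raise ValueError(f"Could not decode image: {path}")
    return mat

print(load_bgr("examples/1.jpg").shape)  # e.g. (height, width, 3)
```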
@@ -395,94 +445,166 @@ def compare_face(frame1, frame2):
         similarity_score = f"""<br/><div class="markdown-fail-container"><p style="text-align: center; font-size: 20px; color: red;">Similarity score: {str_score}</p></div>"""
 
     return [face1, face2, matching_result, similarity_score]
-
-    # This function will be called whenever a new image is set for the gr.Image component
-    print("New image set:", image_data)
-
[… additional deleted lines are not legible in this view …]
-                with gr.Column(scale=1):
-                    compare_face_input1 = gr.Image(label="Image1", type='filepath', elem_classes="example-image")
-                    gr.Examples(['examples/1.jpg', 'examples/2.jpg', 'examples/3.jpg', 'examples/4.jpg'],
-                                inputs=compare_face_input1)
-                with gr.Column(scale=1):
-                    compare_face_input2 = gr.Image(label="Image2", type='filepath', elem_classes="example-image")
-                    gr.Examples(['examples/5.jpg', 'examples/6.jpg', 'examples/7.jpg', 'examples/8.jpg'],
-                                inputs=compare_face_input2)
-
-            compare_face_button = gr.Button("Compare Face", variant="primary", size="lg")
-            with gr.Row(elem_classes="face-row"):
-                face_output1 = gr.Image(value="icons/face.jpg", label="Face 1", scale=0, elem_classes="face-image", show_share_button=False, show_download_button=False, show_fullscreen_button=False)
-                compare_result = gr.Image(value="icons/blank.png", min_width=30, scale=0, show_download_button=False, show_label=False, show_share_button=False, show_fullscreen_button=False)
-                face_output2 = gr.Image(value="icons/face.jpg", label="Face 2", scale=0, elem_classes="face-image", show_share_button=False, show_download_button=False, show_fullscreen_button=False)
-            similarity_markdown = gr.Markdown("")
-
-            compare_face_button.click(compare_face, inputs=[compare_face_input1, compare_face_input2], outputs=[face_output1, face_output2, compare_result, similarity_markdown])
-
-    with gr.Tab("Face Liveness, Analysis"):
-        with gr.Row():
-            with gr.Column(scale=1):
-                face_input = gr.Image(label="Image", type='filepath', elem_classes="example-image")
-                gr.Examples(['examples/att_1.jpg', 'examples/att_2.jpg', 'examples/att_3.jpg', 'examples/att_4.jpg', 'examples/att_5.jpg', 'examples/att_6.jpg', 'examples/att_7.jpg'],
-                            inputs=face_input)
-
-            with gr.Blocks():
-                with gr.Column(scale=1, elem_classes="block-background"):
-                    analyze_face_button = gr.Button("Analyze Face", variant="primary", size="lg")
-                    with gr.Row(elem_classes="face-row"):
-                        face_output = gr.Image(value="icons/face.jpg", label="Face", scale=0, elem_classes="face-image", show_share_button=False, show_download_button=False, show_fullscreen_button=False)
-
-                    attribute_result = gr.Markdown("")
-
-                    analyze_face_button.click(analyze_face, inputs=face_input, outputs=[face_output, liveness_result, attribute_result])
-
-    gr.HTML('<a href="https://visitorbadge.io/status?path=https%3A%2F%2Fhuggingface.co%2Fspaces%2FRecognito%2FFaceRecognition-LivenessDetection-FaceAnalysis"><img src="https://api.visitorbadge.io/api/combined?path=https%3A%2F%2Fhuggingface.co%2Fspaces%2FRecognito%2FFaceRecognition-LivenessDetection-FaceAnalysis&countColor=%2337d67a&style=flat&labelStyle=upper" /></a>')
-
+    """
+    global g_fr_activation_result
+    if g_fr_activation_result != 0:
+        gr.Warning("FR SDK Activation Failed!")
+        return None, None, None, None
+
+    try:
+        image1 = open(frame1, 'rb')
+        image2 = open(frame2, 'rb')
+    except:
+        raise gr.Error("Please select image files!")
+
+    image_mat1 = cv2.imdecode(np.frombuffer(image1.read(), np.uint8), cv2.IMREAD_COLOR)
+    image_mat2 = cv2.imdecode(np.frombuffer(image2.read(), np.uint8), cv2.IMREAD_COLOR)
+    start_time = time.time()
+    result, score, face_bboxes, face_features = compare_face(image_mat1, image_mat2, float(MATCH_THRESHOLD))
+    end_time = time.time()
+    process_time = (end_time - start_time) * 1000
+
+    try:
+        image1 = Image.open(frame1)
+        image2 = Image.open(frame2)
+        images = [image1, image2]
+
+        face1 = Image.new('RGBA', (150, 150), (80, 80, 80, 0))
+        face2 = Image.new('RGBA', (150, 150), (80, 80, 80, 0))
+        faces = [face1, face2]
+
+        face_bboxes_result = []
+        if face_bboxes is not None:
+            for i, bbox in enumerate(face_bboxes):
+                x1 = bbox[0]
+                y1 = bbox[1]
+                x2 = bbox[2]
+                y2 = bbox[3]
+                if x1 < 0:
+                    x1 = 0
+                if y1 < 0:
+                    y1 = 0
+                if x2 >= images[i].width:
+                    x2 = images[i].width - 1
+                if y2 >= images[i].height:
+                    y2 = images[i].height - 1
+
+                face_bbox_str = f"x1: {x1}, y1: {y1}, x2: {x2}, y2: {y2}"
+                face_bboxes_result.append(face_bbox_str)
+
+                faces[i] = images[i].crop((x1, y1, x2, y2))
+                face_image_ratio = faces[i].width / float(faces[i].height)
+                resized_w = int(face_image_ratio * 150)
+                resized_h = 150
+
+                faces[i] = faces[i].resize((int(resized_w), int(resized_h)))
+    except:
+        pass
+
+    matching_result = Image.open(os.path.join(gradio_path, "icons/blank.png"))
+    similarity_score = ""
+    if faces[0] is not None and faces[1] is not None:
+        if score is not None:
+            str_score = str("{:.4f}".format(score))
+            if result == "SAME PERSON":
+                matching_result = Image.open(os.path.join(gradio_path, "icons/same.png"))
+                similarity_score = f"""<br/><div class="markdown-success-container"><p style="text-align: center; font-size: 20px; color: green;">Similarity score: {str_score}</p></div>"""
+            else:
+                matching_result = Image.open(os.path.join(gradio_path, "icons/different.png"))
+                similarity_score = f"""<br/><div class="markdown-fail-container"><p style="text-align: center; font-size: 20px; color: red;">Similarity score: {str_score}</p></div>"""
+
+    return faces[0], faces[1], matching_result, similarity_score
+
+
+def launch_demo(activate_fr_result, activate_fl_result):
+    with gr.Blocks(css=css) as demo:
+        gr.Markdown(
+            """
+            <a href="https://recognito.vision" style="display: flex; align-items: center;">
+                <img src="https://recognito.vision/wp-content/uploads/2024/03/Recognito-modified.png" style="width: 8%; margin-right: 15px;"/>
+                <div>
+                    <p style="font-size: 32px; font-weight: bold; margin: 0;">Recognito</p>
+                    <p style="font-size: 18px; margin: 0;">www.recognito.vision</p>
+                </div>
+            </a>
+
+            <p style="font-size: 20px; font-weight: bold;">NIST FRVT Top #1 Face Recognition Algorithm Developer</p>
+            <div style="display: flex; align-items: center;">
+                <a href="https://pages.nist.gov/frvt/html/frvt11.html"><p style="font-size: 14px;">Latest NIST FRVT Report</p></a>
+            </div>
+            <p style="font-size: 20px; font-weight: bold;">Product Documentation</p>
+            <div style="display: flex; align-items: center;">
+                <a href="https://docs.recognito.vision" style="display: flex; align-items: center;"><img src="https://recognito.vision/wp-content/uploads/2024/05/book.png" style="width: 48px; margin-right: 5px;"/></a>
+            </div>
+            <p style="font-size: 20px; font-weight: bold;">Visit Recognito</p>
+            <div style="display: flex; align-items: center;">
+                <a href="https://recognito.vision" style="display: flex; align-items: center;"><img src="https://recognito.vision/wp-content/uploads/2024/03/recognito_64_cl.png" style="width: 32px; margin-right: 5px;"/></a>
+                <a href="https://www.linkedin.com/company/recognito-vision" style="display: flex; align-items: center;"><img src="https://recognito.vision/wp-content/uploads/2024/03/linkedin_64_cl.png" style="width: 32px; margin-right: 5px;"/></a>
+                <a href="https://huggingface.co/recognito" style="display: flex; align-items: center;"><img src="https://recognito.vision/wp-content/uploads/2024/03/hf_64_cl.png" style="width: 32px; margin-right: 5px;"/></a>
+                <a href="https://github.com/recognito-vision" style="display: flex; align-items: center;"><img src="https://recognito.vision/wp-content/uploads/2024/03/github_64_cl.png" style="width: 32px; margin-right: 5px;"/></a>
+                <a href="https://hub.docker.com/u/recognito" style="display: flex; align-items: center;"><img src="https://recognito.vision/wp-content/uploads/2024/03/docker_64_cl.png" style="width: 32px; margin-right: 5px;"/></a>
+                <a href="https://www.youtube.com/@recognito-vision" style="display: flex; align-items: center;"><img src="https://recognito.vision/wp-content/uploads/2024/04/youtube_64_cl.png" style="width: 32px; margin-right: 5px;"/></a>
+            </div>
+            <p style="font-size: 20px; font-weight: bold;">Contact us for our on-premise Face Recognition, Liveness Detection SDKs deployment</p>
+            <div style="display: flex; align-items: center;">
+                <a target="_blank" href="mailto:[email protected]"><img src="https://img.shields.io/badge/email-[email protected]?logo=gmail" alt="www.recognito.vision"></a>
+                <a target="_blank" href="https://wa.me/+14158003112"><img src="https://img.shields.io/badge/whatsapp-+14158003112-blue.svg?logo=whatsapp" alt="www.recognito.vision"></a>
+                <a target="_blank" href="https://t.me/recognito_vision"><img src="https://img.shields.io/badge/telegram-@recognito__vision-blue.svg?logo=telegram" alt="www.recognito.vision"></a>
+                <a target="_blank" href="https://join.slack.com/t/recognito-workspace/shared_invite/zt-2d4kscqgn-"><img src="https://img.shields.io/badge/slack-recognito__workspace-blue.svg?logo=slack" alt="www.recognito.vision"></a>
+            </div>
+            <br/><br/><br/>
+            """
+        )
+
+        with gr.Tabs():
+            with gr.Tab("Face Recognition"):
+                with gr.Row():
+                    with gr.Column(scale=2):
+                        with gr.Row():
+                            with gr.Column(scale=1):
+                                compare_face_input1 = gr.Image(label="Image1", type='filepath', elem_classes="example-image")
+                                gr.Examples(['examples/1.jpg', 'examples/2.jpg', 'examples/3.jpg', 'examples/4.jpg'],
+                                            inputs=compare_face_input1)
+                            with gr.Column(scale=1):
+                                compare_face_input2 = gr.Image(label="Image2", type='filepath', elem_classes="example-image")
+                                gr.Examples(['examples/5.jpg', 'examples/6.jpg', 'examples/7.jpg', 'examples/8.jpg'],
+                                            inputs=compare_face_input2)
+
+                    with gr.Blocks():
+                        with gr.Column(scale=1, min_width=400, elem_classes="block-background"):
+                            compare_face_button = gr.Button("Compare Face", variant="primary", size="lg")
+                            with gr.Row(elem_classes="face-row"):
+                                face_output1 = gr.Image(value="icons/face.jpg", label="Face 1", scale=0, elem_classes="face-image", show_share_button=False, show_download_button=False, show_fullscreen_button=False)
+                                compare_result = gr.Image(value="icons/blank.png", min_width=30, scale=0, show_download_button=False, show_label=False, show_share_button=False, show_fullscreen_button=False)
+                                face_output2 = gr.Image(value="icons/face.jpg", label="Face 2", scale=0, elem_classes="face-image", show_share_button=False, show_download_button=False, show_fullscreen_button=False)
+                            similarity_markdown = gr.Markdown("")
+
+                            compare_face_button.click(compare_face, inputs=[compare_face_input1, compare_face_input2], outputs=[face_output1, face_output2, compare_result, similarity_markdown])
+
+            with gr.Tab("Face Liveness, Analysis"):
+                with gr.Row():
+                    with gr.Column(scale=1):
+                        face_input = gr.Image(label="Image", type='filepath', elem_classes="example-image")
+                        gr.Examples(['examples/att_1.jpg', 'examples/att_2.jpg', 'examples/att_3.jpg', 'examples/att_4.jpg', 'examples/att_5.jpg', 'examples/att_6.jpg', 'examples/att_7.jpg'],
+                                    inputs=face_input)
+
+                    with gr.Blocks():
+                        with gr.Column(scale=1, elem_classes="block-background"):
+                            analyze_face_button = gr.Button("Analyze Face", variant="primary", size="lg")
+                            with gr.Row(elem_classes="face-row"):
+                                face_output = gr.Image(value="icons/face.jpg", label="Face", scale=0, elem_classes="face-image", show_share_button=False, show_download_button=False, show_fullscreen_button=False)
+
+                            liveness_result = gr.Markdown("")
+                            attribute_result = gr.Markdown("")
+
+                            analyze_face_button.click(analyze_face, inputs=face_input, outputs=[face_output, liveness_result, attribute_result])
+
+        gr.HTML('<a href="https://visitorbadge.io/status?path=https%3A%2F%2Fhuggingface.co%2Fspaces%2FRecognito%2FFaceRecognition-LivenessDetection-FaceAnalysis"><img src="https://api.visitorbadge.io/api/combined?path=https%3A%2F%2Fhuggingface.co%2Fspaces%2FRecognito%2FFaceRecognition-LivenessDetection-FaceAnalysis&countColor=%2337d67a&style=flat&labelStyle=upper" /></a>')
+
+    demo.launch(server_name="0.0.0.0", server_port=7860, show_api=False)
+
+if __name__ == '__main__':
+    g_fr_activation_result = activate_fr_sdk()
+    g_fl_activation_result = activate_fl_sdk()
+    launch_demo(g_fr_activation_result, g_fl_activation_result)
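With this change the Space no longer proxies to RapidAPI: `__main__` activates both SDKs and `launch_demo()` serves the Gradio app on 0.0.0.0:7860, so the demo starts with `python app.py`. The face thumbnails in the UI come from the clamp, crop and resize step in the new `compare_face`; a small stand-alone sketch of that step, with an illustrative function name and a made-up box:

```python
# Sketch: the bounding-box clamp, crop and fixed-height resize used for the face thumbnails.
from PIL import Image

def crop_face_thumbnail(image: Image.Image, bbox, thumb_h: int = 150) -> Image.Image:
    """Clamp an (x1, y1, x2, y2) box to the image, crop it, and resize to a fixed height."""
    x1, y1, x2, y2 = bbox
    x1, y1 = max(x1, 0), max(y1, 0)
    x2 = min(x2, image.width - 1)
    y2 = min(y2, image.height - 1)
    face = image.crop((x1, y1, x2, y2))
    ratio = face.width / float(face.height)
    return face.resize((int(ratio * thumb_h), thumb_h))

# Example with a made-up box; in the app the box comes from the FR engine.
thumb = crop_face_thumbnail(Image.open("examples/1.jpg"), (40, 40, 200, 220))
print(thumb.size)
```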