danhtran2mind committed
Commit 20cf96a · verified · 1 Parent(s): e6c8145

Upload 258 files

This view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.
Files changed (50)
  1. .gitattributes +17 -0
  2. apps/assets/examples/example_face_1.jpg +0 -0
  3. apps/assets/examples/example_face_2.jpg +3 -0
  4. apps/assets/examples/example_face_3.jpg +0 -0
  5. apps/assets/examples/example_face_4.jpg +0 -0
  6. apps/gradio_app.py +103 -73
  7. apps/gradio_app/components.py +42 -81
  8. apps/gradio_app/inference.py +1 -1
  9. apps/gradio_app/static/scripts.js +3 -0
  10. apps/gradio_app/static/styles.css +270 -0
  11. assets/gradio_app_demo.jpg +3 -0
  12. src/third_party/edgeface/.gitignore +11 -0
  13. src/third_party/edgeface/.python-version +1 -0
  14. src/third_party/edgeface/LICENSE +30 -0
  15. src/third_party/edgeface/README.md +126 -0
  16. src/third_party/edgeface/assets/benchmark.png +3 -0
  17. src/third_party/edgeface/assets/edgeface.png +3 -0
  18. src/third_party/edgeface/backbones/__init__.py +37 -0
  19. src/third_party/edgeface/backbones/timmfr.py +78 -0
  20. src/third_party/edgeface/ckpts/edgeface_s_gamma_05.pt +3 -0
  21. src/third_party/edgeface/ckpts/edgeface_xs_gamma_06.pt +3 -0
  22. src/third_party/edgeface/face_alignment/LICENSE +21 -0
  23. src/third_party/edgeface/face_alignment/README.md +1 -0
  24. src/third_party/edgeface/face_alignment/align.py +58 -0
  25. src/third_party/edgeface/face_alignment/face_yolo.py +149 -0
  26. src/third_party/edgeface/face_alignment/mtcnn.py +175 -0
  27. src/third_party/edgeface/face_alignment/mtcnn_pytorch/.gitignore +3 -0
  28. src/third_party/edgeface/face_alignment/mtcnn_pytorch/LICENSE +21 -0
  29. src/third_party/edgeface/face_alignment/mtcnn_pytorch/README.md +26 -0
  30. src/third_party/edgeface/face_alignment/mtcnn_pytorch/caffe_models/det1.caffemodel +0 -0
  31. src/third_party/edgeface/face_alignment/mtcnn_pytorch/caffe_models/det1.prototxt +177 -0
  32. src/third_party/edgeface/face_alignment/mtcnn_pytorch/caffe_models/det2.caffemodel +3 -0
  33. src/third_party/edgeface/face_alignment/mtcnn_pytorch/caffe_models/det2.prototxt +228 -0
  34. src/third_party/edgeface/face_alignment/mtcnn_pytorch/caffe_models/det3.caffemodel +3 -0
  35. src/third_party/edgeface/face_alignment/mtcnn_pytorch/caffe_models/det3.prototxt +294 -0
  36. src/third_party/edgeface/face_alignment/mtcnn_pytorch/caffe_models/det4.caffemodel +3 -0
  37. src/third_party/edgeface/face_alignment/mtcnn_pytorch/caffe_models/det4.prototxt +995 -0
  38. src/third_party/edgeface/face_alignment/mtcnn_pytorch/extract_weights_from_caffe_models.py +47 -0
  39. src/third_party/edgeface/face_alignment/mtcnn_pytorch/get_aligned_face_from_mtcnn.ipynb +0 -0
  40. src/third_party/edgeface/face_alignment/mtcnn_pytorch/images/example.png +3 -0
  41. src/third_party/edgeface/face_alignment/mtcnn_pytorch/images/face0.jpg +0 -0
  42. src/third_party/edgeface/face_alignment/mtcnn_pytorch/images/jf.jpg +0 -0
  43. src/third_party/edgeface/face_alignment/mtcnn_pytorch/images/office1.jpg +0 -0
  44. src/third_party/edgeface/face_alignment/mtcnn_pytorch/images/office2.jpg +3 -0
  45. src/third_party/edgeface/face_alignment/mtcnn_pytorch/images/office3.jpg +0 -0
  46. src/third_party/edgeface/face_alignment/mtcnn_pytorch/images/office4.jpg +3 -0
  47. src/third_party/edgeface/face_alignment/mtcnn_pytorch/images/office5.jpg +0 -0
  48. src/third_party/edgeface/face_alignment/mtcnn_pytorch/refine_faces.ipynb +315 -0
  49. src/third_party/edgeface/face_alignment/mtcnn_pytorch/src/__init__.py +2 -0
  50. src/third_party/edgeface/face_alignment/mtcnn_pytorch/src/align_trans.py +304 -0
.gitattributes CHANGED
@@ -121,3 +121,20 @@ tests/test_images/Kate[[:space:]]Winslet.jpg filter=lfs diff=lfs merge=lfs -text
  tests/test_images/Tom[[:space:]]Cruise.jpg filter=lfs diff=lfs merge=lfs -text
  tests/test_images/Tom[[:space:]]Hanks.jpg filter=lfs diff=lfs merge=lfs -text
  tests/test_images/Viola[[:space:]]Davis.jpg filter=lfs diff=lfs merge=lfs -text
+ apps/assets/examples/example_face_2.jpg filter=lfs diff=lfs merge=lfs -text
+ assets/gradio_app_demo.jpg filter=lfs diff=lfs merge=lfs -text
+ src/third_party/edgeface/assets/benchmark.png filter=lfs diff=lfs merge=lfs -text
+ src/third_party/edgeface/assets/edgeface.png filter=lfs diff=lfs merge=lfs -text
+ src/third_party/edgeface/face_alignment/mtcnn_pytorch/caffe_models/det2.caffemodel filter=lfs diff=lfs merge=lfs -text
+ src/third_party/edgeface/face_alignment/mtcnn_pytorch/caffe_models/det3.caffemodel filter=lfs diff=lfs merge=lfs -text
+ src/third_party/edgeface/face_alignment/mtcnn_pytorch/caffe_models/det4.caffemodel filter=lfs diff=lfs merge=lfs -text
+ src/third_party/edgeface/face_alignment/mtcnn_pytorch/images/example.png filter=lfs diff=lfs merge=lfs -text
+ src/third_party/edgeface/face_alignment/mtcnn_pytorch/images/office2.jpg filter=lfs diff=lfs merge=lfs -text
+ src/third_party/edgeface/face_alignment/mtcnn_pytorch/images/office4.jpg filter=lfs diff=lfs merge=lfs -text
+ src/third_party/edgeface/tests/test_images/Cate[[:space:]]Blanchett.jpg filter=lfs diff=lfs merge=lfs -text
+ src/third_party/edgeface/tests/test_images/Daniel[[:space:]]Day-Lewis.jpg filter=lfs diff=lfs merge=lfs -text
+ src/third_party/edgeface/tests/test_images/Elon_Musk.jpg filter=lfs diff=lfs merge=lfs -text
+ src/third_party/edgeface/tests/test_images/Gal[[:space:]]Gado.jpg filter=lfs diff=lfs merge=lfs -text
+ src/third_party/edgeface/tests/test_images/Kate[[:space:]]Winslet.jpg filter=lfs diff=lfs merge=lfs -text
+ src/third_party/edgeface/tests/test_images/Tom[[:space:]]Hanks.jpg filter=lfs diff=lfs merge=lfs -text
+ src/third_party/edgeface/tests/test_images/Viola[[:space:]]Davis.jpg filter=lfs diff=lfs merge=lfs -text
apps/assets/examples/example_face_1.jpg ADDED
apps/assets/examples/example_face_2.jpg ADDED

Git LFS Details

  • SHA256: f29d99c0774faeaef343cdb471103186b97ad09d295abf9a56e55a763c28b8f4
  • Pointer size: 131 Bytes
  • Size of remote file: 203 kB
apps/assets/examples/example_face_3.jpg ADDED
apps/assets/examples/example_face_4.jpg ADDED
apps/gradio_app.py CHANGED
@@ -2,90 +2,118 @@ import gradio as gr
2
  from PIL import Image
3
  from gradio_app.inference import run_inference
4
  from gradio_app.components import (
5
- CONTENT_DESCRIPTION, CONTENT_IN, CONTENT_OUT,
6
- list_reference_files, list_mapping_files,
7
- list_classifier_files, list_edgeface_files
 
 
8
  )
 
 
9
 
10
- def create_image_input_column():
11
- """Create the column for image input and output display."""
12
- with gr.Column():
13
  image_input = gr.Image(type="pil", label="Upload Image")
14
  output = gr.HTML(label="Inference Results", elem_classes=["results-container"])
15
  return image_input, output
16
 
17
- def create_model_files_column():
18
- """Create the column for model file selection."""
19
- with gr.Column():
20
- with gr.Group(elem_classes=["section-group"]):
21
- gr.Markdown("### Model Files", elem_classes=["section-title"])
22
- ref_dict = gr.Dropdown(
23
- choices=["Select a file"] + list_reference_files(),
24
- label="Reference Dict JSON",
25
- value="data/reference_data/reference_image_data.json"
26
- )
27
- index_map = gr.Dropdown(
28
- choices=["Select a file"] + list_mapping_files(),
29
- label="Index to Class Mapping JSON",
30
- value="ckpts/index_to_class_mapping.json"
31
- )
32
- classifier_model = gr.Dropdown(
33
- choices=["Select a file"] + list_classifier_files(),
34
- label="Classifier Model (.pth)",
35
- value="ckpts/SlimFace_efficientnet_b3_full_model.pth"
36
- )
37
- edgeface_model = gr.Dropdown(
38
- choices=["Select a file"] + list_edgeface_files(),
39
- label="EdgeFace Model (.pt)",
40
- value="ckpts/idiap/edgeface_s_gamma_05.pt"
41
- )
42
- return ref_dict, index_map, classifier_model, edgeface_model
43
 
44
- def create_settings_column():
45
- """Create the column for advanced settings."""
46
- with gr.Column():
47
- with gr.Group(elem_classes=["section-group"]):
48
- gr.Markdown("### Advanced Settings", elem_classes=["section-title"])
49
- algorithm = gr.Dropdown(
50
- choices=["yolo", "mtcnn", "retinaface"],
51
- label="Detection Algorithm",
52
- value="yolo"
53
- )
54
- accelerator = gr.Dropdown(
55
- choices=["auto", "cpu", "cuda", "mps"],
56
- label="Accelerator",
57
- value="auto"
58
- )
59
- resolution = gr.Slider(
60
- minimum=128,
61
- maximum=512,
62
- step=32,
63
- label="Image Resolution",
64
- value=300
65
- )
66
- similarity_threshold = gr.Slider(
67
- minimum=0.1,
68
- maximum=1.0,
69
- step=0.05,
70
- label="Similarity Threshold",
71
- value=0.3
72
- )
73
- return algorithm, accelerator, resolution, similarity_threshold
74
 
75
  def create_interface():
76
  """Create the Gradio interface for SlimFace."""
77
- with gr.Blocks(css="gradio_app/static/styles.css", theme=gr.themes.Soft()) as demo:
78
  gr.Markdown("# SlimFace Demonstration")
79
  gr.Markdown(CONTENT_DESCRIPTION)
80
- gr.HTML(CONTENT_IN)
81
-
82
- with gr.Row():
83
- image_input, output = create_image_input_column()
84
- ref_dict, index_map, classifier_model, edgeface_model = create_model_files_column()
85
-
86
- with gr.Row():
87
- algorithm, accelerator, resolution, similarity_threshold = create_settings_column()
89
  with gr.Row():
90
  submit_btn = gr.Button("Run Inference", variant="primary", elem_classes=["centered-button"])
91
 
@@ -104,13 +132,15 @@ def create_interface():
104
  ],
105
  outputs=output
106
  )
107
- gr.Markdown(CONTENT_OUT)
 
 
108
  return demo
109
 
110
  def main():
111
  """Launch the Gradio interface."""
112
  demo = create_interface()
113
- demo.launch()
114
 
115
  if __name__ == "__main__":
116
  main()
 
2
  from PIL import Image
3
  from gradio_app.inference import run_inference
4
  from gradio_app.components import (
5
+ CONTENT_DESCRIPTION, CONTENT_OUTTRO,
6
+ CONTENT_IN_1, CONTENT_IN_2,
7
+ CONTENT_OUT_1, CONTENT_OUT_2,
8
+ list_reference_files, list_mapping_files,
9
+ list_classifier_files, list_edgeface_files
10
  )
11
+ from glob import glob
12
+ import os
13
 
14
+ def create_image_io_row():
15
+ """Create the row for image input and output display."""
16
+ with gr.Row(elem_classes=["image-io-row"]):
17
  image_input = gr.Image(type="pil", label="Upload Image")
18
  output = gr.HTML(label="Inference Results", elem_classes=["results-container"])
19
  return image_input, output
20
 
21
+ def create_model_settings_row():
22
+ """Create the row for model files and settings."""
23
+ with gr.Row():
24
+ with gr.Column():
25
+ with gr.Group(elem_classes=["section-group"]):
26
+ gr.Markdown("### Model Files", elem_classes=["section-title"])
27
+ ref_dict = gr.Dropdown(
28
+ choices=["Select a file"] + list_reference_files(),
29
+ label="Reference Dict JSON",
30
+ value="data/reference_data/reference_image_data.json"
31
+ )
32
+ index_map = gr.Dropdown(
33
+ choices=["Select a file"] + list_mapping_files(),
34
+ label="Index to Class Mapping JSON",
35
+ value="ckpts/index_to_class_mapping.json"
36
+ )
37
+ classifier_model = gr.Dropdown(
38
+ choices=["Select a file"] + list_classifier_files(),
39
+ label="Classifier Model (.pth)",
40
+ value="ckpts/SlimFace_efficientnet_b3_full_model.pth"
41
+ )
42
+ edgeface_model = gr.Dropdown(
43
+ choices=["Select a file"] + list_edgeface_files(),
44
+ label="EdgeFace Model (.pt)",
45
+ value="ckpts/idiap/edgeface_s_gamma_05.pt"
46
+ )
47
+ with gr.Column():
48
+ with gr.Group(elem_classes=["section-group"]):
49
+ gr.Markdown("### Advanced Settings", elem_classes=["section-title"])
50
+ algorithm = gr.Dropdown(
51
+ choices=["yolo", "mtcnn", "retinaface"],
52
+ label="Detection Algorithm",
53
+ value="yolo"
54
+ )
55
+ accelerator = gr.Dropdown(
56
+ choices=["auto", "cpu", "cuda", "mps"],
57
+ label="Accelerator",
58
+ value="auto"
59
+ )
60
+ resolution = gr.Slider(
61
+ minimum=128,
62
+ maximum=512,
63
+ step=32,
64
+ label="Image Resolution",
65
+ value=300
66
+ )
67
+ similarity_threshold = gr.Slider(
68
+ minimum=0.1,
69
+ maximum=1.0,
70
+ step=0.05,
71
+ label="Similarity Threshold",
72
+ value=0.3
73
+ )
74
+ return ref_dict, index_map, classifier_model, edgeface_model, algorithm, accelerator, resolution, similarity_threshold
75
 
76
+ # Load local CSS file
77
+ CSS = open("apps/gradio_app/static/styles.css").read()
78
 
79
  def create_interface():
80
  """Create the Gradio interface for SlimFace."""
81
+ with gr.Blocks(css=CSS, theme=gr.themes.Soft()) as demo:
82
  gr.Markdown("# SlimFace Demonstration")
83
  gr.Markdown(CONTENT_DESCRIPTION)
84
+ gr.Markdown(CONTENT_IN_1)
85
+ gr.HTML(CONTENT_IN_2)
86
+
87
+ image_input, output = create_image_io_row()
88
+ ref_dict, index_map, classifier_model, edgeface_model, algorithm, accelerator, resolution, similarity_threshold = create_model_settings_row()
 
 
 
89
 
90
+ # Add example image gallery as a table
91
+ with gr.Group():
92
+ gr.Markdown("### Example Images")
93
+ example_images = glob("apps/assets/examples/*.[jp][pn][gf]")
94
+ if example_images:
95
+ # Create a list of dictionaries for the table
96
+ table_data = []
97
+ for img_path in example_images:
98
+ table_data.append({
99
+ "Image": img_path, # Will be rendered as an image
100
+ "Action": f"Use {os.path.basename(img_path)}" # Button text
101
+ })
102
+
103
+ # Create a table with images and buttons
104
+ gr.Dataframe(
105
+ value=table_data,
106
+ headers=["Image", "Action"],
107
+ datatype=["image", "str"],
108
+ interactive=False,
109
+ elem_classes=["example-table"],
110
+ # Add click event for buttons
111
+ row_click=lambda row: Image.open(row["Image"]),
112
+ outputs=image_input
113
+ )
114
+ else:
115
+ gr.Markdown("No example images found in apps/assets/examples/")
116
+
117
  with gr.Row():
118
  submit_btn = gr.Button("Run Inference", variant="primary", elem_classes=["centered-button"])
119
 
 
132
  ],
133
  outputs=output
134
  )
135
+ gr.Markdown(CONTENT_OUTTRO)
136
+ gr.HTML(CONTENT_OUT_1)
137
+ gr.Markdown(CONTENT_OUT_2)
138
  return demo
139
 
140
  def main():
141
  """Launch the Gradio interface."""
142
  demo = create_interface()
143
+ demo.launch(share=True)
144
 
145
  if __name__ == "__main__":
146
  main()
apps/gradio_app/components.py CHANGED
@@ -44,90 +44,51 @@ def list_edgeface_files():
44
  CONTENT_DESCRIPTION = """
45
  **SlimFace: Advanced Face Classification with TorchVision Backbones**
46
  """
47
- CONTENT_IN = """
48
- <style>
49
- body {
50
- font-family: Arial, sans-serif;
51
- line-height: 1.6;
52
- margin: 0; /* Remove default margin for full-width */
53
- padding: 20px; /* Adjust padding for content spacing */
54
- color: #333;
55
- width: 100%; /* Ensure body takes full width */
56
- box-sizing: border-box; /* Include padding in width calculation */
57
- }
58
- .attribution {
59
- background-color: #f9f9f9;
60
- padding: 20px;
61
- border-radius: 8px;
62
- box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
63
- }
64
- .quote-container {
65
- border-left: 5px solid #007bff;
66
- padding-left: 15px;
67
- margin-bottom: 15px;
68
- font-style: italic;
69
- }
70
- .attribution p {
71
- margin: 10px 0;
72
- }
73
- .badge {
74
- display: inline-block;
75
- border-radius: 4px;
76
- text-decoration: none;
77
- font-size: 14px;
78
- transition: background-color 0.3s;
79
- }
80
- .badge:hover {
81
- background-color: #0056b3;
82
- }
83
- .badge img {
84
- vertical-align: middle;
85
- margin-right: 5px;
86
- }
87
- .source {
88
- color: #555;
89
- }
90
- </style>
91
- <div class="quote-container">
92
- <p>
93
- This project leverages code from
94
- <a class="badge" href="https://github.com/otroshi/edgeface">
95
- <img src="https://img.shields.io/badge/Built%20on-otroshi%2Fedgeface-blue?style=flat&logo=github" alt="Built on edgeface">
96
- </a>
97
- by
98
- <a class="badge" href="https://github.com/otroshi">
99
- <img src="https://img.shields.io/badge/GitHub-Hatef_Otroshi-blue?style=flat&logo=github" alt="Hatef Otroshi">
100
- </a>,
101
- with our own bug fixes and enhancements available at
102
- <a class="badge" href="https://github.com/danhtran2mind/edgeface/tree/main/face_alignment">
103
- <img src="https://img.shields.io/badge/GitHub-danhtran2mind%2Fedgeface-blue?style=flat&logo=github" alt="Edgeface Enhancements">
104
- </a>.
105
- </p>
106
- </div>
107
- <p class="source">
108
- For more information, you can follow below:<br>
109
- Source code:
110
- <a class="badge" href="https://github.com/danhtran2mind/SlimFace">
111
- <img src="https://img.shields.io/badge/GitHub-danhtran2mind%2FSlimFace-blue?style=flat" alt="GitHub Repo">
112
- ,
113
- </a>
114
- Author:
115
- <a class="badge" href="https://github.com/danhtran2mind">
116
- <img src="https://img.shields.io/badge/GitHub-danhtran2mind-blue?style=flat" alt="GitHub Profile">
117
- ,
118
- </a>
119
- PyTorch Docs:
120
- <a class="badge" href="https://docs.pytorch.org/vision/main/models.html">
121
- <img src="https://img.shields.io/badge/PyTorch-Pretrain%20Model%20Docs-blue?style=flat" alt="PyTorch Docs">
122
- </a>
123
- </p>
124
- """
125
-
126
- CONTENT_OUT = """
127
- ## More Information about SlimFace
128
 
 
129
  SlimFace empowers developers to build high-accuracy face classification models using transfer learning, leveraging TorchVision's powerful pre-trained architectures. 🌟 It provides a flexible, efficient, and scalable solution for facial recognition, delivering top-tier performance for custom applications.
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
130
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
131
  **Supported Architectures:**
132
  - **EfficientNet**: B0-B7 and V2 (Small, Medium, Large) for balanced performance and efficiency. 📸
133
  - **RegNet**: X/Y series (400MF to 128GF) for optimized computation across diverse hardware. 💻
 
44
  CONTENT_DESCRIPTION = """
45
  **SlimFace: Advanced Face Classification with TorchVision Backbones**
46
  """
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
47
 
48
+ CONTENT_IN_1 = """
49
  SlimFace empowers developers to build high-accuracy face classification models using transfer learning, leveraging TorchVision's powerful pre-trained architectures. 🌟 It provides a flexible, efficient, and scalable solution for facial recognition, delivering top-tier performance for custom applications.
50
+ """
51
+ CONTENT_IN_2 = """
52
+ <p class="source">
53
+ For more information, you can follow below:<br>
54
+ Source code:
55
+ <a class="badge" href="https://github.com/danhtran2mind/SlimFace">
56
+ <img src="https://img.shields.io/badge/GitHub-danhtran2mind%2FSlimFace-blue?style=flat" alt="GitHub Repo">
57
+ </a>,
58
+ Author:
59
+ <a class="badge" href="https://github.com/danhtran2mind">
60
+ <img src="https://img.shields.io/badge/GitHub-danhtran2mind-blue?style=flat" alt="GitHub Profile">
61
+ </a>,
62
+ PyTorch Docs:
63
+ <a class="badge" href="https://docs.pytorch.org/vision/main/models.html">
64
+ <img src="https://img.shields.io/badge/PyTorch-Pretrain%20Model%20Docs-blue?style=flat" alt="PyTorch Docs">
65
+ </a>
66
+ </p>
67
+ """
68
+
69
 
70
+ CONTENT_OUTTRO = """
71
+ ## More Information about SlimFace
72
+ """
73
+ CONTENT_OUT_1 = """
74
+ <div class="quote-container">
75
+ <p>
76
+ This project leverages code from
77
+ <a class="badge" href="https://github.com/otroshi/edgeface">
78
+ <img src="https://img.shields.io/badge/Built%20on-otroshi%2Fedgeface-blue?style=flat&logo=github" alt="Built on edgeface">
79
+ </a>
80
+ by
81
+ <a class="badge" href="https://github.com/otroshi">
82
+ <img src="https://img.shields.io/badge/GitHub-Hatef_Otroshi-blue?style=flat&logo=github" alt="Hatef Otroshi">
83
+ </a>,
84
+ with our own bug fixes and enhancements available at
85
+ <a class="badge" href="https://github.com/danhtran2mind/edgeface/tree/main/face_alignment">
86
+ <img src="https://img.shields.io/badge/GitHub-danhtran2mind%2Fedgeface-blue?style=flat&logo=github" alt="Edgeface Enhancements">
87
+ </a>.
88
+ </p>
89
+ </div>
90
+ """
91
+ CONTENT_OUT_2 = """
92
  **Supported Architectures:**
93
  - **EfficientNet**: B0-B7 and V2 (Small, Medium, Large) for balanced performance and efficiency. 📸
94
  - **RegNet**: X/Y series (400MF to 128GF) for optimized computation across diverse hardware. 💻
apps/gradio_app/inference.py CHANGED
@@ -71,7 +71,7 @@ def run_inference(image, reference_dict_path, index_to_class_mapping_path, model
71
  # Similarity with Reference Image
72
  similarity = result.get('similarity', 'N/A')
73
  similarity_str = f'{similarity:.4f}' if isinstance(similarity, (int, float)) else 'N/A'
74
- output += f'<div class="result-item"><span class="label">Similarity with<br>Reference Image</span><span class="value">{similarity_str}</span></div>'
75
 
76
  # Confirmed Person
77
  confirmed = result.get('confirmed', 'N/A')
 
71
  # Similarity with Reference Image
72
  similarity = result.get('similarity', 'N/A')
73
  similarity_str = f'{similarity:.4f}' if isinstance(similarity, (int, float)) else 'N/A'
74
+ output += f'<div class="result-item"><span class="label">Similarity with<br> Reference Image</span><span class="value">{similarity_str}</span></div>'
75
 
76
  # Confirmed Person
77
  confirmed = result.get('confirmed', 'N/A')
apps/gradio_app/static/scripts.js ADDED
@@ -0,0 +1,3 @@
1
+ // Placeholder for future JavaScript functionality
2
+ // Currently, no JavaScript is required for the Gradio app as interactions are handled by Gradio
3
+ console.log("SlimFace Gradio App JavaScript loaded");
apps/gradio_app/static/styles.css ADDED
@@ -0,0 +1,270 @@
1
+ body {
2
+ font-family: 'Inter', -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
3
+ background: linear-gradient(145deg, #e2e8f0 0%, #b8c6db 100%);
4
+ margin: 0;
5
+ padding: 0;
6
+ min-height: 100vh;
7
+ color: #1a202c;
8
+ }
9
+
10
+ .gradio-container {
11
+ max-width: 1280px;
12
+ margin: 0 auto;
13
+ padding: 2.5rem 1.5rem;
14
+ box-sizing: border-box;
15
+ }
16
+
17
+ h1 {
18
+ color: #1a202c;
19
+ font-size: 2.75rem;
20
+ font-weight: 800;
21
+ text-align: center;
22
+ margin-bottom: 2.5rem;
23
+ letter-spacing: -0.025em;
24
+ background: linear-gradient(to right, #2b6cb0, #4a90e2);
25
+ -webkit-background-clip: text;
26
+ -webkit-text-fill-color: transparent;
27
+ }
28
+
29
+ .section-title {
30
+ color: #1a202c;
31
+ font-size: 1.5rem;
32
+ font-weight: 700;
33
+ margin-bottom: 1rem;
34
+ border-bottom: 2px solid #4a90e2;
35
+ padding-bottom: 0.5rem;
36
+ letter-spacing: -0.015em;
37
+ }
38
+
39
+ .section-group {
40
+ background: rgba(255, 255, 255, 0.95);
41
+ border-radius: 0.5rem;
42
+ padding: 1.5rem;
43
+ border: 1px solid rgba(226, 232, 240, 0.5);
44
+ box-shadow: 0 2px 4px rgba(0, 0, 0, 0.05);
45
+ }
46
+
47
+ .results-container {
48
+ display: flex;
49
+ flex-direction: column;
50
+ gap: 1.75rem;
51
+ padding: 2rem;
52
+ background: rgba(255, 255, 255, 0.95);
53
+ border-radius: 1.25rem;
54
+ box-shadow: 0 10px 20px rgba(0, 0, 0, 0.15), 0 4px 6px rgba(0, 0, 0, 0.1);
55
+ border: 1px solid rgba(226, 232, 240, 0.5);
56
+ backdrop-filter: blur(8px);
57
+ }
58
+
59
+ .result-card {
60
+ background: linear-gradient(145deg, #f7fafc, #edf2f7);
61
+ border-radius: 1rem;
62
+ padding: 2.25rem;
63
+ box-shadow: 0 6px 12px rgba(0, 0, 0, 0.1);
64
+ transition: transform 0.3s ease, box-shadow 0.3s ease, background 0.3s ease;
65
+ position: relative;
66
+ overflow: hidden;
67
+ }
68
+
69
+ .result-card:hover {
70
+ transform: translateY(-5px);
71
+ box-shadow: 0 10px 24px rgba(0, 0, 0, 0.15);
72
+ background: linear-gradient(145deg, #ffffff, #e6eefa);
73
+ }
74
+
75
+ .result-card::before {
76
+ content: '';
77
+ position: absolute;
78
+ top: 0;
79
+ left: 0;
80
+ width: 100%;
81
+ height: 4px;
82
+ background: linear-gradient(to right, #4a90e2, #63b3ed);
83
+ transition: height 0.3s ease;
84
+ }
85
+
86
+ .result-card:hover::before {
87
+ height: 8px;
88
+ }
89
+
90
+ .result-title {
91
+ color: #1a202c;
92
+ font-size: 1.875rem;
93
+ font-weight: 700;
94
+ margin-bottom: 1.5rem;
95
+ border-bottom: 3px solid #4a90e2;
96
+ padding-bottom: 0.75rem;
97
+ letter-spacing: -0.015em;
98
+ }
99
+
100
+ .result-item {
101
+ display: flex;
102
+ justify-content: space-between;
103
+ align-items: center;
104
+ margin: 1rem 0;
105
+ font-size: 1.125rem;
106
+ color: #2d3748;
107
+ line-height: 1.6;
108
+ }
109
+
110
+ .label {
111
+ font-weight: 600;
112
+ color: #2b6cb0;
113
+ text-align: left;
114
+ text-transform: uppercase;
115
+ font-size: 0.95rem;
116
+ letter-spacing: 0.05em;
117
+ flex: 0 0 auto;
118
+ }
119
+
120
+ .value {
121
+ color: #1a202c;
122
+ font-weight: 500;
123
+ text-align: right;
124
+ flex: 0 0 auto;
125
+ }
126
+
127
+ .value.confirmed-true {
128
+ color: #2f855a;
129
+ font-weight: 600;
130
+ background: #c6f6d5;
131
+ padding: 0.25rem 0.5rem;
132
+ border-radius: 0.375rem;
133
+ }
134
+
135
+ .value.confirmed-false {
136
+ color: #c53030;
137
+ font-weight: 600;
138
+ background: #fed7d7;
139
+ padding: 0.25rem 0.5rem;
140
+ border-radius: 0.375rem;
141
+ }
142
+
143
+ .error-message {
144
+ background: #fef2f2;
145
+ color: #9b2c2c;
146
+ padding: 1.75rem;
147
+ border-radius: 0.875rem;
148
+ margin: 1.25rem 0;
149
+ font-size: 1.125rem;
150
+ font-weight: 500;
151
+ border: 1px solid #e53e3e;
152
+ box-shadow: 0 2px 8px rgba(0, 0, 0, 0.1);
153
+ }
154
+
155
+ .centered-button {
156
+ display: block;
157
+ margin: 1rem auto;
158
+ background: #4a90e2;
159
+ color: white;
160
+ padding: 0.75rem 1.5rem;
161
+ border-radius: 0.5rem;
162
+ border: none;
163
+ font-size: 1rem;
164
+ font-weight: 600;
165
+ cursor: pointer;
166
+ transition: background 0.3s ease;
167
+ position: relative;
168
+ padding-left: 2.5rem;
169
+ width: 30%;
170
+ }
171
+
172
+ .centered-button:hover {
173
+ background: #2b6cb0;
174
+ }
175
+
176
+ .centered-button::after {
177
+ content: '🤔';
178
+ position: absolute;
179
+ left: 0.75rem;
180
+ top: 50%;
181
+ transform: translateY(-50%);
182
+ font-size: 1.2rem;
183
+ }
184
+
185
+ @media (max-width: 768px) {
186
+ .gradio-container {
187
+ padding: 1.5rem;
188
+ }
189
+
190
+ h1 {
191
+ font-size: 2rem;
192
+ }
193
+
194
+ .results-container {
195
+ padding: 1.5rem;
196
+ }
197
+
198
+ .result-card {
199
+ padding: 1.5rem;
200
+ }
201
+
202
+ .result-title {
203
+ font-size: 1.5rem;
204
+ }
205
+
206
+ .result-item {
207
+ font-size: 1rem;
208
+ flex-direction: column;
209
+ align-items: flex-start;
210
+ gap: 0.5rem;
211
+ }
212
+
213
+ .label, .value {
214
+ text-align: left;
215
+ }
216
+
217
+ .section-title {
218
+ font-size: 1.25rem;
219
+ }
220
+
221
+ .section-group {
222
+ padding: 1rem;
223
+ }
224
+
225
+ .centered-button {
226
+ padding: 0.5rem 1rem;
227
+ font-size: 0.9rem;
228
+ }
229
+ }
230
+
231
+ /*Components for Gradio App*/
232
+ .quote-container {
233
+ border-left: 5px solid #007bff;
234
+ padding-left: 15px;
235
+ margin-bottom: 15px;
236
+ font-style: italic;
237
+ }
238
+ .attribution p {
239
+ margin: 10px 0;
240
+ }
241
+ .badge {
242
+ display: inline-block;
243
+ border-radius: 4px;
244
+ text-decoration: none;
245
+ font-size: 14px;
246
+ transition: background-color 0.3s;
247
+ }
248
+ .badge:hover {
249
+ background-color: #0056b3;
250
+ }
251
+ .badge img {
252
+ vertical-align: middle;
253
+ margin-right: 5px;
254
+ }
255
+ .source {
256
+ font-size: 14px;
257
+ }
258
+ /* Gradio Examples */
259
+ .example-table {
260
+ width: 100%;
261
+ max-width: 800px;
262
+ }
263
+ .example-table img {
264
+ max-width: 100px;
265
+ height: auto;
266
+ }
267
+ .example-table .cell {
268
+ vertical-align: middle;
269
+ text-align: center;
270
+ }
assets/gradio_app_demo.jpg ADDED

Git LFS Details

  • SHA256: a3adaf9c5f433f400cc1476efe0e729df9a325951cb00a7614060e9bb7906a26
  • Pointer size: 131 Bytes
  • Size of remote file: 143 kB
src/third_party/edgeface/.gitignore ADDED
@@ -0,0 +1,11 @@
1
+ .venv/
2
+ *.pyc
3
+ __pycache__/
4
+ *.pyo
5
+ *.pyd
6
+ .Python
7
+ *.egg-info/
8
+ dist/
9
+ build/
10
+ *.sqlite3
11
+ checkpoints/yolo11_face_detection
src/third_party/edgeface/.python-version ADDED
@@ -0,0 +1 @@
1
+ 3.11.11
src/third_party/edgeface/LICENSE ADDED
@@ -0,0 +1,30 @@
1
+ BSD 3-Clause License
2
+
3
+ Copyright (c) 2024, Anjith George, Christophe Ecabert, Hatef Otroshi Shahreza,
4
+ Ketan Kotwal, Sébastien Marcel
5
+ Idiap Research Institute, Martigny 1920, Switzerland.
6
+
7
+ Redistribution and use in source and binary forms, with or without
8
+ modification, are permitted provided that the following conditions are met:
9
+
10
+ 1. Redistributions of source code must retain the above copyright notice, this
11
+ list of conditions and the following disclaimer.
12
+
13
+ 2. Redistributions in binary form must reproduce the above copyright notice,
14
+ this list of conditions and the following disclaimer in the documentation
15
+ and/or other materials provided with the distribution.
16
+
17
+ 3. Neither the name of the copyright holder nor the names of its
18
+ contributors may be used to endorse or promote products derived from
19
+ this software without specific prior written permission.
20
+
21
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
22
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
23
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
24
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
25
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
26
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
27
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
28
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
29
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
src/third_party/edgeface/README.md ADDED
@@ -0,0 +1,126 @@
1
+
2
+
3
+ # EdgeFace: Efficient Face Recognition Model for Edge Devices
4
+
5
+ [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/edgeface-efficient-face-recognition-model-for/lightweight-face-recognition-on-lfw)](https://paperswithcode.com/sota/lightweight-face-recognition-on-lfw?p=edgeface-efficient-face-recognition-model-for)
6
+ [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/edgeface-efficient-face-recognition-model-for/lightweight-face-recognition-on-calfw)](https://paperswithcode.com/sota/lightweight-face-recognition-on-calfw?p=edgeface-efficient-face-recognition-model-for)
7
+ [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/edgeface-efficient-face-recognition-model-for/lightweight-face-recognition-on-cplfw)](https://paperswithcode.com/sota/lightweight-face-recognition-on-cplfw?p=edgeface-efficient-face-recognition-model-for)
8
+ [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/edgeface-efficient-face-recognition-model-for/lightweight-face-recognition-on-cfp-fp)](https://paperswithcode.com/sota/lightweight-face-recognition-on-cfp-fp?p=edgeface-efficient-face-recognition-model-for)
9
+ [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/edgeface-efficient-face-recognition-model-for/lightweight-face-recognition-on-agedb-30)](https://paperswithcode.com/sota/lightweight-face-recognition-on-agedb-30?p=edgeface-efficient-face-recognition-model-for)
10
+ [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/edgeface-efficient-face-recognition-model-for/lightweight-face-recognition-on-ijb-b)](https://paperswithcode.com/sota/lightweight-face-recognition-on-ijb-b?p=edgeface-efficient-face-recognition-model-for)
11
+ [![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/edgeface-efficient-face-recognition-model-for/lightweight-face-recognition-on-ijb-c)](https://paperswithcode.com/sota/lightweight-face-recognition-on-ijb-c?p=edgeface-efficient-face-recognition-model-for)
12
+
13
+ [![arXiv](https://img.shields.io/badge/cs.CV-arXiv%3A2307.01838-009d81v2.svg)](https://arxiv.org/abs/2307.01838v2)
14
+ [![HF-Demo](https://img.shields.io/badge/%F0%9F%A4%97%20Hugging%20Face-Demo-orange)](https://huggingface.co/spaces/Idiap/EdgeFace)
15
+ [![HF-Model](https://img.shields.io/badge/%F0%9F%A4%97%20Hugging%20Face-Model-green)](https://huggingface.co/collections/Idiap/edgeface-67f500eded03ecd8be56e63e)
16
+
17
+
18
+ This repository contains inference code and pretrained models to use [**EdgeFace: Efficient Face Recognition Model for Edge Devices**](https://ieeexplore.ieee.org/abstract/document/10388036/),
19
+ which is the **winning entry** in *the compact track of ["EFaR 2023: Efficient Face Recognition Competition"](https://arxiv.org/abs/2308.04168) organised at the IEEE International Joint Conference on Biometrics (IJCB) 2023*. For the complete source code of training and evaluation, please check the [official repository](https://gitlab.idiap.ch/bob/bob.paper.tbiom2023_edgeface).
20
+
21
+
22
+ ![EdgeFace](assets/edgeface.png)
23
+
24
+ ## Installation
25
+ ```bash
26
+ pip install -r requirements.txt
27
+ ```
28
+ **Note:** If you cannot `import cv2`, run the following commands on Linux:
29
+
30
+ ```bash
31
+ chmod +x packages.txt
32
+ sudo ./packages.txt
33
+ ```
34
+
35
+ ## Inference
36
+ The following code shows how to use the model for inference:
37
+ ```python
38
+ import torch
39
+ from torchvision import transforms
40
+ from face_alignment import align
41
+ from backbones import get_model
42
+
43
+ # load model
44
+ model_name="edgeface_s_gamma_05" # or edgeface_xs_gamma_06
45
+ model=get_model(model_name)
46
+ checkpoint_path=f'checkpoints/{model_name}.pt'
47
+ model.load_state_dict(torch.load(checkpoint_path, map_location='cpu')) # Load state dict
48
+ model.eval() # Call eval() on the model object
49
+
50
+ transform = transforms.Compose([
51
+ transforms.ToTensor(),
52
+ transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5]),
53
+ ])
54
+
55
+ paths = 'tests/test_images/Elon_Musk.jpg'
56
+ batch_size = len(paths) if isinstance(paths, (list, tuple)) else 1
57
+
58
+ # Align faces (assuming align.get_aligned_face returns a list of tuples)
59
+ aligned_result = align.get_aligned_face(paths, algorithm='yolo')
60
+
61
+ transformed_inputs = [transform(result[1]) for result in aligned_result]
62
+ transformed_inputs = torch.stack(transformed_inputs)
63
+
64
+ # Extract embeddings
65
+ embeddings = model(transformed_inputs)
66
+ print(embeddings.shape) # Expected: torch.Size([batch_size, 512])
67
+ ```
68
+
69
+
70
+
71
+ ## Pre-trained models
72
+ - EdgeFace-s (gamma=0.5): available in [`checkpoints/edgeface_s_gamma_05.pt`](checkpoints/edgeface_s_gamma_05.pt)
73
+ - EdgeFace-xs (gamma=0.6): available in [`checkpoints/edgeface_xs_gamma_06.pt`](checkpoints/edgeface_xs_gamma_06.pt)
74
+
75
+
76
+
77
+ ## Performance
78
+ The performance of each model is reported in Table 2 of the [paper](https://arxiv.org/pdf/2307.01838v2.pdf):
79
+
80
+ ![performance](assets/benchmark.png)
81
+
82
+
83
+ ## :rocket: New! Using EdgeFace Models via `torch.hub`
84
+
85
+ ### Available Models on `torch.hub`
86
+
87
+ - `edgeface_base`
88
+ - `edgeface_s_gamma_05`
89
+ - `edgeface_xs_q`
90
+ - `edgeface_xs_gamma_06`
91
+ - `edgeface_xxs`
92
+ - `edgeface_xxs_q`
93
+
94
+ **NOTE:** Models with `_q` are quantised and require less storage.
95
+
96
+ ### Loading EdgeFace Models with `torch.hub`
97
+
98
+ You can load the models using `torch.hub` as follows:
99
+
100
+ ```python
101
+ import torch
102
+ model = torch.hub.load('otroshi/edgeface', 'edgeface_xs_gamma_06', source='github', pretrained=True)
103
+ model.eval()
104
+ ```
105
+
106
+ ### Performance benchmarks of different variants of EdgeFace
107
+
108
+ | Model | MPARAMS| MFLOPs | LFW(%) | CALFW(%) | CPLFW(%) | CFP-FP(%) | AgeDB30(%) |
109
+ |:--------------------|-------:|-------:|:-------------|:-------------|:-------------|:-------------|:-------------|
110
+ | edgeface_base | 18.23 |1398.83 | 99.83 ± 0.24 | 96.07 ± 1.03 | 93.75 ± 1.16 | 97.01 ± 0.94 | 97.60 ± 0.70 |
111
+ | edgeface_s_gamma_05 | 3.65 | 306.12 | 99.78 ± 0.27 | 95.55 ± 1.05 | 92.48 ± 1.42 | 95.74 ± 1.09 | 97.03 ± 0.85 |
112
+ | edgeface_xs_gamma_06| 1.77 | 154.00 | 99.73 ± 0.35 | 95.28 ± 1.37 | 91.58 ± 1.42 | 94.71 ± 1.07 | 96.08 ± 0.95 |
113
+ | edgeface_xxs | 1.24 | 94.72 | 99.57 ± 0.33 | 94.83 ± 0.98 | 90.27 ± 0.93 | 93.63 ± 0.99 | 94.92 ± 1.15 |
114
+
115
+ ## Reference
116
+ If you use this repository, please cite the following paper, which is [published](https://ieeexplore.ieee.org/abstract/document/10388036/) in the IEEE Transactions on Biometrics, Behavior, and Identity Science (IEEE T-BIOM). The PDF version of the paper is available as [pre-print on arxiv](https://arxiv.org/pdf/2307.01838v2.pdf). The complete source code for reproducing all experiments in the paper (including training and evaluation) is also publicly available in the [official repository](https://gitlab.idiap.ch/bob/bob.paper.tbiom2023_edgeface).
117
+
118
+
119
+ ```bibtex
120
+ @article{edgeface,
121
+ title={Edgeface: Efficient face recognition model for edge devices},
122
+ author={George, Anjith and Ecabert, Christophe and Shahreza, Hatef Otroshi and Kotwal, Ketan and Marcel, Sebastien},
123
+ journal={IEEE Transactions on Biometrics, Behavior, and Identity Science},
124
+ year={2024}
125
+ }
126
+ ```
src/third_party/edgeface/assets/benchmark.png ADDED

Git LFS Details

  • SHA256: 8e14612848a5cfa2d4f4b3e4eba4b595103ebd79008322f64ea113fa66f51dc8
  • Pointer size: 131 Bytes
  • Size of remote file: 368 kB
src/third_party/edgeface/assets/edgeface.png ADDED

Git LFS Details

  • SHA256: 232ec9a36cd4cb56452b2d37174678332aba8be83c300775b2df432c21787326
  • Pointer size: 131 Bytes
  • Size of remote file: 616 kB
src/third_party/edgeface/backbones/__init__.py ADDED
@@ -0,0 +1,37 @@
1
+ """
2
+ ===============================================================================
3
+ Author: Anjith George
4
+ Institution: Idiap Research Institute, Martigny, Switzerland.
5
+
6
+ Copyright (C) 2023 Anjith George
7
+
8
+ This software is distributed under the terms described in the LICENSE file
9
+ located in the parent directory of this source code repository.
10
+
11
+ For inquiries, please contact the author at [email protected]
12
+ ===============================================================================
13
+ """
14
+ from .timmfr import get_timmfrv2, replace_linear_with_lowrank_2
15
+
16
+ import torch
17
+
18
+ def get_model(name, **kwargs):
19
+
20
+ if name=='edgeface_xs_gamma_06':
21
+ return replace_linear_with_lowrank_2(get_timmfrv2('edgenext_x_small', batchnorm=False), rank_ratio=0.6)
22
+ elif name=='edgeface_xs_q':
23
+ model= get_timmfrv2('edgenext_x_small', batchnorm=False)
24
+ model = torch.quantization.quantize_dynamic(model, qconfig_spec={torch.nn.Linear}, dtype=torch.qint8)
25
+ return model
26
+ elif name=='edgeface_xxs':
27
+ return get_timmfrv2('edgenext_xx_small', batchnorm=False)
28
+ elif name=='edgeface_base':
29
+ return get_timmfrv2('edgenext_base', batchnorm=False)
30
+ elif name=='edgeface_xxs_q':
31
+ model=get_timmfrv2('edgenext_xx_small', batchnorm=False)
32
+ model = torch.quantization.quantize_dynamic(model, qconfig_spec={torch.nn.Linear}, dtype=torch.qint8)
33
+ return model
34
+ elif name=='edgeface_s_gamma_05':
35
+ return replace_linear_with_lowrank_2(get_timmfrv2('edgenext_small', batchnorm=False), rank_ratio=0.5)
36
+ else:
37
+ raise ValueError()
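A short usage sketch for the `get_model` factory above, assuming it is run from the edgeface directory and that a matching checkpoint from this commit sits under `ckpts/`; the 112x112 input size and 512-dimensional output follow the inference example in the README:

```python
import torch
from backbones import get_model  # the factory defined above

model = get_model("edgeface_s_gamma_05")  # low-rank (rank_ratio=0.5) edgenext_small backbone
state = torch.load("ckpts/edgeface_s_gamma_05.pt", map_location="cpu")
model.load_state_dict(state)
model.eval()

faces = torch.randn(2, 3, 112, 112)  # stand-in for a batch of aligned 112x112 RGB face crops
with torch.no_grad():
    embeddings = model(faces)
print(embeddings.shape)  # expected: torch.Size([2, 512])
```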
src/third_party/edgeface/backbones/timmfr.py ADDED
@@ -0,0 +1,78 @@
1
+ """
2
+ ===============================================================================
3
+ Author: Anjith George
4
+ Institution: Idiap Research Institute, Martigny, Switzerland.
5
+
6
+ Copyright (C) 2023 Anjith George
7
+
8
+ This software is distributed under the terms described in the LICENSE file
9
+ located in the parent directory of this source code repository.
10
+
11
+ For inquiries, please contact the author at [email protected]
12
+ ===============================================================================
13
+ """
14
+
15
+
16
+
17
+ import timm
18
+ import torch
19
+ import torch.nn as nn
20
+ import math
21
+
22
+ class LoRaLin(nn.Module):
23
+ def __init__(self, in_features, out_features, rank, bias=True):
24
+ super(LoRaLin, self).__init__()
25
+ self.in_features = in_features
26
+ self.out_features = out_features
27
+ self.rank = rank
28
+ self.linear1 = nn.Linear(in_features, rank, bias=False)
29
+ self.linear2 = nn.Linear(rank, out_features, bias=bias)
30
+
31
+ def forward(self, input):
32
+ x = self.linear1(input)
33
+ x = self.linear2(x)
34
+ return x
35
+
36
+ def replace_linear_with_lowrank_recursive_2(model, rank_ratio=0.2):
37
+ for name, module in model.named_children():
38
+ if isinstance(module, nn.Linear) and 'head' not in name:
39
+ in_features = module.in_features
40
+ out_features = module.out_features
41
+ rank = max(2,int(min(in_features, out_features) * rank_ratio))
42
+ bias=False
43
+ if module.bias is not None:
44
+ bias=True
45
+ lowrank_module = LoRaLin(in_features, out_features, rank, bias)
46
+
47
+ setattr(model, name, lowrank_module)
48
+ else:
49
+ replace_linear_with_lowrank_recursive_2(module, rank_ratio)
50
+
51
+ def replace_linear_with_lowrank_2(model, rank_ratio=0.2):
52
+ replace_linear_with_lowrank_recursive_2(model, rank_ratio)
53
+ return model
54
+
55
+
56
+
57
+ class TimmFRWrapperV2(nn.Module):
58
+ """
59
+ Wraps timm model
60
+ """
61
+ def __init__(self, model_name='edgenext_x_small', featdim=512, batchnorm=False):
62
+ super().__init__()
63
+ self.featdim = featdim
64
+ self.model_name = model_name
65
+
66
+ self.model = timm.create_model(self.model_name)
67
+ self.model.reset_classifier(self.featdim)
68
+
69
+ def forward(self, x):
70
+ x = self.model(x)
71
+ return x
72
+
73
+
74
+ def get_timmfrv2(model_name, **kwargs):
75
+ """
76
+ Create an instance of TimmFRWrapperV2 with the specified `model_name` and additional arguments passed as `kwargs`.
77
+ """
78
+ return TimmFRWrapperV2(model_name=model_name, **kwargs)
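`LoRaLin` above factors a dense `nn.Linear(in, out)` into `Linear(in, rank)` followed by `Linear(rank, out)`, and `replace_linear_with_lowrank_2` swaps this in for every non-head linear layer. A rough, self-contained sketch of the parameter saving (layer sizes and rank ratio chosen purely for illustration):

```python
import torch.nn as nn
from backbones.timmfr import LoRaLin  # assumes the edgeface directory is on sys.path

def n_params(module: nn.Module) -> int:
    return sum(p.numel() for p in module.parameters())

full = nn.Linear(1024, 512)                # 1024*512 + 512 = 524,800 parameters
rank = max(2, int(min(1024, 512) * 0.5))   # rank_ratio = 0.5 -> rank 256, as in replace_linear_with_lowrank_2
low = LoRaLin(1024, 512, rank, bias=True)  # 1024*256 + 256*512 + 512 = 393,728 parameters

print(n_params(full), n_params(low))       # the factored pair is roughly 25% smaller here
```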
src/third_party/edgeface/ckpts/edgeface_s_gamma_05.pt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:dc59abda2e8580399fd115a1eeb07e1f21156196db604b884407bcf0f17efb07
3
+ size 14695737
src/third_party/edgeface/ckpts/edgeface_xs_gamma_06.pt ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5ae7504cd9aee0a5d52c2115fd2eb66b0985dd1730f40134b5854e0cb658ce16
3
+ size 7170425
src/third_party/edgeface/face_alignment/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2022 Minchul Kim
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
src/third_party/edgeface/face_alignment/README.md ADDED
@@ -0,0 +1 @@
1
+ The face alignment script is from the [AdaFace](https://github.com/mk-minchul/AdaFace) repository: https://github.com/mk-minchul/AdaFace/tree/master/face_alignment
src/third_party/edgeface/face_alignment/align.py ADDED
@@ -0,0 +1,58 @@
1
+ import sys
2
+ import os
3
+ import torch
4
+ from . import mtcnn
5
+ from .face_yolo import face_yolo_detection
6
+ import argparse
7
+ from PIL import Image
8
+ from tqdm import tqdm
9
+ import random
10
+ from datetime import datetime
11
+
12
+ DEVICE = "cuda:0" if torch.cuda.is_available() else "cpu"
13
+
14
+ mtcnn_model = mtcnn.MTCNN(device=DEVICE, crop_size=(112, 112))
15
+
16
+ def add_padding(pil_img, top, right, bottom, left, color=(0,0,0)):
17
+ width, height = pil_img.size
18
+ new_width = width + right + left
19
+ new_height = height + top + bottom
20
+ result = Image.new(pil_img.mode, (new_width, new_height), color)
21
+ result.paste(pil_img, (left, top))
22
+ return result
23
+
24
+ def handle_image_mtcnn(img_path, pil_img):
25
+ img = Image.open(img_path).convert('RGB') if pil_img is None else pil_img
26
+ assert isinstance(img, Image.Image), 'Face alignment requires PIL image or path'
27
+ try:
28
+ bboxes, faces = mtcnn_model.align_multi(img, limit=1)
29
+ return bboxes[0], faces[0]
30
+ except Exception as e:
31
+ print(f'Face detection failed: {e}')
32
+ return None, None
33
+
34
+ def get_aligned_face(image_path_or_image_paths, rgb_pil_image=None, algorithm='mtcnn'):
35
+ if algorithm=='mtcnn':
36
+ if isinstance(image_path_or_image_paths, list):
37
+ results = [handle_image_mtcnn(path, rgb_pil_image) for path in image_path_or_image_paths]
38
+ return results
39
+ elif isinstance(image_path_or_image_paths, str):
40
+ return [handle_image_mtcnn(image_path_or_image_paths, rgb_pil_image)]
41
+ else:
42
+ raise TypeError("image_path_or_image_paths must be a list or string")
43
+
44
+ elif algorithm=='yolo':
45
+ if isinstance(image_path_or_image_paths, list):
46
+ image_paths = image_path_or_image_paths
47
+ results = face_yolo_detection(image_paths,
48
+ # yolo_model_path="ckpts/yolo_face_detection/model.pt",
49
+ use_batch=True, device=DEVICE)
50
+ elif isinstance(image_path_or_image_paths, str):
51
+ image_paths = [image_path_or_image_paths]
52
+ results = face_yolo_detection(image_paths,
53
+ # yolo_model_path="ckpts/yolo_face_detection/model.pt",
54
+ use_batch=True, device=DEVICE)
55
+ else:
56
+ raise TypeError("image_path_or_image_paths must be a list or string")
57
+ results = list(results)
58
+ return results
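`get_aligned_face` accepts a single path or a list of paths and yields `(bounding_boxes, aligned_face)` pairs. A brief usage sketch, assuming it runs from the edgeface directory and that the test images shipped in this commit are present:

```python
from face_alignment import align

# Single image, YOLO detector: one (bounding_boxes, face) pair per input image
results = align.get_aligned_face("tests/test_images/Elon_Musk.jpg", algorithm="yolo")
bboxes, face = results[0]
if face:  # a 112x112 PIL crop, or an empty list when no face was detected
    face.save("elon_aligned.png")

# Several images with the MTCNN detector instead
paths = ["tests/test_images/Tom Hanks.jpg", "tests/test_images/Viola Davis.jpg"]
for bbox, aligned in align.get_aligned_face(paths, algorithm="mtcnn"):
    print(bbox)  # None when MTCNN fails on that image
```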
src/third_party/edgeface/face_alignment/face_yolo.py ADDED
@@ -0,0 +1,149 @@
1
+ from ultralytics import YOLO
2
+ import cv2
3
+ import os
4
+ from PIL import Image
5
+ import numpy as np
6
+ import glob
7
+ import sys
8
+ import argparse
9
+ import torch
10
+
11
+ sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
12
+
13
+ from utils import download_yolo_face_detection
14
+
15
+ def initialize_yolo_model(yolo_model_path):
16
+ """Initialize YOLO model with specified device."""
17
+ # if device.startswith('cuda') and not torch.cuda.is_available():
18
+ # print("Warning: CUDA not available, falling back to CPU.")
19
+ # device = 'cpu'
20
+ if not os.path.exists(yolo_model_path):
21
+ download_yolo_face_detection.download_yolo_face_detection_model()
22
+ return YOLO(yolo_model_path)
23
+
24
+ def process_image_results(image, image_rgb, boxes):
25
+ """Process bounding boxes and crop faces for a single image."""
26
+ bounding_boxes, cropped_faces = [], []
27
+ for box in boxes:
28
+ x1, y1, x2, y2 = map(int, box)
29
+ if x2 > x1 and y2 > y1 and x1 >= 0 and y1 >= 0 and x2 <= image.shape[1] and y2 <= image.shape[0]:
30
+ bounding_boxes.append([x1, y1, x2, y2])
31
+ cropped_face = image_rgb[y1:y2, x1:x2]
32
+ if cropped_face.size > 0:
33
+ pil_image = Image.fromarray(cropped_face).resize((112, 112), Image.Resampling.BILINEAR)
34
+ cropped_faces.append(pil_image)
35
+ return np.array(bounding_boxes, dtype=np.int32) if bounding_boxes else np.empty((0, 4), dtype=np.int32), cropped_faces
36
+
37
+ def process_batch(model, image_paths, all_bounding_boxes, all_cropped_faces, device):
38
+ """Process images in batch mode using list comprehensions for efficiency."""
39
+ # Validate and load images, filter out invalid ones
40
+ valid_data = [(cv2.imread(path), path) for path in image_paths if os.path.exists(path)]
41
+ valid_images, valid_image_paths = zip(*[(img, path) for img, path in valid_data if img is not None]) if valid_data else ([], [])
42
+
43
+ # Append empty results for invalid images
44
+ for path in image_paths:
45
+ if not os.path.exists(path) or cv2.imread(path) is None:
46
+ all_bounding_boxes.append(np.empty((0, 4), dtype=np.int32))
47
+ all_cropped_faces.append([])
48
+ print(f"Warning: {'not found' if not os.path.exists(path) else 'failed to load'} {path}. Skipping.")
49
+
50
+ # Process valid images
51
+ if valid_images:
52
+ images_rgb = [cv2.cvtColor(img, cv2.COLOR_BGR2RGB) for img in valid_images]
53
+ results = model.predict(source=valid_image_paths, conf=0.25, iou=0.45, verbose=False, device=device)
54
+
55
+ # Process results with comprehension
56
+ for img, rgb, result in zip(valid_images, images_rgb, results):
57
+ bboxes, faces = process_image_results(img, rgb, result.boxes.xyxy.cpu().numpy())
58
+ all_bounding_boxes.append(bboxes)
59
+ all_cropped_faces.append(faces[0] if faces else [])
60
+
61
+ def process_individual(model, image_paths, all_bounding_boxes, all_cropped_faces, device):
62
+ """Process images individually."""
63
+ for image_path in image_paths:
64
+ if not os.path.exists(image_path):
65
+ print(f"Warning: {image_path} not found. Skipping.")
66
+ all_bounding_boxes.append(np.empty((0, 4), dtype=np.int32))
67
+ all_cropped_faces.append([])
68
+ continue
69
+
70
+ image = cv2.imread(image_path)
71
+ if image is None:
72
+ print(f"Warning: Failed to load {image_path}. Skipping.")
73
+ all_bounding_boxes.append(np.empty((0, 4), dtype=np.int32))
74
+ all_cropped_faces.append([])
75
+ continue
76
+
77
+ image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
78
+ results = model(image_path, conf=0.25, iou=0.45, verbose=False, device=device)
79
+
80
+ for result in results:
81
+ boxes = result.boxes.xyxy.cpu().numpy()
82
+ bboxes, faces = process_image_results(image, image_rgb, boxes)
83
+ all_bounding_boxes.append(bboxes)
84
+ all_cropped_faces.append(faces[0] if faces else [])
85
+
86
+ def face_yolo_detection(image_paths, yolo_model_path="./ckpts/yolo_face_detection/model.pt", use_batch=True, device='cuda'):
87
+ """Perform face detection using YOLOv11 with batch or individual processing on specified device."""
88
+ model = initialize_yolo_model(yolo_model_path)
89
+ all_bounding_boxes, all_cropped_faces = [], []
90
+
91
+ if use_batch:
92
+ process_batch(model, image_paths, all_bounding_boxes, all_cropped_faces, device)
93
+ else:
94
+ process_individual(model, image_paths, all_bounding_boxes, all_cropped_faces, device)
95
+
96
+ return zip(all_bounding_boxes, all_cropped_faces)
97
+
98
+ if __name__ == "__main__":
99
+ parser = argparse.ArgumentParser(description="YOLOv11 face detection")
100
+ parser.add_argument("--use-batch", action="store_true", default=True, help="Use batch processing (default: True)")
101
+ parser.add_argument("--image-dir", type=str, default="test/test_images", help="Input image directory")
102
+ parser.add_argument("--yolo-model-path", type=str, default="ckpts/yolo_face_detection/model.pt", help="YOLO model path")
103
+ parser.add_argument("--device", type=str, default="cuda", help="Device to run the model (e.g., 'cuda', 'cpu', 'cuda:0')")
104
+
105
+ args = parser.parse_args()
106
+
107
+ image_paths = (glob.glob(os.path.join(args.image_dir, "*.[jJ][pP][gG]")) +
108
+ glob.glob(os.path.join(args.image_dir, "*.[pP][nN][gG]")))
109
+
110
+ if args.yolo_model_path:
111
+ yolo_model_path = args.yolo_model_path
112
+ else:
113
+ yolo_model_path = os.path.join("ckpts", "yolo_face_detection", "model.pt")
114
+
115
+ import time
116
+ t1 = time.time()
117
+ results = face_yolo_detection(image_paths, yolo_model_path, args.use_batch, args.device)
118
+ print("Time taken:", time.time() - t1)
119
+
120
+ # Optional: Save or process results
121
+ # for i, (bboxes, faces) in enumerate(results):
122
+ # print(f"Image {i}: Bounding Boxes: {bboxes}")
123
+ # for j, face in enumerate(faces):
124
+ # face.save(f"face_{i}_{j}.png")
125
+
126
+ # Benchmarking (uncomment to use)
127
+ # import time
128
+ # num_runs = 50
129
+ # batch_times, individual_times = [], []
130
+
131
+ # # Benchmark batch processing
132
+ # for _ in range(num_runs):
133
+ # t1 = time.time()
134
+ # face_yolo_detection(image_paths, yolo_model_path, use_batch=True, device=args.device)
135
+ # batch_times.append(time.time() - t1)
136
+
137
+ # # Benchmark individual processing
138
+ # for _ in range(num_runs):
139
+ # t1 = time.time()
140
+ # face_yolo_detection(image_paths, yolo_model_path, use_batch=False, device=args.device)
141
+ # individual_times.append(time.time() - t1)
142
+
143
+ # # Calculate and print average times
144
+ # avg_batch_time = sum(batch_times) / num_runs
145
+ # avg_individual_time = sum(individual_times) / num_runs
146
+
147
+ # print(f"\nBenchmark Results (over {num_runs} runs):")
148
+ # print(f"Average Batch Processing Time: {avg_batch_time:.4f} seconds")
149
+ # print(f"Average Individual Processing Time: {avg_individual_time:.4f} seconds")
src/third_party/edgeface/face_alignment/mtcnn.py ADDED
@@ -0,0 +1,175 @@
1
+ from typing import Tuple
2
+ import numpy as np
3
+ import torch
4
+ from PIL import Image
5
+ from torch.autograd import Variable
6
+
7
+ import sys
8
+ import os
9
+
10
+ sys.path.insert(0, os.path.dirname(__file__))
11
+
12
+ from mtcnn_pytorch.src.get_nets import PNet, RNet, ONet
13
+ from mtcnn_pytorch.src.box_utils import nms, calibrate_box, get_image_boxes, convert_to_square
14
+ from mtcnn_pytorch.src.first_stage import run_first_stage
15
+ from mtcnn_pytorch.src.align_trans import get_reference_facial_points, warp_and_crop_face
16
+
17
+
18
+ class MTCNN():
19
+ def __init__(self, device: str = 'cuda:0', crop_size: Tuple[int, int] = (112, 112)):
20
+
21
+ assert device in ['cuda:0', 'cpu']
22
+ self.device = torch.device(device)
23
+ assert crop_size in [(112, 112), (96, 112)]
24
+ self.crop_size = crop_size
25
+
26
+ # change working dir to this file location to load npz files. Then switch back
27
+ cwd = os.getcwd()
28
+ os.chdir(os.path.dirname(__file__))
29
+
30
+ self.pnet = PNet().to(self.device)
31
+ self.rnet = RNet().to(self.device)
32
+ self.onet = ONet().to(self.device)
33
+ self.pnet.eval()
34
+ self.rnet.eval()
35
+ self.onet.eval()
36
+ self.refrence = get_reference_facial_points(default_square=crop_size[0] == crop_size[1])
37
+
38
+ self.min_face_size = 20
39
+ self.thresholds = [0.6,0.7,0.9]
40
+ self.nms_thresholds = [0.7, 0.7, 0.7]
41
+ self.factor = 0.85
42
+
43
+
44
+ os.chdir(cwd)
45
+
46
+ def align(self, img):
47
+ _, landmarks = self.detect_faces(img, self.min_face_size, self.thresholds, self.nms_thresholds, self.factor)
48
+ facial5points = [[landmarks[0][j], landmarks[0][j + 5]] for j in range(5)]
49
+ warped_face = warp_and_crop_face(np.array(img), facial5points, self.refrence, crop_size=self.crop_size)
50
+ return Image.fromarray(warped_face)
51
+
52
+ def align_multi(self, img, limit=None):
53
+ boxes, landmarks = self.detect_faces(img, self.min_face_size, self.thresholds, self.nms_thresholds, self.factor)
54
+ if limit:
55
+ boxes = boxes[:limit]
56
+ landmarks = landmarks[:limit]
57
+ faces = []
58
+ for landmark in landmarks:
59
+ facial5points = [[landmark[j], landmark[j + 5]] for j in range(5)]
60
+ warped_face = warp_and_crop_face(np.array(img), facial5points, self.refrence, crop_size=self.crop_size)
61
+ faces.append(Image.fromarray(warped_face))
62
+ return boxes, faces
63
+
64
+ def detect_faces(self, image, min_face_size, thresholds, nms_thresholds, factor):
65
+ """
66
+ Arguments:
67
+ image: an instance of PIL.Image.
68
+ min_face_size: a float, the minimum face size (in pixels) to detect.
69
+ thresholds: a list of 3 per-stage face-score thresholds.
70
+ nms_thresholds: a list of 3 per-stage NMS thresholds.
71
+ factor: a float, the scale factor of the image pyramid.
72
+ Returns:
73
+ two float numpy arrays of shapes [n_boxes, 5] and [n_boxes, 10],
74
+ bounding boxes and facial landmarks.
75
+ """
76
+
77
+ # BUILD AN IMAGE PYRAMID
78
+ width, height = image.size
79
+ min_length = min(height, width)
80
+
81
+ min_detection_size = 12
82
+ # factor = 0.707 # sqrt(0.5)
83
+
84
+ # scales for scaling the image
85
+ scales = []
86
+
87
+ # scales the image so that
88
+ # minimum size that we can detect equals to
89
+ # minimum face size that we want to detect
90
+ m = min_detection_size / min_face_size
91
+ min_length *= m
92
+
93
+ factor_count = 0
94
+ while min_length > min_detection_size:
95
+ scales.append(m * factor**factor_count)
96
+ min_length *= factor
97
+ factor_count += 1
98
+
99
+ # STAGE 1
100
+
101
+ # it will be returned
102
+ bounding_boxes = []
103
+
104
+ with torch.no_grad():
105
+ # run P-Net on different scales
106
+ for s in scales:
107
+ boxes = run_first_stage(image, self.pnet, scale=s, threshold=thresholds[0])
108
+ bounding_boxes.append(boxes)
109
+
110
+ # collect boxes (and offsets, and scores) from different scales
111
+ bounding_boxes = [i for i in bounding_boxes if i is not None]
112
+ if len(bounding_boxes) == 0:
113
+ return [], []
114
+ bounding_boxes = np.vstack(bounding_boxes)
115
+
116
+ keep = nms(bounding_boxes[:, 0:5], nms_thresholds[0])
117
+ bounding_boxes = bounding_boxes[keep]
118
+
119
+ # use offsets predicted by pnet to transform bounding boxes
120
+ bounding_boxes = calibrate_box(bounding_boxes[:, 0:5], bounding_boxes[:, 5:])
121
+ # shape [n_boxes, 5]
122
+
123
+ bounding_boxes = convert_to_square(bounding_boxes)
124
+ bounding_boxes[:, 0:4] = np.round(bounding_boxes[:, 0:4])
125
+
126
+ # STAGE 2
127
+
128
+ img_boxes = get_image_boxes(bounding_boxes, image, size=24)
129
+ img_boxes = torch.FloatTensor(img_boxes).to(self.device)
130
+
131
+ output = self.rnet(img_boxes)
132
+ offsets = output[0].cpu().data.numpy() # shape [n_boxes, 4]
133
+ probs = output[1].cpu().data.numpy() # shape [n_boxes, 2]
134
+
135
+ keep = np.where(probs[:, 1] > thresholds[1])[0]
136
+ bounding_boxes = bounding_boxes[keep]
137
+ bounding_boxes[:, 4] = probs[keep, 1].reshape((-1, ))
138
+ offsets = offsets[keep]
139
+
140
+ keep = nms(bounding_boxes, nms_thresholds[1])
141
+ bounding_boxes = bounding_boxes[keep]
142
+ bounding_boxes = calibrate_box(bounding_boxes, offsets[keep])
143
+ bounding_boxes = convert_to_square(bounding_boxes)
144
+ bounding_boxes[:, 0:4] = np.round(bounding_boxes[:, 0:4])
145
+
146
+ # STAGE 3
147
+
148
+ img_boxes = get_image_boxes(bounding_boxes, image, size=48)
149
+ if len(img_boxes) == 0:
150
+ return [], []
151
+ img_boxes = torch.FloatTensor(img_boxes).to(self.device)
152
+ output = self.onet(img_boxes)
153
+ landmarks = output[0].cpu().data.numpy() # shape [n_boxes, 10]
154
+ offsets = output[1].cpu().data.numpy() # shape [n_boxes, 4]
155
+ probs = output[2].cpu().data.numpy() # shape [n_boxes, 2]
156
+
157
+ keep = np.where(probs[:, 1] > thresholds[2])[0]
158
+ bounding_boxes = bounding_boxes[keep]
159
+ bounding_boxes[:, 4] = probs[keep, 1].reshape((-1, ))
160
+ offsets = offsets[keep]
161
+ landmarks = landmarks[keep]
162
+
163
+ # compute landmark points
164
+ width = bounding_boxes[:, 2] - bounding_boxes[:, 0] + 1.0
165
+ height = bounding_boxes[:, 3] - bounding_boxes[:, 1] + 1.0
166
+ xmin, ymin = bounding_boxes[:, 0], bounding_boxes[:, 1]
167
+ landmarks[:, 0:5] = np.expand_dims(xmin, 1) + np.expand_dims(width, 1) * landmarks[:, 0:5]
168
+ landmarks[:, 5:10] = np.expand_dims(ymin, 1) + np.expand_dims(height, 1) * landmarks[:, 5:10]
169
+
170
+ bounding_boxes = calibrate_box(bounding_boxes, offsets)
171
+ keep = nms(bounding_boxes, nms_thresholds[2], mode='min')
172
+ bounding_boxes = bounding_boxes[keep]
173
+ landmarks = landmarks[keep]
174
+
175
+ return bounding_boxes, landmarks
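The `MTCNN` wrapper above exposes `align` (best single face) and `align_multi` (all faces, optionally capped), both returning crops warped to the reference landmarks at the chosen `crop_size`. A minimal usage sketch, assuming the module is importable as `mtcnn` and that `image.jpg` is a placeholder test image:

```python
# Minimal usage sketch for the MTCNN wrapper defined above.
# Assumes this module is importable as `mtcnn` and that "image.jpg" exists;
# both names are placeholders.
from PIL import Image
from mtcnn import MTCNN

detector = MTCNN(device='cpu', crop_size=(112, 112))
img = Image.open('image.jpg').convert('RGB')

# Single best face: one aligned PIL crop at crop_size.
aligned = detector.align(img)
aligned.save('aligned_face.png')

# All faces (optionally capped): bounding boxes plus a list of aligned crops.
boxes, faces = detector.align_multi(img, limit=5)
for i, face in enumerate(faces):
    face.save(f'face_{i}.png')
```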
src/third_party/edgeface/face_alignment/mtcnn_pytorch/.gitignore ADDED
@@ -0,0 +1,3 @@
1
+ .ipynb_checkpoints
2
+ __pycache__
3
+
src/third_party/edgeface/face_alignment/mtcnn_pytorch/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2017 Dan Antoshchenko
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
src/third_party/edgeface/face_alignment/mtcnn_pytorch/README.md ADDED
@@ -0,0 +1,26 @@
1
+ # MTCNN
2
+
3
+ `pytorch` implementation of **inference stage** of face detection algorithm described in
4
+ [Joint Face Detection and Alignment using Multi-task Cascaded Convolutional Networks](https://arxiv.org/abs/1604.02878).
5
+
6
+ ## Example
7
+ ![example of a face detection](images/example.png)
8
+
9
+ ## How to use it
10
+ Just download the repository and then do this
11
+ ```python
12
+ from src import detect_faces
13
+ from PIL import Image
14
+
15
+ image = Image.open('image.jpg')
16
+ bounding_boxes, landmarks = detect_faces(image)
17
+ ```
18
+ For examples see `test_on_images.ipynb`.
19
+
20
+ ## Requirements
21
+ * pytorch 0.2
22
+ * Pillow, numpy
23
+
24
+ ## Credit
25
+ This implementation is heavily inspired by:
26
+ * [pangyupo/mxnet_mtcnn_face_detection](https://github.com/pangyupo/mxnet_mtcnn_face_detection)
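The README's snippet returns a box array and a landmark array per image. A small sketch for drawing them with Pillow follows; the `detect_faces` call mirrors the README, while the drawing code and the `image.jpg` path are illustrative. The landmark layout (five x values followed by five y values) matches `mtcnn.py` above.

```python
# Sketch: visualize the arrays returned by detect_faces (see the snippet above).
# The drawing code and "image.jpg" are illustrative, not part of this repository.
from PIL import Image, ImageDraw
from src import detect_faces

image = Image.open('image.jpg')
bounding_boxes, landmarks = detect_faces(image)

draw = ImageDraw.Draw(image)
for box in bounding_boxes:
    # the first four values are x1, y1, x2, y2; a confidence score may follow
    draw.rectangle([float(v) for v in box[:4]], outline='red')
for lm in landmarks:
    # five x coordinates followed by five y coordinates, as in mtcnn.py
    for x, y in zip(lm[:5], lm[5:10]):
        draw.ellipse([x - 2, y - 2, x + 2, y + 2], fill='blue')
image.save('detections.png')
```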
src/third_party/edgeface/face_alignment/mtcnn_pytorch/caffe_models/det1.caffemodel ADDED
Binary file (28.2 kB).
 
src/third_party/edgeface/face_alignment/mtcnn_pytorch/caffe_models/det1.prototxt ADDED
@@ -0,0 +1,177 @@
1
+ name: "PNet"
2
+ input: "data"
3
+ input_dim: 1
4
+ input_dim: 3
5
+ input_dim: 12
6
+ input_dim: 12
7
+
8
+ layer {
9
+ name: "conv1"
10
+ type: "Convolution"
11
+ bottom: "data"
12
+ top: "conv1"
13
+ param {
14
+ lr_mult: 1
15
+ decay_mult: 1
16
+ }
17
+ param {
18
+ lr_mult: 2
19
+ decay_mult: 0
20
+ }
21
+ convolution_param {
22
+ num_output: 10
23
+ kernel_size: 3
24
+ stride: 1
25
+ weight_filler {
26
+ type: "xavier"
27
+ }
28
+ bias_filler {
29
+ type: "constant"
30
+ value: 0
31
+ }
32
+ }
33
+ }
34
+ layer {
35
+ name: "PReLU1"
36
+ type: "PReLU"
37
+ bottom: "conv1"
38
+ top: "conv1"
39
+ }
40
+ layer {
41
+ name: "pool1"
42
+ type: "Pooling"
43
+ bottom: "conv1"
44
+ top: "pool1"
45
+ pooling_param {
46
+ pool: MAX
47
+ kernel_size: 2
48
+ stride: 2
49
+ }
50
+ }
51
+
52
+ layer {
53
+ name: "conv2"
54
+ type: "Convolution"
55
+ bottom: "pool1"
56
+ top: "conv2"
57
+ param {
58
+ lr_mult: 1
59
+ decay_mult: 1
60
+ }
61
+ param {
62
+ lr_mult: 2
63
+ decay_mult: 0
64
+ }
65
+ convolution_param {
66
+ num_output: 16
67
+ kernel_size: 3
68
+ stride: 1
69
+ weight_filler {
70
+ type: "xavier"
71
+ }
72
+ bias_filler {
73
+ type: "constant"
74
+ value: 0
75
+ }
76
+ }
77
+ }
78
+ layer {
79
+ name: "PReLU2"
80
+ type: "PReLU"
81
+ bottom: "conv2"
82
+ top: "conv2"
83
+ }
84
+
85
+ layer {
86
+ name: "conv3"
87
+ type: "Convolution"
88
+ bottom: "conv2"
89
+ top: "conv3"
90
+ param {
91
+ lr_mult: 1
92
+ decay_mult: 1
93
+ }
94
+ param {
95
+ lr_mult: 2
96
+ decay_mult: 0
97
+ }
98
+ convolution_param {
99
+ num_output: 32
100
+ kernel_size: 3
101
+ stride: 1
102
+ weight_filler {
103
+ type: "xavier"
104
+ }
105
+ bias_filler {
106
+ type: "constant"
107
+ value: 0
108
+ }
109
+ }
110
+ }
111
+ layer {
112
+ name: "PReLU3"
113
+ type: "PReLU"
114
+ bottom: "conv3"
115
+ top: "conv3"
116
+ }
117
+
118
+
119
+ layer {
120
+ name: "conv4-1"
121
+ type: "Convolution"
122
+ bottom: "conv3"
123
+ top: "conv4-1"
124
+ param {
125
+ lr_mult: 1
126
+ decay_mult: 1
127
+ }
128
+ param {
129
+ lr_mult: 2
130
+ decay_mult: 0
131
+ }
132
+ convolution_param {
133
+ num_output: 2
134
+ kernel_size: 1
135
+ stride: 1
136
+ weight_filler {
137
+ type: "xavier"
138
+ }
139
+ bias_filler {
140
+ type: "constant"
141
+ value: 0
142
+ }
143
+ }
144
+ }
145
+
146
+ layer {
147
+ name: "conv4-2"
148
+ type: "Convolution"
149
+ bottom: "conv3"
150
+ top: "conv4-2"
151
+ param {
152
+ lr_mult: 1
153
+ decay_mult: 1
154
+ }
155
+ param {
156
+ lr_mult: 2
157
+ decay_mult: 0
158
+ }
159
+ convolution_param {
160
+ num_output: 4
161
+ kernel_size: 1
162
+ stride: 1
163
+ weight_filler {
164
+ type: "xavier"
165
+ }
166
+ bias_filler {
167
+ type: "constant"
168
+ value: 0
169
+ }
170
+ }
171
+ }
172
+ layer {
173
+ name: "prob1"
174
+ type: "Softmax"
175
+ bottom: "conv4-1"
176
+ top: "prob1"
177
+ }
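det1.prototxt defines the fully convolutional P-Net: three 3x3 convolutions (10, 16, 32 channels) with PReLU and a single 2x2 max pool, followed by two 1x1 heads for the face score (2 channels, softmax) and the box-regression offsets (4 channels). A hedged PyTorch sketch of the same topology; the class name and the ceil-mode pooling choice are porting assumptions, not taken from this repository.

```python
# Sketch: PyTorch equivalent of the P-Net topology described in det1.prototxt.
# Layer naming and ceil_mode=True are porting assumptions.
import torch
import torch.nn as nn

class PNetSketch(nn.Module):
    def __init__(self):
        super().__init__()
        self.features = nn.Sequential(
            nn.Conv2d(3, 10, kernel_size=3),      # conv1
            nn.PReLU(10),                         # PReLU1
            nn.MaxPool2d(2, 2, ceil_mode=True),   # pool1
            nn.Conv2d(10, 16, kernel_size=3),     # conv2
            nn.PReLU(16),                         # PReLU2
            nn.Conv2d(16, 32, kernel_size=3),     # conv3
            nn.PReLU(32),                         # PReLU3
        )
        self.conv4_1 = nn.Conv2d(32, 2, kernel_size=1)  # face / non-face scores
        self.conv4_2 = nn.Conv2d(32, 4, kernel_size=1)  # bounding-box offsets

    def forward(self, x):
        x = self.features(x)
        probs = torch.softmax(self.conv4_1(x), dim=1)   # prob1
        offsets = self.conv4_2(x)
        return probs, offsets

# On a 12x12 input the heads produce 1x1 maps; on larger images they are dense.
print(PNetSketch()(torch.zeros(1, 3, 12, 12))[0].shape)  # torch.Size([1, 2, 1, 1])
```

Because P-Net is fully convolutional, the same module can be run on every level of the image pyramid built in `detect_faces`, producing a dense score map per scale.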
src/third_party/edgeface/face_alignment/mtcnn_pytorch/caffe_models/det2.caffemodel ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:39b20f7a57bb8176cc9466cea4dfd52da6a6f876de60c7ab222a309f2d0ca08c
3
+ size 407910
src/third_party/edgeface/face_alignment/mtcnn_pytorch/caffe_models/det2.prototxt ADDED
@@ -0,0 +1,228 @@
1
+ name: "RNet"
2
+ input: "data"
3
+ input_dim: 1
4
+ input_dim: 3
5
+ input_dim: 24
6
+ input_dim: 24
7
+
8
+
9
+ ##########################
10
+ ######################
11
+ layer {
12
+ name: "conv1"
13
+ type: "Convolution"
14
+ bottom: "data"
15
+ top: "conv1"
16
+ param {
17
+ lr_mult: 0
18
+ decay_mult: 0
19
+ }
20
+ param {
21
+ lr_mult: 0
22
+ decay_mult: 0
23
+ }
24
+ convolution_param {
25
+ num_output: 28
26
+ kernel_size: 3
27
+ stride: 1
28
+ weight_filler {
29
+ type: "xavier"
30
+ }
31
+ bias_filler {
32
+ type: "constant"
33
+ value: 0
34
+ }
35
+ }
36
+ }
37
+ layer {
38
+ name: "prelu1"
39
+ type: "PReLU"
40
+ bottom: "conv1"
41
+ top: "conv1"
42
+ propagate_down: true
43
+ }
44
+ layer {
45
+ name: "pool1"
46
+ type: "Pooling"
47
+ bottom: "conv1"
48
+ top: "pool1"
49
+ pooling_param {
50
+ pool: MAX
51
+ kernel_size: 3
52
+ stride: 2
53
+ }
54
+ }
55
+
56
+ layer {
57
+ name: "conv2"
58
+ type: "Convolution"
59
+ bottom: "pool1"
60
+ top: "conv2"
61
+ param {
62
+ lr_mult: 0
63
+ decay_mult: 0
64
+ }
65
+ param {
66
+ lr_mult: 0
67
+ decay_mult: 0
68
+ }
69
+ convolution_param {
70
+ num_output: 48
71
+ kernel_size: 3
72
+ stride: 1
73
+ weight_filler {
74
+ type: "xavier"
75
+ }
76
+ bias_filler {
77
+ type: "constant"
78
+ value: 0
79
+ }
80
+ }
81
+ }
82
+ layer {
83
+ name: "prelu2"
84
+ type: "PReLU"
85
+ bottom: "conv2"
86
+ top: "conv2"
87
+ propagate_down: true
88
+ }
89
+ layer {
90
+ name: "pool2"
91
+ type: "Pooling"
92
+ bottom: "conv2"
93
+ top: "pool2"
94
+ pooling_param {
95
+ pool: MAX
96
+ kernel_size: 3
97
+ stride: 2
98
+ }
99
+ }
100
+ ####################################
101
+
102
+ ##################################
103
+ layer {
104
+ name: "conv3"
105
+ type: "Convolution"
106
+ bottom: "pool2"
107
+ top: "conv3"
108
+ param {
109
+ lr_mult: 0
110
+ decay_mult: 0
111
+ }
112
+ param {
113
+ lr_mult: 0
114
+ decay_mult: 0
115
+ }
116
+ convolution_param {
117
+ num_output: 64
118
+ kernel_size: 2
119
+ stride: 1
120
+ weight_filler {
121
+ type: "xavier"
122
+ }
123
+ bias_filler {
124
+ type: "constant"
125
+ value: 0
126
+ }
127
+ }
128
+ }
129
+ layer {
130
+ name: "prelu3"
131
+ type: "PReLU"
132
+ bottom: "conv3"
133
+ top: "conv3"
134
+ propagate_down: true
135
+ }
136
+ ###############################
137
+
138
+ ###############################
139
+
140
+ layer {
141
+ name: "conv4"
142
+ type: "InnerProduct"
143
+ bottom: "conv3"
144
+ top: "conv4"
145
+ param {
146
+ lr_mult: 0
147
+ decay_mult: 0
148
+ }
149
+ param {
150
+ lr_mult: 0
151
+ decay_mult: 0
152
+ }
153
+ inner_product_param {
154
+ num_output: 128
155
+ weight_filler {
156
+ type: "xavier"
157
+ }
158
+ bias_filler {
159
+ type: "constant"
160
+ value: 0
161
+ }
162
+ }
163
+ }
164
+ layer {
165
+ name: "prelu4"
166
+ type: "PReLU"
167
+ bottom: "conv4"
168
+ top: "conv4"
169
+ }
170
+
171
+ layer {
172
+ name: "conv5-1"
173
+ type: "InnerProduct"
174
+ bottom: "conv4"
175
+ top: "conv5-1"
176
+ param {
177
+ lr_mult: 0
178
+ decay_mult: 0
179
+ }
180
+ param {
181
+ lr_mult: 0
182
+ decay_mult: 0
183
+ }
184
+ inner_product_param {
185
+ num_output: 2
186
+ #kernel_size: 1
187
+ #stride: 1
188
+ weight_filler {
189
+ type: "xavier"
190
+ }
191
+ bias_filler {
192
+ type: "constant"
193
+ value: 0
194
+ }
195
+ }
196
+ }
197
+ layer {
198
+ name: "conv5-2"
199
+ type: "InnerProduct"
200
+ bottom: "conv4"
201
+ top: "conv5-2"
202
+ param {
203
+ lr_mult: 1
204
+ decay_mult: 1
205
+ }
206
+ param {
207
+ lr_mult: 2
208
+ decay_mult: 1
209
+ }
210
+ inner_product_param {
211
+ num_output: 4
212
+ #kernel_size: 1
213
+ #stride: 1
214
+ weight_filler {
215
+ type: "xavier"
216
+ }
217
+ bias_filler {
218
+ type: "constant"
219
+ value: 0
220
+ }
221
+ }
222
+ }
223
+ layer {
224
+ name: "prob1"
225
+ type: "Softmax"
226
+ bottom: "conv5-1"
227
+ top: "prob1"
228
+ }
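det2.prototxt is the 24x24 R-Net refinement stage: two pooled 3x3 conv blocks (28 and 48 channels), a 2x2 conv to 64 channels, a 128-unit fully connected layer, and two heads for the face score and the box offsets. A hedged PyTorch sketch of the same stack; ceil-mode pooling and the flatten order are porting assumptions.

```python
# Sketch: PyTorch equivalent of the R-Net topology described in det2.prototxt.
# ceil_mode pooling and the flatten order are porting assumptions.
import torch
import torch.nn as nn

class RNetSketch(nn.Module):
    def __init__(self):
        super().__init__()
        self.features = nn.Sequential(
            nn.Conv2d(3, 28, 3), nn.PReLU(28),     # conv1 / prelu1
            nn.MaxPool2d(3, 2, ceil_mode=True),    # pool1
            nn.Conv2d(28, 48, 3), nn.PReLU(48),    # conv2 / prelu2
            nn.MaxPool2d(3, 2, ceil_mode=True),    # pool2
            nn.Conv2d(48, 64, 2), nn.PReLU(64),    # conv3 / prelu3
        )
        self.fc = nn.Sequential(
            nn.Flatten(),
            nn.Linear(64 * 3 * 3, 128),            # conv4 (InnerProduct)
            nn.PReLU(128),                         # prelu4
        )
        self.cls = nn.Linear(128, 2)   # conv5-1: face / non-face scores
        self.box = nn.Linear(128, 4)   # conv5-2: box regression offsets

    def forward(self, x):
        x = self.fc(self.features(x))
        return torch.softmax(self.cls(x), dim=1), self.box(x)

probs, offsets = RNetSketch()(torch.zeros(2, 3, 24, 24))
print(probs.shape, offsets.shape)  # torch.Size([2, 2]) torch.Size([2, 4])
```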
src/third_party/edgeface/face_alignment/mtcnn_pytorch/caffe_models/det3.caffemodel ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9d6098829a4d6d318f37cec42142465637fafe4c673f2e93b69495bf7ca23d2d
3
+ size 1558412
src/third_party/edgeface/face_alignment/mtcnn_pytorch/caffe_models/det3.prototxt ADDED
@@ -0,0 +1,294 @@
1
+ name: "ONet"
2
+ input: "data"
3
+ input_dim: 1
4
+ input_dim: 3
5
+ input_dim: 48
6
+ input_dim: 48
7
+ ##################################
8
+ layer {
9
+ name: "conv1"
10
+ type: "Convolution"
11
+ bottom: "data"
12
+ top: "conv1"
13
+ param {
14
+ lr_mult: 1
15
+ decay_mult: 1
16
+ }
17
+ param {
18
+ lr_mult: 2
19
+ decay_mult: 1
20
+ }
21
+ convolution_param {
22
+ num_output: 32
23
+ kernel_size: 3
24
+ stride: 1
25
+ weight_filler {
26
+ type: "xavier"
27
+ }
28
+ bias_filler {
29
+ type: "constant"
30
+ value: 0
31
+ }
32
+ }
33
+ }
34
+ layer {
35
+ name: "prelu1"
36
+ type: "PReLU"
37
+ bottom: "conv1"
38
+ top: "conv1"
39
+ }
40
+ layer {
41
+ name: "pool1"
42
+ type: "Pooling"
43
+ bottom: "conv1"
44
+ top: "pool1"
45
+ pooling_param {
46
+ pool: MAX
47
+ kernel_size: 3
48
+ stride: 2
49
+ }
50
+ }
51
+ layer {
52
+ name: "conv2"
53
+ type: "Convolution"
54
+ bottom: "pool1"
55
+ top: "conv2"
56
+ param {
57
+ lr_mult: 1
58
+ decay_mult: 1
59
+ }
60
+ param {
61
+ lr_mult: 2
62
+ decay_mult: 1
63
+ }
64
+ convolution_param {
65
+ num_output: 64
66
+ kernel_size: 3
67
+ stride: 1
68
+ weight_filler {
69
+ type: "xavier"
70
+ }
71
+ bias_filler {
72
+ type: "constant"
73
+ value: 0
74
+ }
75
+ }
76
+ }
77
+
78
+ layer {
79
+ name: "prelu2"
80
+ type: "PReLU"
81
+ bottom: "conv2"
82
+ top: "conv2"
83
+ }
84
+ layer {
85
+ name: "pool2"
86
+ type: "Pooling"
87
+ bottom: "conv2"
88
+ top: "pool2"
89
+ pooling_param {
90
+ pool: MAX
91
+ kernel_size: 3
92
+ stride: 2
93
+ }
94
+ }
95
+
96
+ layer {
97
+ name: "conv3"
98
+ type: "Convolution"
99
+ bottom: "pool2"
100
+ top: "conv3"
101
+ param {
102
+ lr_mult: 1
103
+ decay_mult: 1
104
+ }
105
+ param {
106
+ lr_mult: 2
107
+ decay_mult: 1
108
+ }
109
+ convolution_param {
110
+ num_output: 64
111
+ kernel_size: 3
112
+ weight_filler {
113
+ type: "xavier"
114
+ }
115
+ bias_filler {
116
+ type: "constant"
117
+ value: 0
118
+ }
119
+ }
120
+ }
121
+ layer {
122
+ name: "prelu3"
123
+ type: "PReLU"
124
+ bottom: "conv3"
125
+ top: "conv3"
126
+ }
127
+ layer {
128
+ name: "pool3"
129
+ type: "Pooling"
130
+ bottom: "conv3"
131
+ top: "pool3"
132
+ pooling_param {
133
+ pool: MAX
134
+ kernel_size: 2
135
+ stride: 2
136
+ }
137
+ }
138
+ layer {
139
+ name: "conv4"
140
+ type: "Convolution"
141
+ bottom: "pool3"
142
+ top: "conv4"
143
+ param {
144
+ lr_mult: 1
145
+ decay_mult: 1
146
+ }
147
+ param {
148
+ lr_mult: 2
149
+ decay_mult: 1
150
+ }
151
+ convolution_param {
152
+ num_output: 128
153
+ kernel_size: 2
154
+ weight_filler {
155
+ type: "xavier"
156
+ }
157
+ bias_filler {
158
+ type: "constant"
159
+ value: 0
160
+ }
161
+ }
162
+ }
163
+ layer {
164
+ name: "prelu4"
165
+ type: "PReLU"
166
+ bottom: "conv4"
167
+ top: "conv4"
168
+ }
169
+
170
+
171
+ layer {
172
+ name: "conv5"
173
+ type: "InnerProduct"
174
+ bottom: "conv4"
175
+ top: "conv5"
176
+ param {
177
+ lr_mult: 1
178
+ decay_mult: 1
179
+ }
180
+ param {
181
+ lr_mult: 2
182
+ decay_mult: 1
183
+ }
184
+ inner_product_param {
185
+ #kernel_size: 3
186
+ num_output: 256
187
+ weight_filler {
188
+ type: "xavier"
189
+ }
190
+ bias_filler {
191
+ type: "constant"
192
+ value: 0
193
+ }
194
+ }
195
+ }
196
+
197
+ layer {
198
+ name: "drop5"
199
+ type: "Dropout"
200
+ bottom: "conv5"
201
+ top: "conv5"
202
+ dropout_param {
203
+ dropout_ratio: 0.25
204
+ }
205
+ }
206
+ layer {
207
+ name: "prelu5"
208
+ type: "PReLU"
209
+ bottom: "conv5"
210
+ top: "conv5"
211
+ }
212
+
213
+
214
+ layer {
215
+ name: "conv6-1"
216
+ type: "InnerProduct"
217
+ bottom: "conv5"
218
+ top: "conv6-1"
219
+ param {
220
+ lr_mult: 1
221
+ decay_mult: 1
222
+ }
223
+ param {
224
+ lr_mult: 2
225
+ decay_mult: 1
226
+ }
227
+ inner_product_param {
228
+ #kernel_size: 1
229
+ num_output: 2
230
+ weight_filler {
231
+ type: "xavier"
232
+ }
233
+ bias_filler {
234
+ type: "constant"
235
+ value: 0
236
+ }
237
+ }
238
+ }
239
+ layer {
240
+ name: "conv6-2"
241
+ type: "InnerProduct"
242
+ bottom: "conv5"
243
+ top: "conv6-2"
244
+ param {
245
+ lr_mult: 1
246
+ decay_mult: 1
247
+ }
248
+ param {
249
+ lr_mult: 2
250
+ decay_mult: 1
251
+ }
252
+ inner_product_param {
253
+ #kernel_size: 1
254
+ num_output: 4
255
+ weight_filler {
256
+ type: "xavier"
257
+ }
258
+ bias_filler {
259
+ type: "constant"
260
+ value: 0
261
+ }
262
+ }
263
+ }
264
+ layer {
265
+ name: "conv6-3"
266
+ type: "InnerProduct"
267
+ bottom: "conv5"
268
+ top: "conv6-3"
269
+ param {
270
+ lr_mult: 1
271
+ decay_mult: 1
272
+ }
273
+ param {
274
+ lr_mult: 2
275
+ decay_mult: 1
276
+ }
277
+ inner_product_param {
278
+ #kernel_size: 1
279
+ num_output: 10
280
+ weight_filler {
281
+ type: "xavier"
282
+ }
283
+ bias_filler {
284
+ type: "constant"
285
+ value: 0
286
+ }
287
+ }
288
+ }
289
+ layer {
290
+ name: "prob1"
291
+ type: "Softmax"
292
+ bottom: "conv6-1"
293
+ top: "prob1"
294
+ }
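det3.prototxt is the 48x48 O-Net: three pooled conv blocks (32, 64, 64 channels), a 2x2 conv to 128 channels, a 256-unit fully connected layer with dropout, and three heads for the face score (2), the box offsets (4), and the ten landmark coordinates. A hedged PyTorch sketch of the same stack; ceil-mode pooling and the flatten order are porting assumptions.

```python
# Sketch: PyTorch equivalent of the O-Net topology described in det3.prototxt.
# ceil_mode pooling and the flatten order are porting assumptions.
import torch
import torch.nn as nn

class ONetSketch(nn.Module):
    def __init__(self):
        super().__init__()
        self.features = nn.Sequential(
            nn.Conv2d(3, 32, 3), nn.PReLU(32),      # conv1 / prelu1
            nn.MaxPool2d(3, 2, ceil_mode=True),     # pool1
            nn.Conv2d(32, 64, 3), nn.PReLU(64),     # conv2 / prelu2
            nn.MaxPool2d(3, 2, ceil_mode=True),     # pool2
            nn.Conv2d(64, 64, 3), nn.PReLU(64),     # conv3 / prelu3
            nn.MaxPool2d(2, 2, ceil_mode=True),     # pool3
            nn.Conv2d(64, 128, 2), nn.PReLU(128),   # conv4 / prelu4
        )
        self.fc = nn.Sequential(
            nn.Flatten(),
            nn.Linear(128 * 3 * 3, 256),            # conv5 (InnerProduct)
            nn.Dropout(0.25),                       # drop5
            nn.PReLU(256),                          # prelu5
        )
        self.cls = nn.Linear(256, 2)         # conv6-1: face / non-face scores
        self.box = nn.Linear(256, 4)         # conv6-2: box regression offsets
        self.landmarks = nn.Linear(256, 10)  # conv6-3: 5 (x, y) landmark pairs

    def forward(self, x):
        x = self.fc(self.features(x))
        return torch.softmax(self.cls(x), dim=1), self.box(x), self.landmarks(x)

probs, offsets, lms = ONetSketch()(torch.zeros(2, 3, 48, 48))
print(probs.shape, offsets.shape, lms.shape)  # (2, 2) (2, 4) (2, 10)
```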
src/third_party/edgeface/face_alignment/mtcnn_pytorch/caffe_models/det4.caffemodel ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:23dfa3f889a8dd5d1ffe7429229270892bcb19221ede13aaac8896ea060bfb76
3
+ size 3798152
src/third_party/edgeface/face_alignment/mtcnn_pytorch/caffe_models/det4.prototxt ADDED
@@ -0,0 +1,995 @@
1
+ name: "LNet"
2
+ input: "data"
3
+ input_dim: 1
4
+ input_dim: 15
5
+ input_dim: 24
6
+ input_dim: 24
7
+
8
+ layer {
9
+ name: "slicer_data"
10
+ type: "Slice"
11
+ bottom: "data"
12
+ top: "data241"
13
+ top: "data242"
14
+ top: "data243"
15
+ top: "data244"
16
+ top: "data245"
17
+ slice_param {
18
+ axis: 1
19
+ slice_point: 3
20
+ slice_point: 6
21
+ slice_point: 9
22
+ slice_point: 12
23
+ }
24
+ }
25
+ layer {
26
+ name: "conv1_1"
27
+ type: "Convolution"
28
+ bottom: "data241"
29
+ top: "conv1_1"
30
+ param {
31
+ lr_mult: 1
32
+ decay_mult: 1
33
+ }
34
+ param {
35
+ lr_mult: 2
36
+ decay_mult: 1
37
+ }
38
+ convolution_param {
39
+ num_output: 28
40
+ kernel_size: 3
41
+ stride: 1
42
+ weight_filler {
43
+ type: "xavier"
44
+ }
45
+ bias_filler {
46
+ type: "constant"
47
+ value: 0
48
+ }
49
+ }
50
+
51
+ }
52
+ layer {
53
+ name: "prelu1_1"
54
+ type: "PReLU"
55
+ bottom: "conv1_1"
56
+ top: "conv1_1"
57
+
58
+ }
59
+ layer {
60
+ name: "pool1_1"
61
+ type: "Pooling"
62
+ bottom: "conv1_1"
63
+ top: "pool1_1"
64
+ pooling_param {
65
+ pool: MAX
66
+ kernel_size: 3
67
+ stride: 2
68
+ }
69
+ }
70
+
71
+ layer {
72
+ name: "conv2_1"
73
+ type: "Convolution"
74
+ bottom: "pool1_1"
75
+ top: "conv2_1"
76
+ param {
77
+ lr_mult: 1
78
+ decay_mult: 1
79
+ }
80
+ param {
81
+ lr_mult: 2
82
+ decay_mult: 1
83
+ }
84
+ convolution_param {
85
+ num_output: 48
86
+ kernel_size: 3
87
+ stride: 1
88
+ weight_filler {
89
+ type: "xavier"
90
+ }
91
+ bias_filler {
92
+ type: "constant"
93
+ value: 0
94
+ }
95
+ }
96
+
97
+ }
98
+ layer {
99
+ name: "prelu2_1"
100
+ type: "PReLU"
101
+ bottom: "conv2_1"
102
+ top: "conv2_1"
103
+ }
104
+ layer {
105
+ name: "pool2_1"
106
+ type: "Pooling"
107
+ bottom: "conv2_1"
108
+ top: "pool2_1"
109
+ pooling_param {
110
+ pool: MAX
111
+ kernel_size: 3
112
+ stride: 2
113
+ }
114
+
115
+ }
116
+ layer {
117
+ name: "conv3_1"
118
+ type: "Convolution"
119
+ bottom: "pool2_1"
120
+ top: "conv3_1"
121
+ param {
122
+ lr_mult: 1
123
+ decay_mult: 1
124
+ }
125
+ param {
126
+ lr_mult: 2
127
+ decay_mult: 1
128
+ }
129
+ convolution_param {
130
+ num_output: 64
131
+ kernel_size: 2
132
+ stride: 1
133
+ weight_filler {
134
+ type: "xavier"
135
+ }
136
+ bias_filler {
137
+ type: "constant"
138
+ value: 0
139
+ }
140
+ }
141
+
142
+ }
143
+ layer {
144
+ name: "prelu3_1"
145
+ type: "PReLU"
146
+ bottom: "conv3_1"
147
+ top: "conv3_1"
148
+ }
149
+ ##########################
150
+ layer {
151
+ name: "conv1_2"
152
+ type: "Convolution"
153
+ bottom: "data242"
154
+ top: "conv1_2"
155
+ param {
156
+ lr_mult: 1
157
+ decay_mult: 1
158
+ }
159
+ param {
160
+ lr_mult: 2
161
+ decay_mult: 1
162
+ }
163
+ convolution_param {
164
+ num_output: 28
165
+ kernel_size: 3
166
+ stride: 1
167
+ weight_filler {
168
+ type: "xavier"
169
+ }
170
+ bias_filler {
171
+ type: "constant"
172
+ value: 0
173
+ }
174
+ }
175
+
176
+ }
177
+ layer {
178
+ name: "prelu1_2"
179
+ type: "PReLU"
180
+ bottom: "conv1_2"
181
+ top: "conv1_2"
182
+
183
+ }
184
+ layer {
185
+ name: "pool1_2"
186
+ type: "Pooling"
187
+ bottom: "conv1_2"
188
+ top: "pool1_2"
189
+ pooling_param {
190
+ pool: MAX
191
+ kernel_size: 3
192
+ stride: 2
193
+ }
194
+ }
195
+
196
+ layer {
197
+ name: "conv2_2"
198
+ type: "Convolution"
199
+ bottom: "pool1_2"
200
+ top: "conv2_2"
201
+ param {
202
+ lr_mult: 1
203
+ decay_mult: 1
204
+ }
205
+ param {
206
+ lr_mult: 2
207
+ decay_mult: 1
208
+ }
209
+ convolution_param {
210
+ num_output: 48
211
+ kernel_size: 3
212
+ stride: 1
213
+ weight_filler {
214
+ type: "xavier"
215
+ }
216
+ bias_filler {
217
+ type: "constant"
218
+ value: 0
219
+ }
220
+ }
221
+
222
+ }
223
+ layer {
224
+ name: "prelu2_2"
225
+ type: "PReLU"
226
+ bottom: "conv2_2"
227
+ top: "conv2_2"
228
+ }
229
+ layer {
230
+ name: "pool2_2"
231
+ type: "Pooling"
232
+ bottom: "conv2_2"
233
+ top: "pool2_2"
234
+ pooling_param {
235
+ pool: MAX
236
+ kernel_size: 3
237
+ stride: 2
238
+ }
239
+
240
+ }
241
+ layer {
242
+ name: "conv3_2"
243
+ type: "Convolution"
244
+ bottom: "pool2_2"
245
+ top: "conv3_2"
246
+ param {
247
+ lr_mult: 1
248
+ decay_mult: 1
249
+ }
250
+ param {
251
+ lr_mult: 2
252
+ decay_mult: 1
253
+ }
254
+ convolution_param {
255
+ num_output: 64
256
+ kernel_size: 2
257
+ stride: 1
258
+ weight_filler {
259
+ type: "xavier"
260
+ }
261
+ bias_filler {
262
+ type: "constant"
263
+ value: 0
264
+ }
265
+ }
266
+
267
+ }
268
+ layer {
269
+ name: "prelu3_2"
270
+ type: "PReLU"
271
+ bottom: "conv3_2"
272
+ top: "conv3_2"
273
+ }
274
+ ##########################
275
+ ##########################
276
+ layer {
277
+ name: "conv1_3"
278
+ type: "Convolution"
279
+ bottom: "data243"
280
+ top: "conv1_3"
281
+ param {
282
+ lr_mult: 1
283
+ decay_mult: 1
284
+ }
285
+ param {
286
+ lr_mult: 2
287
+ decay_mult: 1
288
+ }
289
+ convolution_param {
290
+ num_output: 28
291
+ kernel_size: 3
292
+ stride: 1
293
+ weight_filler {
294
+ type: "xavier"
295
+ }
296
+ bias_filler {
297
+ type: "constant"
298
+ value: 0
299
+ }
300
+ }
301
+
302
+ }
303
+ layer {
304
+ name: "prelu1_3"
305
+ type: "PReLU"
306
+ bottom: "conv1_3"
307
+ top: "conv1_3"
308
+
309
+ }
310
+ layer {
311
+ name: "pool1_3"
312
+ type: "Pooling"
313
+ bottom: "conv1_3"
314
+ top: "pool1_3"
315
+ pooling_param {
316
+ pool: MAX
317
+ kernel_size: 3
318
+ stride: 2
319
+ }
320
+ }
321
+
322
+ layer {
323
+ name: "conv2_3"
324
+ type: "Convolution"
325
+ bottom: "pool1_3"
326
+ top: "conv2_3"
327
+ param {
328
+ lr_mult: 1
329
+ decay_mult: 1
330
+ }
331
+ param {
332
+ lr_mult: 2
333
+ decay_mult: 1
334
+ }
335
+ convolution_param {
336
+ num_output: 48
337
+ kernel_size: 3
338
+ stride: 1
339
+ weight_filler {
340
+ type: "xavier"
341
+ }
342
+ bias_filler {
343
+ type: "constant"
344
+ value: 0
345
+ }
346
+ }
347
+
348
+ }
349
+ layer {
350
+ name: "prelu2_3"
351
+ type: "PReLU"
352
+ bottom: "conv2_3"
353
+ top: "conv2_3"
354
+ }
355
+ layer {
356
+ name: "pool2_3"
357
+ type: "Pooling"
358
+ bottom: "conv2_3"
359
+ top: "pool2_3"
360
+ pooling_param {
361
+ pool: MAX
362
+ kernel_size: 3
363
+ stride: 2
364
+ }
365
+
366
+ }
367
+ layer {
368
+ name: "conv3_3"
369
+ type: "Convolution"
370
+ bottom: "pool2_3"
371
+ top: "conv3_3"
372
+ param {
373
+ lr_mult: 1
374
+ decay_mult: 1
375
+ }
376
+ param {
377
+ lr_mult: 2
378
+ decay_mult: 1
379
+ }
380
+ convolution_param {
381
+ num_output: 64
382
+ kernel_size: 2
383
+ stride: 1
384
+ weight_filler {
385
+ type: "xavier"
386
+ }
387
+ bias_filler {
388
+ type: "constant"
389
+ value: 0
390
+ }
391
+ }
392
+
393
+ }
394
+ layer {
395
+ name: "prelu3_3"
396
+ type: "PReLU"
397
+ bottom: "conv3_3"
398
+ top: "conv3_3"
399
+ }
400
+ ##########################
401
+ ##########################
402
+ layer {
403
+ name: "conv1_4"
404
+ type: "Convolution"
405
+ bottom: "data244"
406
+ top: "conv1_4"
407
+ param {
408
+ lr_mult: 1
409
+ decay_mult: 1
410
+ }
411
+ param {
412
+ lr_mult: 2
413
+ decay_mult: 1
414
+ }
415
+ convolution_param {
416
+ num_output: 28
417
+ kernel_size: 3
418
+ stride: 1
419
+ weight_filler {
420
+ type: "xavier"
421
+ }
422
+ bias_filler {
423
+ type: "constant"
424
+ value: 0
425
+ }
426
+ }
427
+
428
+ }
429
+ layer {
430
+ name: "prelu1_4"
431
+ type: "PReLU"
432
+ bottom: "conv1_4"
433
+ top: "conv1_4"
434
+
435
+ }
436
+ layer {
437
+ name: "pool1_4"
438
+ type: "Pooling"
439
+ bottom: "conv1_4"
440
+ top: "pool1_4"
441
+ pooling_param {
442
+ pool: MAX
443
+ kernel_size: 3
444
+ stride: 2
445
+ }
446
+ }
447
+
448
+ layer {
449
+ name: "conv2_4"
450
+ type: "Convolution"
451
+ bottom: "pool1_4"
452
+ top: "conv2_4"
453
+ param {
454
+ lr_mult: 1
455
+ decay_mult: 1
456
+ }
457
+ param {
458
+ lr_mult: 2
459
+ decay_mult: 1
460
+ }
461
+ convolution_param {
462
+ num_output: 48
463
+ kernel_size: 3
464
+ stride: 1
465
+ weight_filler {
466
+ type: "xavier"
467
+ }
468
+ bias_filler {
469
+ type: "constant"
470
+ value: 0
471
+ }
472
+ }
473
+
474
+ }
475
+ layer {
476
+ name: "prelu2_4"
477
+ type: "PReLU"
478
+ bottom: "conv2_4"
479
+ top: "conv2_4"
480
+ }
481
+ layer {
482
+ name: "pool2_4"
483
+ type: "Pooling"
484
+ bottom: "conv2_4"
485
+ top: "pool2_4"
486
+ pooling_param {
487
+ pool: MAX
488
+ kernel_size: 3
489
+ stride: 2
490
+ }
491
+
492
+ }
493
+ layer {
494
+ name: "conv3_4"
495
+ type: "Convolution"
496
+ bottom: "pool2_4"
497
+ top: "conv3_4"
498
+ param {
499
+ lr_mult: 1
500
+ decay_mult: 1
501
+ }
502
+ param {
503
+ lr_mult: 2
504
+ decay_mult: 1
505
+ }
506
+ convolution_param {
507
+ num_output: 64
508
+ kernel_size: 2
509
+ stride: 1
510
+ weight_filler {
511
+ type: "xavier"
512
+ }
513
+ bias_filler {
514
+ type: "constant"
515
+ value: 0
516
+ }
517
+ }
518
+
519
+ }
520
+ layer {
521
+ name: "prelu3_4"
522
+ type: "PReLU"
523
+ bottom: "conv3_4"
524
+ top: "conv3_4"
525
+ }
526
+ ##########################
527
+ ##########################
528
+ layer {
529
+ name: "conv1_5"
530
+ type: "Convolution"
531
+ bottom: "data245"
532
+ top: "conv1_5"
533
+ param {
534
+ lr_mult: 1
535
+ decay_mult: 1
536
+ }
537
+ param {
538
+ lr_mult: 2
539
+ decay_mult: 1
540
+ }
541
+ convolution_param {
542
+ num_output: 28
543
+ kernel_size: 3
544
+ stride: 1
545
+ weight_filler {
546
+ type: "xavier"
547
+ }
548
+ bias_filler {
549
+ type: "constant"
550
+ value: 0
551
+ }
552
+ }
553
+
554
+ }
555
+ layer {
556
+ name: "prelu1_5"
557
+ type: "PReLU"
558
+ bottom: "conv1_5"
559
+ top: "conv1_5"
560
+
561
+ }
562
+ layer {
563
+ name: "pool1_5"
564
+ type: "Pooling"
565
+ bottom: "conv1_5"
566
+ top: "pool1_5"
567
+ pooling_param {
568
+ pool: MAX
569
+ kernel_size: 3
570
+ stride: 2
571
+ }
572
+ }
573
+
574
+ layer {
575
+ name: "conv2_5"
576
+ type: "Convolution"
577
+ bottom: "pool1_5"
578
+ top: "conv2_5"
579
+ param {
580
+ lr_mult: 1
581
+ decay_mult: 1
582
+ }
583
+ param {
584
+ lr_mult: 2
585
+ decay_mult: 1
586
+ }
587
+ convolution_param {
588
+ num_output: 48
589
+ kernel_size: 3
590
+ stride: 1
591
+ weight_filler {
592
+ type: "xavier"
593
+ }
594
+ bias_filler {
595
+ type: "constant"
596
+ value: 0
597
+ }
598
+ }
599
+
600
+ }
601
+ layer {
602
+ name: "prelu2_5"
603
+ type: "PReLU"
604
+ bottom: "conv2_5"
605
+ top: "conv2_5"
606
+ }
607
+ layer {
608
+ name: "pool2_5"
609
+ type: "Pooling"
610
+ bottom: "conv2_5"
611
+ top: "pool2_5"
612
+ pooling_param {
613
+ pool: MAX
614
+ kernel_size: 3
615
+ stride: 2
616
+ }
617
+
618
+ }
619
+ layer {
620
+ name: "conv3_5"
621
+ type: "Convolution"
622
+ bottom: "pool2_5"
623
+ top: "conv3_5"
624
+ param {
625
+ lr_mult: 1
626
+ decay_mult: 1
627
+ }
628
+ param {
629
+ lr_mult: 2
630
+ decay_mult: 1
631
+ }
632
+ convolution_param {
633
+ num_output: 64
634
+ kernel_size: 2
635
+ stride: 1
636
+ weight_filler {
637
+ type: "xavier"
638
+ }
639
+ bias_filler {
640
+ type: "constant"
641
+ value: 0
642
+ }
643
+ }
644
+
645
+ }
646
+ layer {
647
+ name: "prelu3_5"
648
+ type: "PReLU"
649
+ bottom: "conv3_5"
650
+ top: "conv3_5"
651
+ }
652
+ ##########################
653
+ layer {
654
+ name: "concat"
655
+ bottom: "conv3_1"
656
+ bottom: "conv3_2"
657
+ bottom: "conv3_3"
658
+ bottom: "conv3_4"
659
+ bottom: "conv3_5"
660
+ top: "conv3"
661
+ type: "Concat"
662
+ concat_param {
663
+ axis: 1
664
+ }
665
+ }
666
+ ##########################
667
+ layer {
668
+ name: "fc4"
669
+ type: "InnerProduct"
670
+ bottom: "conv3"
671
+ top: "fc4"
672
+ param {
673
+ lr_mult: 1
674
+ decay_mult: 1
675
+ }
676
+ param {
677
+ lr_mult: 2
678
+ decay_mult: 1
679
+ }
680
+ inner_product_param {
681
+ num_output: 256
682
+ weight_filler {
683
+ type: "xavier"
684
+ }
685
+ bias_filler {
686
+ type: "constant"
687
+ value: 0
688
+ }
689
+ }
690
+
691
+ }
692
+ layer {
693
+ name: "prelu4"
694
+ type: "PReLU"
695
+ bottom: "fc4"
696
+ top: "fc4"
697
+ }
698
+ ############################
699
+ layer {
700
+ name: "fc4_1"
701
+ type: "InnerProduct"
702
+ bottom: "fc4"
703
+ top: "fc4_1"
704
+ param {
705
+ lr_mult: 1
706
+ decay_mult: 1
707
+ }
708
+ param {
709
+ lr_mult: 2
710
+ decay_mult: 1
711
+ }
712
+ inner_product_param {
713
+ num_output: 64
714
+ weight_filler {
715
+ type: "xavier"
716
+ }
717
+ bias_filler {
718
+ type: "constant"
719
+ value: 0
720
+ }
721
+ }
722
+
723
+ }
724
+ layer {
725
+ name: "prelu4_1"
726
+ type: "PReLU"
727
+ bottom: "fc4_1"
728
+ top: "fc4_1"
729
+ }
730
+ layer {
731
+ name: "fc5_1"
732
+ type: "InnerProduct"
733
+ bottom: "fc4_1"
734
+ top: "fc5_1"
735
+ param {
736
+ lr_mult: 1
737
+ decay_mult: 1
738
+ }
739
+ param {
740
+ lr_mult: 2
741
+ decay_mult: 1
742
+ }
743
+ inner_product_param {
744
+ num_output: 2
745
+ weight_filler {
746
+ type: "xavier"
747
+ #type: "constant"
748
+ #value: 0
749
+ }
750
+ bias_filler {
751
+ type: "constant"
752
+ value: 0
753
+ }
754
+ }
755
+ }
756
+
757
+
758
+ #########################
759
+ layer {
760
+ name: "fc4_2"
761
+ type: "InnerProduct"
762
+ bottom: "fc4"
763
+ top: "fc4_2"
764
+ param {
765
+ lr_mult: 1
766
+ decay_mult: 1
767
+ }
768
+ param {
769
+ lr_mult: 2
770
+ decay_mult: 1
771
+ }
772
+ inner_product_param {
773
+ num_output: 64
774
+ weight_filler {
775
+ type: "xavier"
776
+ }
777
+ bias_filler {
778
+ type: "constant"
779
+ value: 0
780
+ }
781
+ }
782
+
783
+ }
784
+ layer {
785
+ name: "prelu4_2"
786
+ type: "PReLU"
787
+ bottom: "fc4_2"
788
+ top: "fc4_2"
789
+ }
790
+ layer {
791
+ name: "fc5_2"
792
+ type: "InnerProduct"
793
+ bottom: "fc4_2"
794
+ top: "fc5_2"
795
+ param {
796
+ lr_mult: 1
797
+ decay_mult: 1
798
+ }
799
+ param {
800
+ lr_mult: 2
801
+ decay_mult: 1
802
+ }
803
+ inner_product_param {
804
+ num_output: 2
805
+ weight_filler {
806
+ type: "xavier"
807
+ #type: "constant"
808
+ #value: 0
809
+ }
810
+ bias_filler {
811
+ type: "constant"
812
+ value: 0
813
+ }
814
+ }
815
+ }
816
+
817
+ #########################
818
+ layer {
819
+ name: "fc4_3"
820
+ type: "InnerProduct"
821
+ bottom: "fc4"
822
+ top: "fc4_3"
823
+ param {
824
+ lr_mult: 1
825
+ decay_mult: 1
826
+ }
827
+ param {
828
+ lr_mult: 2
829
+ decay_mult: 1
830
+ }
831
+ inner_product_param {
832
+ num_output: 64
833
+ weight_filler {
834
+ type: "xavier"
835
+ }
836
+ bias_filler {
837
+ type: "constant"
838
+ value: 0
839
+ }
840
+ }
841
+
842
+ }
843
+ layer {
844
+ name: "prelu4_3"
845
+ type: "PReLU"
846
+ bottom: "fc4_3"
847
+ top: "fc4_3"
848
+ }
849
+ layer {
850
+ name: "fc5_3"
851
+ type: "InnerProduct"
852
+ bottom: "fc4_3"
853
+ top: "fc5_3"
854
+ param {
855
+ lr_mult: 1
856
+ decay_mult: 1
857
+ }
858
+ param {
859
+ lr_mult: 2
860
+ decay_mult: 1
861
+ }
862
+ inner_product_param {
863
+ num_output: 2
864
+ weight_filler {
865
+ type: "xavier"
866
+ #type: "constant"
867
+ #value: 0
868
+ }
869
+ bias_filler {
870
+ type: "constant"
871
+ value: 0
872
+ }
873
+ }
874
+ }
875
+
876
+ #########################
877
+ layer {
878
+ name: "fc4_4"
879
+ type: "InnerProduct"
880
+ bottom: "fc4"
881
+ top: "fc4_4"
882
+ param {
883
+ lr_mult: 1
884
+ decay_mult: 1
885
+ }
886
+ param {
887
+ lr_mult: 2
888
+ decay_mult: 1
889
+ }
890
+ inner_product_param {
891
+ num_output: 64
892
+ weight_filler {
893
+ type: "xavier"
894
+ }
895
+ bias_filler {
896
+ type: "constant"
897
+ value: 0
898
+ }
899
+ }
900
+
901
+ }
902
+ layer {
903
+ name: "prelu4_4"
904
+ type: "PReLU"
905
+ bottom: "fc4_4"
906
+ top: "fc4_4"
907
+ }
908
+ layer {
909
+ name: "fc5_4"
910
+ type: "InnerProduct"
911
+ bottom: "fc4_4"
912
+ top: "fc5_4"
913
+ param {
914
+ lr_mult: 1
915
+ decay_mult: 1
916
+ }
917
+ param {
918
+ lr_mult: 2
919
+ decay_mult: 1
920
+ }
921
+ inner_product_param {
922
+ num_output: 2
923
+ weight_filler {
924
+ type: "xavier"
925
+ #type: "constant"
926
+ #value: 0
927
+ }
928
+ bias_filler {
929
+ type: "constant"
930
+ value: 0
931
+ }
932
+ }
933
+ }
934
+
935
+ #########################
936
+ layer {
937
+ name: "fc4_5"
938
+ type: "InnerProduct"
939
+ bottom: "fc4"
940
+ top: "fc4_5"
941
+ param {
942
+ lr_mult: 1
943
+ decay_mult: 1
944
+ }
945
+ param {
946
+ lr_mult: 2
947
+ decay_mult: 1
948
+ }
949
+ inner_product_param {
950
+ num_output: 64
951
+ weight_filler {
952
+ type: "xavier"
953
+ }
954
+ bias_filler {
955
+ type: "constant"
956
+ value: 0
957
+ }
958
+ }
959
+
960
+ }
961
+ layer {
962
+ name: "prelu4_5"
963
+ type: "PReLU"
964
+ bottom: "fc4_5"
965
+ top: "fc4_5"
966
+ }
967
+ layer {
968
+ name: "fc5_5"
969
+ type: "InnerProduct"
970
+ bottom: "fc4_5"
971
+ top: "fc5_5"
972
+ param {
973
+ lr_mult: 1
974
+ decay_mult: 1
975
+ }
976
+ param {
977
+ lr_mult: 2
978
+ decay_mult: 1
979
+ }
980
+ inner_product_param {
981
+ num_output: 2
982
+ weight_filler {
983
+ type: "xavier"
984
+ #type: "constant"
985
+ #value: 0
986
+ }
987
+ bias_filler {
988
+ type: "constant"
989
+ value: 0
990
+ }
991
+ }
992
+ }
993
+
994
+ #########################
995
+
src/third_party/edgeface/face_alignment/mtcnn_pytorch/extract_weights_from_caffe_models.py ADDED
@@ -0,0 +1,47 @@
1
+ import caffe
2
+ import numpy as np
3
+
4
+ """
5
+ The purpose of this script is to convert pretrained weights taken from
6
+ official implementation here:
7
+ https://github.com/kpzhang93/MTCNN_face_detection_alignment/tree/master/code/codes/MTCNNv2
8
+ to the required format.
9
+
10
+ In a nutshell, it just renames and transposes some of the weights.
11
+ You don't have to use this script because weights are already in `src/weights`.
12
+ """
13
+
14
+
15
+ def get_all_weights(net):
16
+ all_weights = {}
17
+ for p in net.params:
18
+ if 'conv' in p:
19
+ name = 'features.' + p
20
+ if '-' in p:
21
+ s = list(p)
22
+ s[-2] = '_'
23
+ s = ''.join(s)
24
+ all_weights[s + '.weight'] = net.params[p][0].data
25
+ all_weights[s + '.bias'] = net.params[p][1].data
26
+ elif len(net.params[p][0].data.shape) == 4:
27
+ all_weights[name + '.weight'] = net.params[p][0].data.transpose((0, 1, 3, 2))
28
+ all_weights[name + '.bias'] = net.params[p][1].data
29
+ else:
30
+ all_weights[name + '.weight'] = net.params[p][0].data
31
+ all_weights[name + '.bias'] = net.params[p][1].data
32
+ elif 'prelu' in p.lower():
33
+ all_weights['features.' + p.lower() + '.weight'] = net.params[p][0].data
34
+ return all_weights
35
+
36
+
37
+ # P-Net
38
+ net = caffe.Net('caffe_models/det1.prototxt', 'caffe_models/det1.caffemodel', caffe.TEST)
39
+ np.save('src/weights/pnet.npy', get_all_weights(net))
40
+
41
+ # R-Net
42
+ net = caffe.Net('caffe_models/det2.prototxt', 'caffe_models/det2.caffemodel', caffe.TEST)
43
+ np.save('src/weights/rnet.npy', get_all_weights(net))
44
+
45
+ # O-Net
46
+ net = caffe.Net('caffe_models/det3.prototxt', 'caffe_models/det3.caffemodel', caffe.TEST)
47
+ np.save('src/weights/onet.npy', get_all_weights(net))
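The script saves each network's weights as a NumPy-pickled dict keyed by names such as `features.conv1.weight`. A minimal hedged sketch of reading such a file back: `np.save` of a dict requires `allow_pickle=True` on load, and `.item()` recovers the dict. The path is the one used above; whether the keys match a given PyTorch module's parameter names is an assumption.

```python
# Sketch: load one of the converted weight files saved by the script above
# and, if the names line up, push it into a PyTorch module via load_state_dict.
import numpy as np
import torch

weights = np.load('src/weights/pnet.npy', allow_pickle=True).item()  # plain dict
for name, array in weights.items():
    print(name, array.shape)

# If `pnet` is a module whose parameter names match the dict keys
# (e.g. features.conv1.weight), the arrays can be loaded directly:
# pnet.load_state_dict({k: torch.from_numpy(v) for k, v in weights.items()})
```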
src/third_party/edgeface/face_alignment/mtcnn_pytorch/get_aligned_face_from_mtcnn.ipynb ADDED
The diff for this file is too large to render.
 
src/third_party/edgeface/face_alignment/mtcnn_pytorch/images/example.png ADDED

Git LFS Details

  • SHA256: 3d4ed3b964160fe56c8b583f3849108878cbcb8162ca664be0dbb4a883b194bd
  • Pointer size: 131 Bytes
  • Size of remote file: 419 kB
src/third_party/edgeface/face_alignment/mtcnn_pytorch/images/face0.jpg ADDED
src/third_party/edgeface/face_alignment/mtcnn_pytorch/images/jf.jpg ADDED
src/third_party/edgeface/face_alignment/mtcnn_pytorch/images/office1.jpg ADDED
src/third_party/edgeface/face_alignment/mtcnn_pytorch/images/office2.jpg ADDED

Git LFS Details

  • SHA256: 33eeb116e69baf2de74d0aaad1baa3ffdae2e131e5c53f20de883620b118b89e
  • Pointer size: 131 Bytes
  • Size of remote file: 127 kB
src/third_party/edgeface/face_alignment/mtcnn_pytorch/images/office3.jpg ADDED
src/third_party/edgeface/face_alignment/mtcnn_pytorch/images/office4.jpg ADDED

Git LFS Details

  • SHA256: 8fca456609397b48e493e9d25e78c13079d6436965f8eeae1e3ba308b92cc71b
  • Pointer size: 131 Bytes
  • Size of remote file: 206 kB
src/third_party/edgeface/face_alignment/mtcnn_pytorch/images/office5.jpg ADDED
src/third_party/edgeface/face_alignment/mtcnn_pytorch/refine_faces.ipynb ADDED
@@ -0,0 +1,315 @@
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 2,
6
+ "metadata": {
7
+ "ExecuteTime": {
8
+ "end_time": "2018-07-21T07:06:15.533290Z",
9
+ "start_time": "2018-07-21T07:06:15.509560Z"
10
+ }
11
+ },
12
+ "outputs": [
13
+ {
14
+ "name": "stdout",
15
+ "output_type": "stream",
16
+ "text": [
17
+ "The autoreload extension is already loaded. To reload it, use:\n",
18
+ " %reload_ext autoreload\n"
19
+ ]
20
+ }
21
+ ],
22
+ "source": [
23
+ "%load_ext autoreload\n",
24
+ "%autoreload 2\n",
25
+ "\n",
26
+ "from src import detect_faces, show_bboxes\n",
27
+ "from PIL import Image\n",
28
+ "import cv2\n",
29
+ "import numpy as np\n",
30
+ "from src.align_trans import get_reference_facial_points, warp_and_crop_face\n",
31
+ "import mxnet as mx\n",
32
+ "import io\n",
33
+ "from pathlib import Path"
34
+ ]
35
+ },
36
+ {
37
+ "cell_type": "code",
38
+ "execution_count": 3,
39
+ "metadata": {
40
+ "ExecuteTime": {
41
+ "end_time": "2018-07-21T07:08:15.237357Z",
42
+ "start_time": "2018-07-21T07:08:15.214563Z"
43
+ }
44
+ },
45
+ "outputs": [],
46
+ "source": [
47
+ "face_folder = Path('/home/f/learning/Dataset/faces_vgg_112x112')\n",
48
+ "bin_path = face_folder/'train.rec'\n",
49
+ "idx_path = face_folder/'train.idx'"
50
+ ]
51
+ },
52
+ {
53
+ "cell_type": "code",
54
+ "execution_count": 4,
55
+ "metadata": {
56
+ "ExecuteTime": {
57
+ "end_time": "2018-07-21T07:08:20.176501Z",
58
+ "start_time": "2018-07-21T07:08:17.337626Z"
59
+ }
60
+ },
61
+ "outputs": [],
62
+ "source": [
63
+ "imgrec = mx.recordio.MXIndexedRecordIO(str(idx_path), str(bin_path), 'r')"
64
+ ]
65
+ },
66
+ {
67
+ "cell_type": "code",
68
+ "execution_count": 25,
69
+ "metadata": {
70
+ "ExecuteTime": {
71
+ "end_time": "2018-07-21T07:10:25.708722Z",
72
+ "start_time": "2018-07-21T07:10:25.687476Z"
73
+ }
74
+ },
75
+ "outputs": [
76
+ {
77
+ "name": "stdout",
78
+ "output_type": "stream",
79
+ "text": [
80
+ "HEADER(flag=0, label=2.0, id=813, id2=0)\n"
81
+ ]
82
+ },
83
+ {
84
+ "data": {
85
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAHAAAABwCAIAAABJgmMcAABQ2UlEQVR4nGX9Z5NkSZYlBl6m+t4z5ixoZtGuLirdgAyAnVngv+MDdoEVwYpMTwPTmOmqnqpKHpERGeHEyHuqegk+qLln9MA+uAQxdzdTU73k3HOO4m92u6AorRoCCQLTuBqzJNeaWNA8zDNyYgYAAgThMh/BY5Q0sIgQABRTRwjCEBqGgZnBkQFzzuimqurmEY7AgMKckSeRBMQEZu3hsC9a8jgAInkehgGZJCUZxMLfvn+nZhEhIhAREYLiamAuIiDjaVmMWQmd0InVjYjClSIgDAAQAwgNwiESACLDv3oQALh7RPSvEfH0f4gYEYjYn9P/xd37MxHRzMZxbK0NwxARYmAACIgpEWYBgohorSTmUAtzDkcCV4cICKCQi2kaJYV5NCUPImIWSmIEFgHm7EAACIS1UsQgnJhKawAgIhwQtQ45Z+JwhYCVCEFkkTyNqCQiJOwAYZqFn11e1FpbayklIkrEQsnMCHCapuZwd9ifaisRFUDBw1UtEBEiEAIAHADdHcIhmgVRIGJfI0QkQiJ6WrhPl7X/GRHp8XH+BIhqrcwcEUTUn6mqZiYOAeBBEQiECAj9V2FAuAriSCkTEmIiZqT1atqtV6s8WG1aKrhZuEFUr4GoDrU16b/bFWsEISi6O4WP47hZjZnYS8tE6yExZBEKviy1BuE4ZsG0LEsQElFtTd3Wq01LLSL63o8IIWLmMQ+r1erkfrlffdjvPzwcojVGimBVhfNGi77NIhAACQBA+wbsXxERwPsaPS3o0xr540NVn5aSmfsTiMjdRaT/OxGpqqg3AALCILQwCKQgFta5oFsmYiYJ2uRxt12vhnGdebNaT5IFghDdzd2BsJlV1+Myz/Ps7oKEARhQwoEQER0hAHL4Jo/rzXY1DKs8JMacc0psEMiAzHWux+OxtWYQTWQpZVyN7iMiXl5ejuOoWt2dmYUYAFYcq+EyCdR2qk0r0CAYDoEQThAY6J+eX+LzeY8fH/bpMcfHx9Oi961nZn2Jn57GzGbWvz59ixg4IZIwCQGhu9da0YjDOCAFJoCVyM1m9fLqZruedoOs1+uBJUzJIyKIgIQNwsEtPCLCQVsr86KqKKm5AeIwDGkcMktOaZA0cBqYhpRyTkSEgkBYSjFrrV3u9/v7+3sl9jFfXFwSkUP0Be3Hvy+Iuh2ijCkRrGrbtVYe5lqAEqMDOYC5QxAA+CfL+una9T+4u5k9rebT6e5P6AEhpfS0W/uGba2pKiKqqoj0JZY8DH3VIwICIsJUSUGYB6Z1Hq7X6+eb7Yvrq5vtdhzyduTtapVEbKkIPqacBwEiC7dQd++vxjXcHQOquSMwMzIhMCIK48CSWDJxZhIRAKckgX7wpkRpNdxMed5uaq3LUpmSDHmappQSCbeIJVTVACESQmALkPUKIszM3/8AxYGwmkVAxL9auOhvEqCf7sdt6BHxlJqewuin39VPuohEBDP3XPQUIp6yFhFJSqm1pu5uDoQIgQFZJDGNKe/Wq+dXl59dXV5vt9thGFK+2qUxZyG2BGCeE4uwmQ1CANkBmDkxm5mrIaLwYGYOgYiURJAAgCymMRMguEc0IhIwZE6rySGrqoWMCAvhCEgku90ugAAAwpkACQp4cw31gQHD3P1iyM+36+NxNj2VUlvV6hgAKMzECBxh9JjT+1rFOWP14NhXGSIcACPoKcg+hYb+1x40eyhIKT0tqKpGhLi6ewBiuPVwJ4g5pfUw7PKwXa13q2k9jqPwKLyZMoGFKQuO42DaAIDAAN2sEREjUjiEJ4QQwIjQRQiZWYQ9wLUhYk5pYMwphRqCi4iDRVhOpAANHJkBhkUSbzZmkceRiCy8qnrplQOys2qEmzuQaTLYSr7ZbJZqD6dS5kWD+nbqZ5Yet9vT3uxr+l8Fzcf/Oi+0GTwlqKc/mNkwDKqacwYAZu7ZEgDEVZko3DJLEFLAahqnlNZ5WI/Ds8uLm6vLy836YhgHJkYgDASr1Q2RERhR1SICwAmFkDAcmqs7EbEIQQCEkBChu0PCxNyfR6CBbmZhjgDhDgyIwAQU6O7rIRMRBIlkCwdKzGRePdgiXB09WEEcRmCH2A7TzRYPs97lsp3iw+G01MqmrUlKIgQpMaI8ZaSnoPlpYP2vQi2zmFkPqWZWa621juPYE2Nr7dOfQ0QSBuCBSFkGySzEq5wHgTGn9WrcbdcX2812PU1JBEIAwR2ZGSPMFD0AiUCIATgn6UVvuBMC92Kr1zjMzIwoPV4TwuNJ8gg3CApABEYOAjAHRGbE6PWbuysLGzhir2msv8nVarXMSqQAqO5KMYmsclrldKxKgKq6tIpEQ05DYvchCUUE4Lm6BDhHzE+z0NNWjYierD7NYz2e9jNeSmHmXs/1qCoIjBhJJOecs2ThKaccNg15O43bzWo1DrmXAO5mStGQEIiQAh0CwtWVghDCmJFcDXoUBxTElHNKqb/Q/joAkAh7DgGEXlrE+e15b2MeNw6Ch3s4WphrOCCMqwETPhwPdVmqFvKMgRQgBBlpnYbtarXbtPcPp3MENAyA1hTMXQ3Xjz+f/PEXwVPF/vT4NCb0iNHXq3/taerTcrWU0kt9ISISTomFgCCS0JglA1+sx+1mXE85JyQOxEAMoCDDiAB3AEciDA8AN3MIBnSi6IkSICyA4CnvAwDnxMznsOUeAI5IQWDnstkAQ8MdEP2x2wsPZ2BOGc0DIUtGpqKttubNHc/FNgdwOBMMTFOSIUtOiZpGFAhwQyAEoGVZer5mwh6pALCX6PD/eCAi83nhehJ/WutaK37yeIqw0qvuHiBSkjGnaUwDxmY9bFbDOLBQIDhSEAYgJWBCdNfHDwoIkZnPFX4P+UDhYGCISINwTsMwEFHOQ28neqPGiOjGgOHJ3dGt17AR8Gm9HRGOodUjAJjAmvuPhTQgAgYLsRtFMPogvFlN05CZT4zkgBjoAaoOHhZVhIdhyJAlIQBFuJkhPsVQ+q/W9OnPIpJz7vVmrVVEnsqppycLCgeGhqaglHic8iA8SYyDJAEMNXd1aEYGDh6AICTIFO6O3ncbEBCQR0CEsBCzuweCQzTT0ioSCzORMrP3mMWciNz96T2pKpqDeyBFhDk81YwQptqQkzCnlBLlIGyqqmoB7mAR/WMkjCHLJnAcx0Tcv51Q3N0B1AEZVC1icfcBUkqJmQAgwnrQ/H8kqPh08/ayqR/5vr7MDABPUVjOIQuAc5pWgwgHNObMjABh1hzJAowJwiNCgbD3s8xIPXKDQwABBrqHQQAEMgVAEIZDLQ0CRSQAhmFCZEQkYUR0NXdFRAKEZo59gxCAAQA9AkPeG+cwrRARLAkAmIiJLALCAR3QA9xVzTwiri52b+728HDU5sEKEYIJkRGjdzsREWARkbOISD8WANCDwNPe7Mf5Mf64mYlIa+1p9Xu46Emfmc+LzcLjmMcx998njOCqLbRSC2aiIEYERkyciMggEFxIqH++7hhBiIGP8Ez/3CLArIcbIorAp1f
QD0uLACdE8HN956QB5v3DI0IijAgP7QGNzmBY09Z6B91qNW/gARBEgD1AQQzDkFISEWEmllZqCzVQNGXm/mN70ROR3J35Xy3ip4nx8cX/WGkBwDAMZtZa601n37/uLhjACBPLBAzHYhTjIGyYOLvZqbWcGCU5oSNAgCE5ICMkJgpgs15vIpCqEmFQEDEAaGvMxMMoIiLS+2DqjTUGhhPQkKS6lVLQg5M0U4cIDkQmYHdXD0M0JyYBAEMkhAg3cABD9AmSOlZTC2ekTCiwiJYLltebfLjDdLk+tbDVpmjUZhFGERRAZuRG3prZeoqUzqHQ3SMMBJk5AIgCyQMcEUJtGBLAGQxtrQlnRjEzDCLg8JDr68t5nlU1IosI47mvr7Uyd5QQVZWYmJjwv+qLIR5hV6bzvzOzsCAiIT6Gp/PRePrkRYSZ+z4VES21mZ4/dqitdQTrnC4FCAWb6vkA8rnt4TOKgcxM4RFA/bcLpZQa4NXVxe7+oKeKGRtIBp6XqlrDFcDB7YxhEKr2AyEpCRFoOICH9/PvEQGBEeEI6RGyE5FHkKlXK9ETtSCiEAKipP7ygJnOcVdYOAOARohDMDg+wTDxiJ2el7hjXD0VJpZ+XiIiJel5MKWUUoJHrOGc6Puqfdr8ATJSECBiVdXWgACYMMAhzAwNA0FVa63Wmgd5GIAjIkIQQ2YZEizNVtO0Xa/vFmUQTlNpkRKM4+jW3BW8qjaIFhFz0UGE0AkRhQnPEImBCiDAGVIBIGCy1s5dH0DPsYFAzOCBiOJh45gJEQCaViAMQTULT+eTayGBQQhBfbf2tO6PRUVfCEJ+ggl6eO5oaQ/qPZY/bVUWcbMf61NmIjKzZVlcNSKIGREZ0YgcHD0AIMzP2ybQPSAQApvZUzKJgDAnDGEkjYz48vnzd/enj7cPPODiIHkicCKCEIbBvIQZAqhWIG5GvjSRSCmRIFCAgWojoiechJkXiJySanU3APIzhkkWhoiSCIkITFtTDuHMEdzx1NbaDBBEaRwAyAAAwAK4VzJOZwAoAgDMrS9lPw7MjCKIgMQ9CdZan85779s6Vm5mPee21sq8oHlEkDsyEVHOWV1rrbWjvI8VjLubati522HmcANXcAtXRswExeFit7q5unyYWyOWHkICiNg1DJ0wozgREab+U00jHDw8AZ/7T5IACggM78H7jDQjAFMQajPssJU7IopaJUMIG5iFkM8VA5GwG1RvnFI4eqAbwGOgROy/L6w3wxHhkFJi5sQSEW6Wc05JiNgeazQA6B9VrdXd6bHHsNpU1dX6N0ZEM40aABCI6lZr1TgPxhARgMzcrPdUCAAdHjw3uIACkRDIm5uPmcYxL0tjmUprjJFzdgAIQkR3BEMPfgxl7hHe3MyREQByogCKMEAOdPUAZAgC8KcOCogcIJA9QhiBEASlv3t3s9o0J+YU5+meG0TzCPfE6BjI6BEdVQALcHOL3rB/+ngaszz1Emfs+fHRARJ6bJYRcRgGGUYL71GiqkYEI+ScrRYiIkSiMwTTpwNzc3eHnh8gGI0ACIIRhJHMB5FpTPuqwZg5leNpGIbAXtwyBAR2bAERETAALBDNDcEBYNEgogDr8I6bMSdksarMZN6RHo5+zs1lyCki+g5EOn/gYW5mCGFuwmwWihqAPcsTCRESBkYAnsN1r5bdPbH0ngwf54h955jZU4WMiPIYN9FDRAiwQkGCCNCmvbHpkw9y0PCcs0cgcv9Hbw2MAGLIubUGgWZGxL0ACHcOB62rvH714nr78uX7Q3vzw91+rtFazkm1qTZEdDMiJiKkDmg6khAFovSs7eFufZDp5qTWmNG8EBAKuzvEIwgMGADCGCmnME9ARIDmvaNSVYBIhKp6PJ1oWq2HDITqjmbgQBiMQYDIIhhPCKu6uTs9blgRflpEM1PEnPPThAwRAbzWEo9xFh36xsSO3gF0+MO8DypC3QCASNy11hqAECiSRQDroiSEit6yUGJcyvH2sFx89vObNJaIz7cXf/7jl6q62WwOD/v7+/vr62sKXJYlJe6Z013DnAeu5ikxgUQEAkO4mYWjhpshgJemzJxSsoCckqois4R5kCdCQUSPLLyaJhEJc8ewAIMwOh98ifDeKjICBAIwE0ZnQPQFPDe27h7hANTP/lOK71huz+wRwQAgQkSt1nMfjciPg0mHeJqL9QVFJn7Mcj23AnJEIHIgECkiE2JiKXURhN12c1cbhP3xn/94N9df/Ob345h/97v/5uuvv/5W62r9slVTVRmyqxIRMDS1lKRHKrNAoKdTGNExN0MARCbA1pp7B561n0tJhIPwZr2eJLsZQ6zHSfBxehM/8il6Qu8bBAOYAIkdnbF3PwAA+DghaKrM1Kv3p1oq5xz/am8+oraP/Zy7Mwk+jsks/Ckcd/IIQ4dN4fyxMltzh2BmDAAgdw9HAEgiuKiVJTMxmmu9urrQulxcXPyv/9v/5+/+7u9+8YufTdOU0vDll1++f/dh2G5Op1OP9RGuTXsz2ptU7LOE/oKDws3BUCRQkAWBqlrVCgAyDkPOmZGYKBP3Cc+YJNwSAoKjKfb2OELDJcLQERA8sEMiFASP7RMhGTRoiMjU89J5uz0hsn2Je31H0dEgJKI+p0UPAtD+NHdg6n89T9RcUTsgG+BmpoBJeuAEIqIkg0dEc8UYhqGaD5L2h+Plbnv5+idfvXnzhz/8D3e3H6ZhxID/8B/+wx/+8Idf/epXt7e3v//971pr9/f3Hz58eHh4mKap1kVE/LGm78VNBAVEYBCJublDNGUiZmmtmpmAR2IJtRp1M0zCzIBMlESEMVy9BgI+NrBmHZ8KAgyHsACMcISwnnDcer5srTejKZ0Broh4KjnP0L25f7I33Z0B1Q0RhYgAGgAEWHio9SPn1skY52GkqwEEU0KyZuchNiK6Q2vNzZk4Qpno5voaCL3p++/fvXjxoqMhL18+j7Db2w8PD3en+TDkabPZ5Jyfv7j5+uuvH8+lwydLCo8bxzSIKefcWtE+JkEYplEEaUzZzdA7xscQoKrjNIkQGjRWip7H1d0lkCKQAQmQoPcu2Fv3T8JoDw5kJGgdi+4A19ORZ2Z3BYBOAeukAUYU+rHjEiIAICACTHD+IRp9zs9C5yjGEoDgoRH+dA4SJhUjj9U0vXn7wz/86S83r3/+56++ef/DqT/nJz/57Pe///3bt2+J4vXr15vN6vb2/v3797/5zW8eHh4QMeXRQxH9x5N+bskI8VyE9pct8iO9STabjTCrWniEeacAxXlIfR7Adhy+J4diPdsAASEAIsQnkNdjvfxjb25m6N5hrnEcH9fUWzOw88tCACIaJDFzVFNV7/AdIDIlosi5V10Okc7gIyGmLNK8ErEHAzN1tAqhmUarS6tJeHk4XF/ePL/ef/f27c3V9WFuzIiIX3311ZdffklEn3/++bNn1wBweXn58PCw3W7/4z/9k0f0OgxAAQKAAiyiY6AIQK3VNA7hIUMG8NPxMI5jGpPklPpJGcdpyAO49mbD3cO8DyMAPSI8QrUKUz8LjsC9v0WkM5QQeAYN8c
f1fZx0x+OEC/v0DaD3oPZYJFHP+8znvwZpPM7RAKxPHKNnPjAzZiACjOg/273zj34k1TBgay3n/LA0dxdKD/eHaXtTypxzPhwO0zTVtvzLv/wLAGw2u5SSO/zxT38ahmFZlsfj7YjU2Q8/5l7ENE4RNs/z8xc3z55d395+VNWH/Z24uyAFUYDVdkKPnARJwmsAEmHOCQAcaQktfbjIjCICIgESLN4jmjmCgwJQr10kiB2NKRAIKSJqrRSBwh36DUJzJ4CcBgLojXnRYn3QicjEiBgYwORe+tTTEYKRUIiIUICnpgWpDNJqW2q1qtQ8OSZ1EwzBxtZGTplxoahxalATkuQBMbkCAAjS/vaYMhPRw8PdI0YOZg0gEB2REahvVABgYggDtPVKklSR080zPB5Pz19uhYiY+qaACIA4Y+nMHWGzcBMkRw1zDABz8D5GD0QC8AiMsJ4q4mljAhgEhiNKZ3ecy0YAq9bpqT1ZoQc4PBF6e67ve5mIkDvDQN06/tIjxnnchIjmEQjgaAERwMwJRCPmeWFmUBWkaZqGlCPakHIFFGDTFB6tAAQDALAwBxAHBQZ39uNjxISIc9GNPwa3QMQAWJbTVMldA+L6+jrApLfSfS/jY3HTAQh3DzdyV1R2wug9MhMiPY78e81rHakkwiCHcDzX5ErA7szcSb8RYZ8c/6dAGxEWIUSEhEwZAZnMDII01MyaaevwrYCIqBqeG4+iJNz5f27MiQPUyKx4qLUKCCJCTQmYkdEQbSTMrhARas6cAQMAkROAd4omAHUCGQJjR0CCAOkxLwAiABoTrlbj5eVuHIe7+31tlnMSd3XH3tAwEQMBgGlYlFGYCM4DXvYkwmkYMDIS97EaOCI5WHhHOiMQHcDCJSg6KtBxT2TEQMRE3MHjvg37J1e9YWuJWYiNAoFIBEjc/THHOgYRnWEUBuxv0dyQEUiQEM2RDMgNznMtRMyMwIygWYSRwuLMUAHoU7BAAAQLB7RHHLlH5fM0CcIh4rxJP5kdpczzckS0u7u7gLrdrdUKYkife3WcUYQRwMy9KRMYgnQsBWlMacyCiBlAiMA7QYWcIMD1vDTnLAQAgIEEQCgszEwYjGmQ1Olq7ra0euZiBHYsJjFnSTwmooBAi/MUIhAROOeOVBmYRwQjpcREpIzmDs3dwB16jUaEALFeDw3cNTbTarNSin1iLqBnpM6dGCKMEDzUoW8Jfxy7BgQBYG+WH99Wn/J6hEVwKeV3v/+b3cU0z4eHh4/3Dx/v74/SA1mnrPv5vAKYDeMgZ9w8Bk7rIQtjRIw9JJzxVEDqaR0MguO8oGEe/UUHJKGUmJkTcQdMe9kEAMWLmnq4qYVZcw9zDGVmQg4ERHYAd2+t5UGIyEwhgjp3HQCxNws1zC3AAsxCVZsbknVYLBEkoHHMWdJcLaUAQA0HDCQM92AKBwjv7wIRKBCBzgxy6DXDmTPSO+FedT5//nwcVvd3D9vdqLZar9fPni3SB/z4OCk1MyZKzOeMgTByGgVz4kSEAZnpnEnAAaGT2RAR3VEI+9C9R2wPR6+1CpJIypKE5Vx0MFlKfSzj3uf6EBGllGW/5JwlDyLCKcNjt9rjg5baW/iqtWoDgApnYmYP+g5h4R46jtmioWAGIoOUKWWi0pgwELA1ZDG3oF7ZPVEbGAA6qBPR62+gR8JEf0ZPxa3VCEDkq6sbQIWQq6vrWpczrMLIRMSAGsGAImKthSByToSJJSMNwiICpuhhj/TJxxANRCQoQsRIDEhA6IAY3lRZzYwG6jyWftJzHgBQJLm7iZqZ1tpaw4D+qZiZRWVm6wHRI9RKKR27OpWlc5KUpI/X3aFpIGJK3IDHVXaUxRo45ElyFiSbVqm2yijFIWU2B0J29/MgrsfHiJ6jMboS6Ecqzqd5nykxwzdfv0Hyq+utanz/9mPOWVS1Nbu6uCQiLUsSQQBwY8LEMiUZMo9JBDHMPSqcyS2IiICojzOyiHAzcGFiAowIJEwsRuFu7ue1eELvtRrnxJAAINRaa1UYmQtia7r4QpIt1Myij6EeQ1jRVsMCsJmCQUUzMzcw86WpAoZQHlMINnMgrLUaIKX42S8/e/fx+PbP3758eS2pnUpJkhwpZz6eTogI6H16Gh7MYKZIHQc6x1HoBKiIgHAPcD4djjnLx798u9mu9vv7Z8+eiVmIcCnF3RPhKAnChWg95dUwrBKn3ga5AZ1PREQYAPnjZBXOScg0DJshEQh4NG/u7gjG3ty93ffuc7u9kJyZUydNm5m6AwplZkowzy3Aq6J5s3iEJ+Aphlo4BVWwU2utteZBJBBoFsXcGcOjlhbVHVTDa2Babw/Lh3cfbteXzz77/NVqvbkgOS01gIP466+/3u12t7e36810PO77ZL/W0k9qc4NPHh2UQaCcUtMqPNaiwtMyG+F0PFTprEZVBXNZjUQY6kPK0zBOWbIwh1I8VrMevR0EAH8EQXraJaKwKABEBo5E4MCp8xQIm9vheHL3nPOxqOSEnDog3x4hKOqUq6DqoQam2loLMER0balySqnzbABQAw61nuaiEUyJiAJJPdSjFC1tCQpOVE0bsNBptV2t5vlPX/zzaaa0H0jyOE7TarfdrL7GdjzcSQIRFOFPscTmhv+ajBefCBXCIYCYU2st5zHQtIU09awxJMopp5QggjHGLExIGGEaGEQgJNjZd58ArggAQX6mx4QjgsVcWmUVkYEYEANBI8BgcVfVk/q+GQByEg/U8KbaF3oYBiaKubbmBtFaOS0zuCdmd01GQwQgGiBaFMCHUvfz0hTMTiJZ8uAQs9ZjWZa6mLVAkCFjylbs4JHGdP3s4lfXL/I07h9O33339n5/ePv9l0x2cXV9e3s/zzVJQqRaC5EgnAHyT6NnP5SI4N5nC6KqScZwSJLcVRDZAgA5pUQBGJ4l9bETAQACBiB2zAkMgojC8RErpHOZiGSqCGAQTRsHZxG3VsK0RURo8+V0cgsNEJEAOpUFkEnYEcKRqDLPAJDdm1tE1Losy4kBc2KOyImDBYksoLnPGh8PZX88FafWGsAiIi18Lsv98bAsJxnyMAyxn4HloZRY5evXr9JAMvo//If/369//dvNll+++klK+asvvwOgIW8/3h1qWUQGZoboIiyGT9LvI7LjABQROefWWp/jPi49ieSxE0taa0gw5bQaE2EQBiP0UsndNaznX9O+F/GprTbvhe5Z5oREQeiuVkNVj8vSgatlqaquqsQJOVl4VXOIAHJ3YOr0iAzQORZNK5hOScYhZUKHRE1JpEGo0sNSb4+n+4fT+8P8qHILDbfQqm2plYiGlFbTJq+lWqD5YT7NtaT5dPP8WR6Gm+f5iy//i1nc3Ny8fPHZ/f1+msavv/qutSK8giBE1hbEZ/VIR0ke17aP1vKyLKvVqstBalVmlpQSY5iZdyYyBp0Fqr2WiD4hNTCCzlT+cc7jEGEBAOGPLFHAwKha69zmMhdtD/tTL6pU3TWW0jTcPJbSqrkhiHTM+gy/roeMiK0Va5oFxyHtdNyMqc/4BaA6ztruD8cfbvd3h4d3R1XV1
lqttWlBIhkyM7949vxqd/Xs2Ys37954VWv2xT//5zQNF5cvnt189ubN+4uLi9cvP1PVh4eHb77+YprWTJ4H8dlba4QkLCmR+ZmrD51X9kh0JiLVOk2Duw/DUEpZr9dmIYgIgYDQJ6IAHuEp53NYNEcMISZiAiSidKZLkENAdCmvQILWWpgFoUXMy+nDw93heNTwpk5EiTMAISEwWbWltMXstCzNHIn0UbnGzHeIKSUIM2uj8FoThg9p6wgabs2Wpodmd4fj7f3Dx4f7D/M5pyFiAA55uHr27Orq6mJ7OaTh9uOdG4pkGfBXv/rV+9sPd/fl/uOteVvm9tOffJ7SsN1uu+7o/bs3rbWcJ1QiTKYeXVSBGGeZ+I+DxS6N7tPcUs7aXjMTw46tmKIoBiMwM6GEVYhgIiHicDAARsQwQkQKDfBgJvBw166BoZyU6OPdx4+Hw2HRElyKIuTn18+JJCLGaTUFlqWN2tzhuzdv9h8+dN229So9sNYqEiKS0wREAclDSouqIIk8oDqVZsclPj6Uw0Je0RxAIJhmrVVZ5jjNd8dN2W6m4+EDJqqh2KaLdPPTV88+PhzX6/X1zeX333/37fffPezvVtNms7n4cPfhtCDSypxT4tZm6OM4Sp0S3SWHiNzhZ226Xq93l5ettap3rz57SUS3t7diZkjQ9yN+InogInxMcME/CsYRmAL6cBIRg6A1K/NczI/LPJsdllLUaqvAPI7TOOzOhcUwusP+eKhVT3M5nU6Hw6Fvrkf6lTOzMHf63Ho1Xmw2q8xj5rXQdrNipGa+niYexu/v7k/HvRvmNI2SqlvxKsQQ9nB7F60+fKDVmHKmabcaNqv11U2X1u12Fyh8dXPz4e6Hm5ur6+trs2DKrZyW+cicI1AVEFNEJywxRBCSB4pkAOiF5jAMm82GkfaneT2Nq3H65ptvcs5S2wIswthHksCpT4z7Zo/HaravZq/j+9nsNbcGtPBjqbPW+4fD3BQkcc4TsyN5BAHVpX68vd0fTyR5Lsv9/hgRrRkzd5Cxg4eJOOdMtuSc1+v11eXF1W67mfJuPW6nfLlZE+BpLg3gYW4CkBC229Xm2es8Dg+n/cf9/dxqrRrqJEnArGler1fjmtNUZz0U213dBOL11c3DwyGn6f7+cHNzczzM47jx7fDu+z1SLqW5e0qJKFFvBR2JWdU7v7UsZbVaaW3g8Zvf/Oavf/6XaZpqrS+fP7u+vpbzBvFohDEkfJwOuZ917vHImzkjxB4AQMIAUJo201OpJ6uHeWlElAcgquqn03ycy3E+SeSHw2GYxqW0/XKq6lXt6vr6YruDiDIv8+G4GXe7zXqzXjOz6LFzgcZBGIzBRuGLabq5uNisprnUovHu7uHl1cWvfvYTGcfd9edpSId69Wzen5Z6nOcwn1JG98P+Xr0NMqzXl2/vDg/7+XB6f4p2dXVBDMO4CTCR9Xo1rFa77775c20xTcM4DF0L0Exba5IzsiAzhjInRBSpzNxqvby8/PIvf725uSHA4erq/fv3zy6uhJldLeDc7fStp6pC4Y5B5Oeu86x3ZKSuzuw04lNdTks5LVUtzOE4n45zOZyOZkGSCVibX+2uZBrwcBo3W0jp7v5+s9kMw5BT8vXmwOnVs5uXN89evXg5phz1ttZayuzumXFAzOAcBnXhaRBXkrRO/PrmqvMYIk0GEcBJthdrK20LBm42Hw+75y8p0XRxFXko39+5cVOcI46nH9brSa0wwv3t4g7rVTGX1bSFENXamYXMQpmLKxGpOwDWPujOQ5d71lovt7v337/72c9+lpmvLy7fv38v3hQ8hiwdnG+tSdCUMiJEh76jy7moL3eiVHrnQCgipGdw0MzMw8xyzlvkAOoER4Zhvd1sLi4d4Yfbjx/uH6ZXU5f3rMZpFF4226vt5np3cb3bZklU27IsR4imJRFOvXdxy4RsJhDIMApfbKZhGA7L8vFgagW0MXkArhI5wVLmFXPOediseBhPzqYYnogGByTCpRjxVOY5wKzpMt+36szJ3ZEJAZp5z+xMiZk9lIRczQGGnJbFPeKrr75Z/fpvD/vTfr9/++136/X62bMbCTsLbMDDmkangJ4HYRA9tyOeZ5lEAJ4Y1TEAxpwAVgg8DAMPY6l6fziyJLVwBw2otWrFl89fjKsJEl9cXNzsH+KR2qiteS1TklF4TELe0EPApkQ0ZVUSgpw4E5LHfDoKnvUJjD4wHI9zm09WCMEyIaTkEc09PDDJuJmImVNykbJf3AOAiJJXzGNurbiZ5G0rmvNGW1uWinT2FzAIJOiaLFdERLMAcYcI89aJFMPInKr5r//whzqfdpfXr1+/3m63Mg0DRpc6OAOOKQuf52jhgcxEIp1c5wEGxJhS7jwO5DQN43a1dqRiiiw3F0qSTrW0Zktpx+NRBwgrrSI5t1rF+1SNzaAZNDcR1vk4E7KVzWo9shECCgcTMydBIQxTaOxNE7HWJoG71fp0WlpODnmuZW4LIgR6lAYA6yElkc1uvTQ34TAXzmx0mOfgdamEIaXqRgZKuZRCJJvtVWtFVVHY64JMHoaBAAkAALv2jCzCIUhSVcvCQfzlV9/M83y5uwjOLz//mYR5EjZvncFkZkjSmQMWrhEE2Dm0fbKGKVLmlFKniDBzIDUzBzCHkl0DNqupmR9Ox8wxFzMtRUtKwyiSh4QBRFRMwZsIjYNMkpEcvYEWAEuSh9XK3VWrEAmhVS1QECCJSM6MDSOmPCxzHVhogJRpadWarXJaDeMwpK6IjowVXRIwwel0Gla7En2BcBzQA7U0llxb6RJHBwIzTmc9XEcwPPRpYnamuhCSsAJ88/YteABQ/XCbxhWlQbIkIsgyZEJ319pCuHPZGRCR+pTaH804+gM9EJ0CE3LOmYiW0pbWADwzB1KlkNW0HfJhKYfDoZTK0MjNW6tLizBCvFmv19NEAbUtBLjZTOv1enAlIhYxs8XN1ZSAAMy8459TytOwMqynuRJAq6VZdYQxyWY9EZEQJmZE6OYxHoBuzJQyqypKhLVxHFXdwzjJspxyzubNzzppRsAIhTNDG+iTQd2Z3YbcLJjZ/MzULGbff/g4N5dE3LRgF/EhMTMTQTgiOpznOe6OyI/CLUAPREwsBJiQOJwBE6ETBKCBm7u4deUXAmdctTEhIpgvEA0ijDabzXgWWcZu2kzTNIyJiFY4qapGIGLOWVvrUnJ3j25l1an/em5VwRuGUxhjHpgQEdyFkRDMWyeQJ4rEiO7MMI10OtlSjmYt54xIIqLaCZPnoXsARDzOyp6E5mepGwGd1a6A3NrCKJKEgPZzOZb30mUZiqCMubM5EJ8GKRHR7McpHlHnGT62VcQR0cqiiACUEDCRejhGFukj4tVmjVvsGqRlKatEiBMiWlMAo4DNatput+v1upcZNhdVUG1PIksEoBDX+gRSQB/TIgrxZstEU9Vm4UlQkMAxMbnrwABhED4NeUpzEpp22yNjWTSlYSnqrkvTnLM3B0T3DvUiBTxOeYhDgc7yIe10EERA9kAmTGlAxKZnzhahiPUCk1BVm3CfSQhT
YKcqBuKZdtYXkoHdwZorKlAwdp7XmVkrQZ1OBYidK63ekNkRURATKnYViKWcRGizWm+32/U0IGIr1WqtdQkPDFOHR1IawKPWtbU2AozjWB2qBzNvxzQMqblVbcMw5JxR3a21tlgwtAqlgToj7VbTtJo4yXH/cLFZj2M+HOdEWKqeWcHoiNjJL50HjoEOROfFDToPk89M4ohHtrtDt11o1c76BMTQsK4UA+oj5SctD3WiT8euTYMIncMsIlQRu9MQMYE54tnJDM6dFTtwRKhDpnEQnmuxR1BxmqbNej3lxNhfPgRC7ZRoIowQIXf0dhY9qnopbVKdhklEVsM4pLxEHadpM6zMjCllFlPVBU66gLswjcJJ4PnV1eXLS6PxICwkgLgCrO1dqEGzznLofPq+aj1DPcpWqDO/EACZEAMQRXJrDQDCXCR1F4vqRVJKpRSnEAQL916ydOcKhHCzcERwoj48d7XgICVEIyBEB4uoIIkJMBESUhJKxHhOi9BaO5bF1REiC1POXXq1Wa2GlEOtloLuCCAAKTNqVHVEYBZE9KYRwUSdYHLmkcXZX2EpM7hPOVMXEHpwEIn4MPiiVIMA2UFb06j3p1NcXr568brU+vHuPgJr0ZQGh+js306kRwAE6gZZ2OeyDhFn3RvimZOccybA4/EIHrXWnPMgSbpOLQ+CRKWUZVmmxMCMwmDhHuhhBPyj6QaGRY3qLp2xf3ZnQx9SDkQzY0AcmAJaa4EGEAIohEOSMQ0gKQgzSZ/RC9E4DmiupbbuFxbRCfwdrzkTvf2RPkwkIqzeR4cEXuosC0/TRCiAgAiJeTEP8zIviElEluPhw7LcHdrxh9sXrz7L4/Ddt28b+DiOyLTUQkn6tBExCBghAJAAPQgCACFCI/pCBwCshqm7XILHuB57+2Nh8rHeOTdwO57Ks+0WHW0xGMUCGcUxFDSTEJ3JigsYP/LuUgA7U/dHaWhgGB2gciiN+0lu2cEQeEwpPbJtCQgDsUGE5ZyQSV1dYCkGi7XFhTJAeETmVNRra+CRJM+tO1kyASQEBm2tAMAqTY9JXxGwmmooZIkhqVJ1AOGHwx3xdHnzk+bw/vu3ljTCizew5JASJwcDLRAGHIAehI5s3SfClvVmBFO3Ak6hsN1d//TVL83inbzT0ELH1haIJtYspzyI9FDVWYnRMSe3MKBHco171No6X0OIgIM6+zEoItQNzyYsZ3PT1NOJexA+7u5e5xERgQcxPbJxHIGJgJlbAi3RWu00qwBSN0eQM0qbeq4z81IKBWg1RlO1psqU3Mysdb5u+Jkda6Ey5M1mEzLhNH339tvD/KDY+hg43EUGfzITBAQIIIY49zoBSmws+pOfvSjL4bvvvru5fob08J//9L/vtleXl9dzVRJ5dfHaTKUtBdgMK1RzAVVV9UbGWeJszeCtNXQR7rsK2CnOwg6MiEgsXd2HIXjut/AsLoV+YHo1RohAT1h1l5z3Uhdbq3Mtx+M8WzvMx6ZmEEgizADIREystXblGSEPw1CqMnUbETYNVbfktZlrJY6n2q5bTZijIajp6e5hLgWIEVytRAAEMZmZY5c1Yiczcl9XokCiQCCozR62F+k3F68GGe7v9s3m6xfXp+Pbjx9vEfi0vIsIIZLWrDbdDMMwjA6k4eagDgTAyBjYZXRMFAHq2lez12zuHpFcvOs1Oz/H4cyPDEQR7pUw8o9qht5BlNLcjTkhRGu23x/3+/2H+WBmnRVKJF3enIkbGAKauvUhFXVDgzTmMYIi0Bxas2Z6lqqGuwMihweQzOX4cDguTgdMwzBYawE0jqtmYQqlFEROnYEO5BFI/XsBg5HQvZnZ+/fv9Wq6ut7Utj+cPlzdXN3vvxvy9PL1er1en06nDx8+SOLsrsEwTeuUEgB2r9uqDcOBOXe1JoE5uDUMlT6LdwACdLTuYhsuDt0OqyslkEiYMfHjMsITYSAgau3FELVqzfS0lMPxtD8c96f9NK0kiTm4OziitwBtbqtxioju3+kQTEREKQ3LXDt8VWpVU2FozdzOXNGmkYaxtMPcVDEZurpZALEggHsFIGamoO4D5w4e7oZEoAHs0ErlBBcXV9c362nCVvZDXrf6bjVN1uzqcgMA//7f//9/+/vfvnh5IaHGSI4CAGYRDOZezXsB5ADNjMPDkSHCgPi8Np1e10vUeKSImhkwECB84kRwnmRjn3p1t4JQVSKBoMN8vN8fTku5fzjcH45CtBrGYZjUoaf47npi6jxAuKk2jGAixEAKQTGb+8Q/CCOcCJ5GVWZUiwWhWjgxSg7n41zymIKs1obESMjAj9wQQgwINIwANPMhBRKs1hMzvnv/lslYwlsdx6kW22wuv397+8tf/vJnP/ub1bh7eHgQjHOb6WomBpCqGZTCq5G7zapp79YDIbEQRW9sqbOrAdQtSgwjqmofFKazdwu4x/n5CHh2Kuj7zt3BrJnD/ni6fdgf5tP+eJpre7bZTDwkykMiGEhVQ5tCsx5jIlwNETixz0bdJZu5+/pREsQ4n3ZAM+jtVm2ujkjJSIgwzNXBVRFpkKyqrVWR7GoASCxE0PMhYAQUwHb97Hq3Sc0gp0DyL/78l2c3L+7vb93p44c7opTS6usv3202G8EAQCAAC19qZUZz9ZTHyOBAbgghwgyEbhYO7t3sCCiQJUi68JgqGQVI14Qlw6heETGlJ5ShU1jPb9gNlqWqx2Fejsv8cJpPpQJS4kyRwkCYhbOANEBXYxYCZOEAU62OvpRTgHV7mNLqXGPs9t8Bnc9y9s/goVYHYiAJR0MdV1OtFZGBYZ7nzgk0VUDvokgRcSTosDo1DM5pfDg+7LaJxFpd/ubXvwADutodD/NqNW7W6zdv3kbE+x++l2VZhmFoqjlLa00T73bbcG1mQZAQutg40IGJkRCCmc+IVkR3EnnU1wO4EImQt9bsbMt19gUws14/hYYFttbUo6q++/DDsdS5tuq22W3NqXkIEpFAEiCwhg4IBOGWCVhErRJ6hLlr0aZuc1lIZBgSEak2d1f11qwEmlNTm5dKJGZADKqVGatFaHQ7qNbaj/48XRwL3X8RWiWiQRsP+eJw3F/uVmZaFlutVn/9619++tlPKtXmJxn85z//+dvv3ggJu3unjTfV5gkRKYl6QxICoK78fbSCTY/JxQN6VypnPxECcIsws1qRkQQ7Hd86TV3P2jswMwcsS1P3peqstbTqiJilqp/YrBYOg1KBqWvlDIyiu5aFaL0/nlLipZZaazUvrZbWEqGFo6O6gcVpqU58OtaTmzqJiC6GSBFGhKrWGzAg0la715j7jxhbAPQ+lyLGcfXV19+xwJDo4w9zK6ftdv3eH/7w2/9RtX28/fLVq+c31y8f9nd5nKTPS0XEIFprZmYQA6Vwde8mkdgcACEhQydPIpkZmAdF4jMzycIJ0B1aNcdgAhPJLCzSnthcqGZmAV3Vfirl/uHw8fYeCDeXVzLkh/1xJj+VpR6beuC5PrOceBQSiAkZW4vTKWeptdXaiuH98XA6nVaxESnMj+wjwtNpDk4k6fhwf3+4b8GQpgA
1g+4K2uEiBwjoyc8DDIAYiRAcnTBShsPx42q1qlVxmL7//t3l7uLDB52m1f/1H982XaZp/ea7RRhKrc+fv5QnVbQ2DEI1m+eFp7NAwiAQgJiim4UxBwQgIbrhuRQFAPUIUwZkoYgQpAhGjIahy6JuZoFMHV3t5cRclvm0PByPiLjd7W6ePzeIt2/ePcTJEX74cPvm/TuStF6vp2nYbdcD03aaNlPe17pRG8fcP/599e9vb1UVU4b5lLnbwumxVENc3G6Pd4qxu1qf7meDCiGEZEhgjtwTQDCL24+0RcQABIZAhKrLertelpKHbOFXlzcfP96/fPH6dDoVAlO6uz8yY86pzMvth/dS1QEA1RR0SNksjsdjSpL5rOt3Aj8blVFn2joAklBv25EDwdxczYhScDCAILijebg6m7urOwOrm6o7gKruD8f9fl9Km1abq6ur3Wa71JKFPn6435+O+3k5zst+fjD4IALjOLy8uX71/Nk17EaLgnVoZmbM/N3Hj9/f3U15vEZRg96vLtpaQAErGpBQiF5e37yb/3o4POCw5ZRNDUAEqVnT8AHRwKhbQZ6nPNYBzJy2h/2cc3ajbt41TMNcjw4+z6f1tFml1AWIeUpFTQIJAJoZAeQMqt6quYejB1HvFonIEc1Bw/uR6rrWLl8IQHPvfDwNByByR0Yw1fPkK9xdAZtZZ6iWWiy8mRLzbre72O7GlMdx/PWv/vZw/M/H/eHZ1fXLV5992D/MrTbz43H/9bv3SmREq3HaNwfTWjWltF+Oikx5MCRyYATrulaKqlbDnOS7775Nu9l8SYMbWSuLOa/Xm2VRROzkL2YMiDA3bN3RNEAB0WwgISDqt6RUU85ctIgIBVZbRCSnIcICfZiydDQeA4jALMx1HMdwNHBhdiAGUDdwYkEPOVulISJTqEWEPkoBAcgDzYLCIEIBKKCoA4SFi4i6mRsj1KZ3d3fufrHbbbfrnLNbY0hXu4u//83vrncXxjybUc6yWinEx48fv/rm6/vDjHS7XlcicjVVRab1athd32ymlQFKoDmEqgPU1lzweDoVyc9ePYdhNaN/+Hh/sbmYi97ezcxsVjgzM+o5zeqjXUecVR/gHtod1rvGUpsxc86pNyZNNRxdCwCYNvMqZu5uQiQh3d6rJ2IMcOfOygzAICei/HQFDndbTw8LBLBwQuIARwizCGKLLq6hgR3CmzW1oue7Hk7Ho5ptN5ubm5vddjuk3EM5E23HYT2Mi7knXg3jfl6q+Xq7e/Hi1ffff//9h4/jsTN4pctz8jRcXt+s0qCnpXcTy1xCwMKPpyI5NWYBev9w+5Of/vT62Y3FLiB/+If/OJ9OAI4oy7JM65XVAh6u8aTp7A7mJKa6nJn2SCTJHUPRXcwMgiHY+sQwp9aaaDgxN3cLFWcRPFgdVJMQh1P32HJLyAy8uKJ2UiMAABKbB7p108tu9tMzPj9e6ZK1FG0pj1UbQSzLUuellsIozy5unu0uE6eEEBCtLQrxcDpGQmWYTWdUTDjmjMDTMErO98f9vMxAMeaB6jykHIDEwmNe5mP3swmOIGzBhnR/PNaBTxDFjtVX65vVf/rjP//8Z3+bJlVcigZKSnljFhqZCEMQEIMgogVyeBDyPM+r1ep4PF6M4+lURDpWa0OWWnu32q07JTzkzGIEGHI6F4mPHne9TWTE7kjUSWR2NrzCfgkFRHTBXJ/aP8LqT9ZNEQRqEE2XZqXW/eGkqoR0fXm93l6koV+hAEBSvS7Lcjgd7u/vj6CVqJS5NgemnMbVarXb7YLxUI4iCQiZEzK5+3GZkzAAmHuYAWF41NaKtmGaHk6HzbOr689e3Z0ON+vty5exLJWZCTgGdnfTmsdERG7WbytxtwiXxIGxWa8IYplPY06Hh/ucBm81pcTCtbbN1IfeDuClzCIi1bqNKHX/SKLUR4wO0FQpGBiZ0MyqWdfBMXPE2YCZ8NElCjHcDYAfm6gON1STau7utdlpqadmGDiN4+7Zs2G1dmKLqKUEwLzU41KOy3wop4JQCUurVT1TRgohubza5VXOp6EbB3e0sA+oS+IIBWtMxEw1nEVAW2t2/7C/b2357k0FuDvMpY13d+/cQbUGjD9W8hERQRxI4c27jDzC//aXvwCA169f7/f7N2/efPHFF8DkZq217W5ba72/u728uj4tZZqms312RBhjV0yOQ3b341w2q1EJOJyDNALdOtxcTdN5G3I6s28JCcLOZllOyI+gPSIWDwNStVPVw1ItMKWUVhsZV4emi5qpLsvSJY7VtKAbo0MUr0ExroYxT4ho1jbTKJlX63Gp1cxaKYlFw9V1qcVbFQjJjEyh3q1citrl5TWuN3998+Ynf/OLQDi+Xw4PJ5GNBWIECgGBqeIn8mE3i4CmM0FYm//yl798/vLZh+/f/v3vfxet5pxrre/ev1+thm8f7q4udwA2ZA7QsJA0jLVWC8BwhAjA0gxxFgITcmZIKSfuF6UYAAUaIISjIyJjBCMisnYDs24w1Pm6QYigFoHYPA5LmZuKSJpWw3pV3dpyQoAyL+7Wu7XW2hKmhDXc3PMwiAghaC2mwTlnoETkoEur1JwRgkKbHeJEbushB2Hpdz0gUx6mLICE0+QhZYHrF8/Habj7WJY5CEUkAYudvampg6GI2FsDQr55dv3y+fM///GPrSx//fOfPnv5YmAKbc+vrz5/9fIwnz7/7NXt/d2H29vN7kZVP97diuR8OJ0wgseRiDQ8zEnp4TQPTJoEIogzImOAWjB3MxHSADAD4H6XSAdvMM53T1CcHXUCobV2nE+llJzztB6n1UjCx7JoKQw4l6VrDlv4YZmPWpcw781M+HI4gCMjYhAhCid1S05LMQ6yojGmuZayeBZIQuzJ3YPYiZeiBzOcVpebm7/5ZT6pauNaLOcpPMDTvDT3xjl1H7GIcLeUhrNagdKzm+eHw+HFq1edQvH+3fevX7/64x//+POf/+zNmzffvX3zb/7Nv3G3aRqb2+X11ev66sm/20W1MUc09cCAACGUFNRU54LO5iyCcHZ86TAcUYQGM0NId5/j8yAkiFTV1NxaKbO2EmHTtBnHERFLK6fTES1SShZOENXdVefWmoEpKliZKzIdj0cC3q7WQ0rTMAnnpRYnzGmMiONxdqqIgWYLQpinIUsa1KGoHuf27uFYcf9235yTAs/lIZS1oBuEh4gwp2q6LKcnINysmUGE1Vpvb++vd/kXf/NLZP5//0//09dffrksy69//euPHz8eDodf/Ozn+/3+3bt3L1+/0mV+8+bNb3/7WxnGNE5ZVaMbrXQbilIRMbN44NK01lqIpnGcIgtSQCC6IPU1BbAgArMu2gfshgtogM2j1sW8kpAEk1CtpV9GOR9PRDwNIzNb09OyNNV+pYhZO52Ww/E4rCbwPliSzfZys9mYhQEeiw7rzdK0npZuSRCmXqvWenVznYaplHLYz4EyTrt50XrS/bIsGsj3U17PpeU0dszbXc2s37HVDeQiIjDGPB2Pxw8fbr/55r9M0zTmYTOt5tPpK/oWPU6n0+vXrz98vB1W09
XV9eXlVV6m+e2b03EWQbrYbE+nU3Nr3siH0qqzZM9LVUbicASPJFhKl5at8xgQFiiEfWjobsYc3R6IM/X5O6KaNVODUG15TAF2nEseh493DyKSg5qHRlQrh+PsCCLJncpiZjjkdTimnBDxVI2Op8PSROT9Dx/v9g8asbQqKaG3w8M+J96sBnU4zmV3nX1pBmJomBhK3R+K0bC7uChVm1r06zIgIkyjG28CccLHWw16ahqGIRzzZuMBs9nHt28vtjsrSgHq9O2b92oVEcdp+stXX0eE5FSrShIWpggPNXevfmaIRETV5loxYJXTIMkAltaISD06O9wBIwxV4TH1MyBxAISbuYMhOWHVAEm12eILIVe101wAWhI/VWdOJDIrNI8BMNQXg6V6adURJHfTF39/++Dagc7ltMwtALOM0wTzg6qunl3X0mgiIP7h4y3mddPTXHRWsGAkVqfWEGggan1iHxFB59oPzgYS0GtYIjII6lV5YjMdhoxpKBZC6TSfhNgBZFgjYtFmpXoEM8+nIlNOVmWP2G+L6Ds/EDQ8IZemBC4EpwLuufVLtphjGEBYKOis9kRQB+iYrbiDekQEMjWkakZJillrMU5y2s/vP9wD4Ga9GwbwaED88eHhtFTJ6eFw36lhZ8EzUx+xo4c11aWYRvMgQY54mJfLddrtdtM0galwauaJWD0swIObhwcDMQWZdcE2sAAiGAQE9PvnelPprkTk/eLaDuCBeXAAqCHJWLVhYgUSTqa1dy9mSkTMAgDLUkUYkxCFm7XucI+IrTVGIgY1FcBmQVUJJchOSwWmCPCcBiaCEApGbGb9FIkDgJ2l+8QgyZCW2mrz1po5PTzsT6cZHXVx1buHw7F5LGqOwJIXWmo1BqA+GSc0M7BYjaOeFlPIABdT2lxejKvpVMtqnTbrCRHRpZouS91cpA/3+2aBLF6tmgMkSUPn1WiYh1NAgCMxATGJI4X3UY8DdaAHEZCZ1QOBa9Gcs1nUZpIGYDTrbjBILH2OS4ySk6yHHKbTmEspwJJSIpZlWSxcw5saEAWCBagbh6hbLSpYwANyyoRgAdz9/sLDizYIVD/HeCAhyeWw7ydKa9uO683nazDYPxwPh5MEqjsHELCWWsKYISUy81YNGc7kmKagsEt0s7u8ur68uLrkIVvoHEuEUUBrkTlLTg/Hw7KU0rAZNnU1NHKkR2mLARAHWkD3PjTEBN0cBxncmBkesTcC6I09PjrCakBCMIdhGmutWg0xcs4U4GGqer6UeMzn0S8R+ePtOGmSYRg6MamZYetXKEgpjYgIkAk5sYcjkRBDv1RS7YzNALlbcYMzX9UIWQCfXV3vVlt03N8fzMEAD/P8/vbu/rA/HOcdt4vt7nK3C4dqSpxqWK21HufxGd1sLnar1bQahim38OJ4mo9hurQWEavtRrU+3B8bkvFUWluaIY0AoKotlEiQu6oFDRzP10ZDnCeJEEwi1CG3AIsg7tN5FtNuFUK1FmZuaiKSBrF+HQ9BZ7LIkBIRPbu+rlVrMyMyNQCota7GKaXcGYXckZEYWmsNWoSBZ0JgTBgumB3OJkzdmwKBA6yazvPSuTdqhuBDHi+macp5M6yuplW/hLZG9AU9zovVOUuapnW/q8EJ70+Hu/v78eWrzTAJgLfqYM0bJPTahJDT2Frpls9LKU3bYsirUXsdJmgRZs2CrA8aATsaj0wYQMzRtR0BiOxuDl1ZCACA6pk4AlwdCPtOd3dgtAhwc3ek851G3SaVO5i0Px6Pp7rUNqtGxDiuOGWDWJaFEYeU1FVKIUrn+4vcu8ct9dvoAhnPVlQAIMwGoarzPBMRI1qrKeXddnt9edWB6IGZERhivdmsVqu51dIqz6XV7gOHLWxx5/Vut9ky8ygy7x8UvarVVkhSEIgQIqSUzOx4PDa11nx/qhNPtVZVoOT9ntaICPeuD3q6jupHhlCQuyJF04YYrsGcENBMp3E8zTMigkegppQ81Dz6/VadDXeGiTFk7QIAmfFKsmMBgpMrETrF4qqqTjiwqCl5oBpkscDzHcjavMKYpZRT9pSzBIRZd5NuGFhdW2ullGlIY8o315c3V5vwExG7qWrosQ7DiD4PedykZJSaB44IAIk4Iuba8jA08Op2LHMInhy1mpqvQsABaHAEQluiLEuc1DGvVtc3IeN/93d/mC5uvn3zw7/89Yurm5dffPUVIYBVbH0WjtqCOHnnLZJHGIMBBfiSRww/AITg8Ovf/+2HH27v7w/7h6MrCHMEAbqwlFY7vdkdUkrmIe1xc+dp3Lj7aWZAhCBErV1vnxMTuJOZARznEwG2FrUupgPDKiLCWnd978eh380TQK21DsVHBOUzqNpaC/JM7GBF22kpD4fTOK52u11KKQ0JAM2shavpertq6kny8bgvrWm4uhkEMlXVeZ5hoFZb0dapERSQh2HY7O5OVYb84cOHd+/e/bt/9+8+3O7/8sVfO8s10JBFEPV8qacBkLWahIjAzQFiyLwstpQTk1a9f/5y8+zZjkm+/fbbzWZTtdVWlmXBU1sWjUAi7p4AApK6gmEcV7W4xDKQMFEEH0tVbDnnhojh3VeJm67GQUTUo1Q98qzWGGEu1R7vFDUH1Y6gmroBITD1ixMNomhzsNoNzAECobS6aJtbZeZBhpSS1vMNacZ4nGcLN4K74/3D8TBr1fA0DpQIgz2ihSOTBxQtRSOxpGl1tbqYlzqupvvDAxH98x//U9cPR0BYWGggOVCgdq7vkMW1Opjr8uLF7mI3Sbo+nY6Hw+HNmz+XUsZhVWtV1eOSU+Zpmn7925/+wz/8Y9Wy3VzPRUUyeEg9G+ACU8oiCWTiwQCOqq7miIoOhADORCzJam1dZcPYwA+neSGchgxAOQsjAsCAhMgerhYOlDPlYcrj0O1q3UDRukGwYQSjY5j5Mh/dPcw6d92arzebkzcNPyzz/njczydIzCLA4oTFVCFKLXNZarPZfa7mMoKIEztQIBGnn/zsZ2/fv7u7v+9jxMTJ+1X3FBAeCEgEYWEe3tzrepWE/eHu/ec/eblZb5+/2P3TP/3Tf/v3f8/MX3311Wq1Wa+n+8P+T3/6J5JFchnNa9tr9WmdtFV5OM1THgQQkYc0rqapVj0sS3c16LMnILZWPSChcB4BWd0jMDEahJoFArKcRRIBQDIMYgCtH3aRnHMaByKM84UURuergEI9gNAINcIxgqAsp+PxeDgchoehm0AurZp7GofdaiNDbuEO0VqrrqdlnstiIMjCQy7Nv/ru++Xrd05Jhul+P19cP4/A9Xr98PDwSPNHJA7qgpkItwj38DGLWbn9+P7Fs5+mafzTn/7P169fTqvd5W7z8eO777///uWr58MIX3z1x1//+lfj3//q9ec/2V1M7vnq6vWXX3z35//y1TBt5XgqmbMQUwRTmvLU1l7MQFv3VVcPcNWmGMDILCkYzMNM1YEJBmFD0YDqnd0UXisgLaW01qAzT4SZufugIYZ7NPd+U0FxDUckMQoQBofT4aQEC1htS0qptJbzSITb3S6NGYnHPCDicT7NZQmEqt6iA
rATOZGGBpIBafVhXLXWSmmdGfWojcHwwK427OObAIKYTw/L/LBe0cVuHbD8+m9+wYIBIsTXV1c/+fz1l19+gSNPQyaCPPD/93/5n3/zu7+7vLr84ot/fvXqF3/8039ORnI6LpthzWLmjhGc0nq9XtzuamnamkfXs2GEIKP6SVvXW3TVNTsgiQRWc0ejAITw2hywtoaIXaPoBgGUZAA3FE50Zu55uKmrOwh0LhkldqFhHKewTmQbhzTkySDSOFh4prSaVup2OB1LKQ+HQ1UzEI3mTArJggPFAhGpqUVbmBM9CifcvN856moaDojSxbJh7n55tfvFz16L0H/5018lRcp4v19Op9O0St98c/x4+8O/ef3fpsT/xz/+4+9+99v/4f/133/7zdthmJ49v7y/fycM5rMcDqfVuG5ME/OQJFEOIVokjwkOBSAASbVJECAupXqm1to4pLzekIg1NY8WAIYBfSrC4I7qajGOQ84cdnZfQyYMT0kYsrbSr1tN4RHuEaU2AMCU56arlIMpr6b9fr/dbjFIEEWkNWXmzFJrjRaJMxBLTgjcaiCzpEyqVQFAltpSGoiktda9KNrSHCNhIuGg6NeEd8QuMQ3DsBz3InI6LT/9xS8B62m/F5GffPby889e/vWvf35xc/3lX//8/v33KPyP//iPu4tLItkfy2Z99dcvvmVmU5O52f3heLPbnmqNiMSkbuM4TGUcawU1NaMAIgyCIQ0zWJax1XaYFwRnpDTkYymZbRqHxGweQrioIVDXFClYa0oky1JXmadxJYRlAUlprk3NJMlpnvtQ73A4tNYsfCkLZllavRIBIGtKAYl4O61W09SKJmJb6iPYyMwYMlqIatMWTudrBeNsmYTd0dlDLRgcPbxrOiO62ZyZLjfX18KjamOSeamvPv/p8fBut9v9+3/43//tv/23rbVvv/328vLyV7/61fc/vH///v2LF5+FS1M+7QvLQJRFUR5KpcNpEmJmc42IzDKlNJ0vQXIIJIIIaG4azkgkqZqC+ZiHuTRVtQy2RGbJSRBFm2IEMpUWQoyIy7LIOHQLj2kcwCMQRBoxB2Kptc9mEYRI5rm4x36/N7P7+3tGWQ0jAkzDKMRRFdxBDTwAoDUjkSQZh6Eu/VogauoEARxdzxAR0GW+QhHWzo0NRL96GCNJqmWe57LfH8zb+w/3V9e7//Sf/vLi5TS0+OlPfnn7cR8Rr199/tlnny3LcrG7Oh7KOGymceeQhL8Cz4QiFbBVIyoAeQ3UIyMCJKCMWCM0wAm6nbu5t6YNYLVaoQYwkbBGIFMgW0BRc3dLEW5E5EDaHBOrxf5wTCw6emk1MXGSlFLO2RHmpWhrVpZamqHknKFZQNRSiGie5yFlk2RmeSUIUJZFazEzRowI1cYoBpYBXKO7HwJEgEGge4tAD0gkzAzcryANZj63jBFEtCzLOI5lqf/yp78Se8r0w/u7Uua7j/c5/7DdbiMOL14+P82nh/3Hq6vLnPOv/uZ337/9MA3pmy/fIAyIuS4uikLgiwGXtl5ZSpkJoOl6tTrfjb3MZmHRFPDMJFV1d8mJiLr8bkiCJGlIYa02dWyJGJnUDSS7exBqeDNdahMk6pcBp9TdCVup0GxMmYMpT/1ii8NpTzQ4xJDyOI5JSKgfFK9aLTxnGVbDmqZmJ7UGnFprtS2tqXogy9nfol8xHhTdczLcXXtzj0z9TjF0TCxlqUmoGZDF/thvssinA+sgD/cHN/3um/uANozy7u19beXVy9fvP3w8nfDu9uSGQx4aNAlMXQGyrzXJHmO9HiXllGkgosd7qRY3pbOQqwtwrFNIaq1MBMTnaXBgIHSXfbIoUecIRFyv1yxSmx7nOXUpuLYOyoA5Im5W681qXdWSTOoG6K2kcWRHX6/XUx4QUTKrqoWXtjR3mbJYTpDhsK9zldUY1oiABdnACQI1gAIgAgJQHS080OBMb/F+RXVHHZdSj/vTej1NeZjrLDKauoGPvD0dlmHYJfH94X4chuXUKnmAfPnFW7XY3/9VeLIaiy8ig5CniOaEFrpfWsTeYXW5XgnzOE0X5mq94YnzPciIKSVVLaUBkFnkSSJiLlVkDlcCHADAgTIlTlUNwEurSWhpJkzNdH8sBHgqS78Cfj1NY86EMs+zqiPhJBl2uxaKTJx4GFL37jBT16hWjUhyylNe09TsAmRxpCBfrcZodry/R86IZK5AbOZE1EyDkN2BkBjAwdApziWTqu92G6uttQYG43q9398Pw6Dh42pda42qQ14ToRpi9w/lzNSxCx/HqWq4q6BjBCMDkLTwQy10BEC/XG9zTqvVqpo2D91bNFU1yMLMrVkppdeSImLWhmFAxNacMBCj30fKzAgolEqzLBboLAIkx+NpYDqdTomYqIufsBf9AgiBqyGnMc1aNRSiG12qW7h7NS2uxAm8Gjgy5CzTaqjGkIYpbweHk9ZjqcAEYURsEMR9M0hEnK/aeBRcRTyyEJCKKiIPw/T+/Yfr60tVVT/Wk4tkDWdgBjYFHrIH9gtDuqh/WZacRwMTBvJ+nYwFkqPbsS5xsMTCtEbhaZo2zWpriDXM56dCBJ9ksESUNptNYupkNndHjGZKtWaiIQ0OUVp1iq6scffFGgZk4ZQySGqt9YsrwaMui1M4Y2I0A2R0NwBo1pjZLQIgBAKCc1qPcpwXRCTCQDwcHhpSKbN79OuazxewEwGdX/aZyBZnVi1EAMIyF0jQ7QKJaBxHs1D16vvtdqulEQMAqipzMnUidgPE6IxXkRRh5vZ/A6WGzyuMZ43wAAAAAElFTkSuQmCC\n",
86
+ "text/plain": [
87
+ "<PIL.JpegImagePlugin.JpegImageFile image mode=RGB size=112x112 at 0x7FCAB5C7C048>"
88
+ ]
89
+ },
90
+ "execution_count": 25,
91
+ "metadata": {},
92
+ "output_type": "execute_result"
93
+ }
94
+ ],
95
+ "source": [
96
+ "i =813\n",
97
+ "\n",
98
+ "img_info = imgrec.read_idx(i)\n",
99
+ "\n",
100
+ "header, img = mx.recordio.unpack(img_info)\n",
101
+ "\n",
102
+ "encoded_jpg_io = io.BytesIO(img)\n",
103
+ "\n",
104
+ "image = Image.open(encoded_jpg_io)\n",
105
+ "\n",
106
+ "print(header)\n",
107
+ "image"
108
+ ]
109
+ },
110
+ {
111
+ "cell_type": "code",
112
+ "execution_count": 26,
113
+ "metadata": {
114
+ "ExecuteTime": {
115
+ "end_time": "2018-07-21T07:10:26.732578Z",
116
+ "start_time": "2018-07-21T07:10:26.711066Z"
117
+ }
118
+ },
119
+ "outputs": [
120
+ {
121
+ "data": {
122
+ "text/plain": [
123
+ "(112, 112)"
124
+ ]
125
+ },
126
+ "execution_count": 26,
127
+ "metadata": {},
128
+ "output_type": "execute_result"
129
+ }
130
+ ],
131
+ "source": [
132
+ "image.size"
133
+ ]
134
+ },
135
+ {
136
+ "cell_type": "code",
137
+ "execution_count": 27,
138
+ "metadata": {
139
+ "ExecuteTime": {
140
+ "end_time": "2018-07-21T07:10:29.714824Z",
141
+ "start_time": "2018-07-21T07:10:29.676756Z"
142
+ }
143
+ },
144
+ "outputs": [],
145
+ "source": [
146
+ "bounding_boxes, landmarks = detect_faces(image)"
147
+ ]
148
+ },
149
+ {
150
+ "cell_type": "code",
151
+ "execution_count": 28,
152
+ "metadata": {
153
+ "ExecuteTime": {
154
+ "end_time": "2018-07-21T07:10:30.404858Z",
155
+ "start_time": "2018-07-21T07:10:30.386340Z"
156
+ }
157
+ },
158
+ "outputs": [
159
+ {
160
+ "data": {
161
+ "text/plain": [
162
+ "(array([[ 13.36201936, 5.58984986, 78.93511893, 104.44713098,\n",
163
+ " 0.99996698]]),\n",
164
+ " array([[45.040733, 73.22949 , 67.01588 , 46.294598, 68.35203 , 47.975132,\n",
165
+ " 46.75182 , 68.91486 , 85.37722 , 84.38674 ]], dtype=float32))"
166
+ ]
167
+ },
168
+ "execution_count": 28,
169
+ "metadata": {},
170
+ "output_type": "execute_result"
171
+ }
172
+ ],
173
+ "source": [
174
+ "bounding_boxes,landmarks"
175
+ ]
176
+ },
177
+ {
178
+ "cell_type": "code",
179
+ "execution_count": 36,
180
+ "metadata": {
181
+ "ExecuteTime": {
182
+ "end_time": "2018-07-21T07:14:20.172835Z",
183
+ "start_time": "2018-07-21T07:14:20.138160Z"
184
+ }
185
+ },
186
+ "outputs": [
187
+ {
188
+ "name": "stderr",
189
+ "output_type": "stream",
190
+ "text": [
191
+ " 0%| | 0/1 [00:00<?, ?it/s]\n"
192
+ ]
193
+ },
194
+ {
195
+ "ename": "FaceWarpException",
196
+ "evalue": "In File /root/Notebooks/face/mtcnn-pytorch/src/align_trans.py:FaceWarpException('No paddings to do, output_size must be None or [ 96 112]',)",
197
+ "output_type": "error",
198
+ "traceback": [
199
+ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
200
+ "\u001b[0;31mFaceWarpException\u001b[0m Traceback (most recent call last)",
201
+ "\u001b[0;32m<ipython-input-36-1da710ed1190>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 14\u001b[0m \u001b[0mlandmark\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mlandmarks\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 15\u001b[0m \u001b[0mfacial5points\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mlandmark\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mj\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m-\u001b[0m \u001b[0mbox\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mlandmark\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mj\u001b[0m\u001b[0;34m+\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m-\u001b[0m \u001b[0mbox\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mj\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 16\u001b[0;31m \u001b[0mdst_img\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mwarp_and_crop_face\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mface\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mfacial5points\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcrop_size\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m112\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m112\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 17\u001b[0m \u001b[0mfaces\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mImage\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfromarray\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdst_img\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m...\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
202
+ "\u001b[0;32m~/Notebooks/face/mtcnn-pytorch/src/align_trans.py\u001b[0m in \u001b[0;36mwarp_and_crop_face\u001b[0;34m(src_img, facial_pts, reference_pts, crop_size, align_type)\u001b[0m\n\u001b[1;32m 258\u001b[0m \u001b[0minner_padding_factor\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 259\u001b[0m \u001b[0mouter_padding\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 260\u001b[0;31m default_square)\n\u001b[0m\u001b[1;32m 261\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 262\u001b[0m \u001b[0mref_pts\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfloat32\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mreference_pts\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
203
+ "\u001b[0;32m~/Notebooks/face/mtcnn-pytorch/src/align_trans.py\u001b[0m in \u001b[0;36mget_reference_facial_points\u001b[0;34m(output_size, inner_padding_factor, outer_padding, default_square)\u001b[0m\n\u001b[1;32m 102\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 103\u001b[0m raise FaceWarpException(\n\u001b[0;32m--> 104\u001b[0;31m 'No paddings to do, output_size must be None or {}'.format(tmp_crop_size))\n\u001b[0m\u001b[1;32m 105\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 106\u001b[0m \u001b[0;31m# check output size\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
204
+ "\u001b[0;31mFaceWarpException\u001b[0m: In File /root/Notebooks/face/mtcnn-pytorch/src/align_trans.py:FaceWarpException('No paddings to do, output_size must be None or [ 96 112]',)"
205
+ ]
206
+ }
207
+ ],
208
+ "source": [
209
+ "from tqdm import tqdm\n",
210
+ "faces = []\n",
211
+ "img_cv2 = np.array(image)[...,::-1]\n",
212
+ "for i in tqdm(range(len(bounding_boxes))):\n",
213
+ " box = bounding_boxes[i][:4].astype(np.int32).tolist()\n",
214
+ " for idx, coord in enumerate(box[:2]):\n",
215
+ " if coord > 1:\n",
216
+ " box[idx] -= 1\n",
217
+ " if box[2] + 1 < img_cv2.shape[1]:\n",
218
+ " box[2] += 1\n",
219
+ " if box[3] + 1 < img_cv2.shape[0]:\n",
220
+ " box[3] += 1\n",
221
+ " face = img_cv2[box[1]:box[3],box[0]:box[2]]\n",
222
+ " landmark = landmarks[i]\n",
223
+ " facial5points = [[landmark[j] - box[0],landmark[j+5] - box[1]] for j in range(5)]\n",
224
+ " dst_img = warp_and_crop_face(face,facial5points, crop_size=(112,112))\n",
225
+ " faces.append(Image.fromarray(dst_img[...,::-1]))"
226
+ ]
227
+ },
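The FaceWarpException in the cell above is the expected outcome of calling warp_and_crop_face with crop_size=(112, 112) but no reference points: with the default (96, 112) template and zero padding, get_reference_facial_points only accepts output_size=None or (96, 112). The next two cells resolve this by building a square reference template first. A minimal sketch of the two working call patterns, reusing face and facial5points from the failing cell (the import path is assumed from the package layout):

from mtcnn_pytorch.src.align_trans import get_reference_facial_points, warp_and_crop_face

# Pattern 1: build the square 112x112 reference template explicitly, as the following cells do.
reference_pts = get_reference_facial_points(default_square=True)
aligned_112 = warp_and_crop_face(face, facial5points, reference_pts, crop_size=(112, 112))

# Pattern 2: keep the default 96x112 template, which needs no explicit reference points.
aligned_96 = warp_and_crop_face(face, facial5points, crop_size=(96, 112))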
228
+ {
229
+ "cell_type": "code",
230
+ "execution_count": 38,
231
+ "metadata": {
232
+ "ExecuteTime": {
233
+ "end_time": "2018-07-21T07:21:45.873749Z",
234
+ "start_time": "2018-07-21T07:21:45.857902Z"
235
+ }
236
+ },
237
+ "outputs": [],
238
+ "source": [
239
+ "reference_pts = get_reference_facial_points(default_square= True)"
240
+ ]
241
+ },
242
+ {
243
+ "cell_type": "code",
244
+ "execution_count": 40,
245
+ "metadata": {
246
+ "ExecuteTime": {
247
+ "end_time": "2018-07-21T07:22:21.544120Z",
248
+ "start_time": "2018-07-21T07:22:21.517479Z"
249
+ }
250
+ },
251
+ "outputs": [
252
+ {
253
+ "name": "stderr",
254
+ "output_type": "stream",
255
+ "text": [
256
+ "/root/Notebooks/face/mtcnn-pytorch/src/matlab_cp2tform.py:90: FutureWarning: `rcond` parameter will change to the default of machine precision times ``max(M, N)`` where M and N are the input matrix dimensions.\n",
257
+ "To use the future default and silence this warning we advise to pass `rcond=None`, to keep using the old, explicitly pass `rcond=-1`.\n",
258
+ " r, _, _, _ = lstsq(X, U)\n"
259
+ ]
260
+ }
261
+ ],
262
+ "source": [
263
+ "dst_img = warp_and_crop_face(face, facial5points, reference_pts, crop_size=(112,112))"
264
+ ]
265
+ },
266
+ {
267
+ "cell_type": "code",
268
+ "execution_count": 43,
269
+ "metadata": {
270
+ "ExecuteTime": {
271
+ "end_time": "2018-07-21T07:22:31.344783Z",
272
+ "start_time": "2018-07-21T07:22:31.313710Z"
273
+ }
274
+ },
275
+ "outputs": [
276
+ {
277
+ "data": {
278
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAHAAAABwCAIAAABJgmMcAAAzBUlEQVR4nL29TZNjS44deAC430syPjJfVXVPtaSWmWx+zvz/zZjNTtNSS/U+MjNI3usO4GiBy3ivW4vZTJGWi4zMCAYJwvFxcHBc/nI5nXpvZmszUz31vrZG5mLa1QAsTUWkmbamJJtpV2VSgXBXoLeWoKma2WldTuvS1M7rclmWru3Sezd9Oa0vp1VE11ODiAkVdPc5/Xa9/vj4iJkJruvpdDqJqjV7fX1dliUi5pyZTpJkIG8ck7yP+cvH9V9//fY/fvv22/W+BSYRRCQikSQACBIJ4P/5rz9/v254yqP9X//nf/rzl69fXl9fz8vr2i/n82KNEZopgJmaKUwIJCJJqVdKZIRPB5mQFKiqmrXeVFsz66aLtaW1Ra2bLc2amQi0GZVj7GMMtmbnZZyW/fXFp48xQLXW+rqe1rW1pqYuGIw5kUwKUkigEdYbLhdmurtP5+ZJekQZlICoiKhAyRR5jjEBoP3Tn//y09vr2/l8Wtvr2l7O526W05HRzZberBkEKUwmmSKiagrJSDIFkhQCEIiqqoqaqaqgQZbWuplBVCCAKEwhpuuyuAmZmbmKnE3GrkOFiWVZz+czoBAR0AQqUOFEeoZkLgYhmXFWvK/Ldjlv23C/79sec05HAFBVMQMAEYg+0aLtL1/f3s7ny7Ke1/b+0te+NNXsgjRT6c1EJJlqCihBUW1qImBmRgrEtJEgKCKiKqoKADDI0mxpTQXIBGhmIoCyi6S1YIY7VNnbsMZ1JWHW+nISFZKegRmBbAIxMVEHkZGkZVrmSnld1p8uL/vIj23kDPcMglbWhIgo+DRrAmiXdTn3dl7aeWkqBMNUFmvIJClCFYKZ4aqiIsIEU0UFpEFI5Cz3NGuq5T2pqt10MekGU0UATDMBMkmAVAihJqJKookKAKqqmTUKKDLDychUwIRkpJIaaImkJpAm7+tlvshtjx/3sZ84b/d9H3NOi2ZmrVlTwRNt2l5Op3Pva7OmokhheBAiAiiIRCJJAiliChWBZDAiSRGx1gQUQAWmUp4FlaZqWj9ARSQymRWXhYQAIiqoX0TAmgoUEIGq1scIUip8SBKEqq7rKiMEASQZIVxMzr2fezv3fmve1EjOOfc5ILL0tnRj5hMNejmdzLqKCYUposgIQEACKhChQgFpqnWayQQpFamAZpWQTE1NDRAy8fhZgBUQyAyKoowpInK4jYiqHD5EkklCoElCaE07eyB9JpMEFWqCFJpIVyywl768nk5vl/n9PhQQCCrGCNwDyYgnGnRdexcYKCSzjrOACZBgEgpQIYAAhzmDJFXFICbSWmutmVmZSVVF2sNoQDIzIYAKmSkiIgYABASf3yfH7yOZAiWSQUHrJu0EE0fO8AgXNoEoxERM0IXdbO3tsixrb83MzNSVdACZQjlM/CSDNq2jHcwQBkKgCqGAIHC8RwpoLqKGsjsAKISiAFAeKiIQsdbKkQ97RjBAgaqgfq58k2AyEyIpIgDKOUmaGEzrm9VUgM7WZxuqEUIRKBVqpDFUaMLV7LT087osvTUzESUFkEwkSD4xhopQUHahZr2xFB5h9HiQBMMdkhABoSJMUpiRbISK9WZmqtr6oqp8PEQVKpmpkEp04Od7JIkK0J/fTyCDzqyII3U0QK2a7AgUAhU10YSCJlyaXk7LeV16b6YiEAICYSLwzJyEBkkRKCHQBqhIZuRRb4oIVMRUK7QlUqAqClGCmSkiwUxAVLU1U2utqWpmZvlypZnMChckGcFMRhJUSbDsXLY94m2EQ820TrBZbxTxCHevur2+UwQKCrKZrks7n0+nZWlqADKpCuZnuHqWQcOHAIKUJABTExVASSZS6uAqJIVgAgZUscms1gWROX2O2QihHakfAAmImooamUlSAWYG67+gwoQSiUemohy+CgHJiABmIivQmqqpBohMCCGEJJkMr4/vtCxvLy/9+7UqkToKlQ2eaNCYAigShECQYqoQERVR1NlJsAImyQAAKiimBCggEZ5jzExGaxARbaoqQoioqUAiPCOOEglyWC0JovytCigRICVQUZVStUGARLrXgcj0Gc5MZgqoClFK+WnT3ns7sqSKtnCPwFNjaKbzePWiaiKSVe6oHGELACl8fBdRbwyVSYDIhHslazPLBMDPCKyHaY15xMoKnZmUPL5EZfnjpEg+6mCKQAAyycggQ4SNirQIBAGmqXVFk9Eyz+BPq/7Dpe9XaXLZAyHnmTLuG/zjSQYtLzv8Q41ylDIKUaD+iKriyEIAhJXAERHMrKBZGSkzyQAowmr8halqS7OZMUYwo9y/nIrKKvAfKb4MqlXsgJRMiJTdVSAC0yMqVkmnAlM006baLc+n/tP767fbPc0tZMI6bGTi/hx7olE0RRIAqn457FjHs95P1ZeUrIJO9fBlkKplkOMhj88DwGdlmpllcROd6fVlX3picmZmQlVEqlBTCMw8omJrWR9AnfEKh3U8RFDHRE2a2dJyQBbIy8vl5XK5+S1EVbvDmj0JuwPQIkKpVIU8KqVqWwjIEXzIgppIUiprVwVQiJzo53mvCv/f/Y6oKPio4Y82KT8xNgOQZEZGBgQwfRRUlaCEoHuER0bk0QunHFEfAphIM7NEI87L8vXt7bfbuM2ZtIQkn9gpBWkABfn5XsFHhKxAICQLaiuDljUPUIkpvwNMZerj71V2ffbRVYe11jIzMz1mRlShKiLINJEUIShJAZDHp4hq2qLOvWRmPLKM4HhpgjRlU7Tk2tqffvr6t++377eRGfkoZZ9k0EyhCqmkRESdMCmUApWgBIAcbSc/ndHMmEnmYREgMyPiM56inOvApJHHJ0KS7j7HRBm0in+BmnXVYLj7jIiIyE+DMpnpwfg89aKmzESGMJEpZFM2gSnOp/7l7eXbbeRIQv/3Q/P3NCgASAICBGGoNCx8dMCPt5R4tJjNmpmZqlSUfHQmVTkeJ5p8VDlZOS8zY3pGhIf7DHdEHh3Tw4ES8Iw5x8yMiHwEGZZjVgtRyDOA3/ECKGBgAxqo6WD2Juvarr4jqfpED2VKUjIBkXaUiVJxtCr58gjmcZxNzVTLXq213itoytFl4ggR7u7u9XnoZ6vukRHhnpGiKpCjAePhuwlkJlARtkB+FTERyVTRPKrXigaZIoUaZqVGBU1hKpq5Njstre8zQp7qoTXYEpLKFKiCFMqBKUhWh8mqSg9/fDzKxCTINLNqkD7D63G8M1Pq6QtM1RRVhZk1SGQmMzLrgCsIMQIGSgpR8VoJIAIiSTYVSXoEGMFUpYoqREghjanpp3b609f39f3Lnwd//rHlv/z3Zxo0PZNAh1IJ0aNil8MkyGQyBUcyce92GO6zen/EWWQm//BfIseEKSKEVFFTFcBjlhd6+BFay7Xr6VQMJtVpHa+HQgXBDBAFXYmpKuEzNFRUKAo2FRMO38cey8v7T+cz+vrLx4/nGdQ9jAJVO4YvlRM/wV+hqAjL6SpERkZmtsLrIKpy1P4A6sxGVJA9Eko
VW2SU+TIyMzyQNad8wHcC4IAPqqkCUBken5j0gR2qQmHHOZgRKm6Hn0ZTLGqyD0Hebx+//vLtfntSmwSgbfuwVbtZARORCZJCFVi1RGai1EcRCRwuWxXq463qZzl1GKE+D6ACqwJQrTasqiWi/E9bxYcH8JqP45+Z9TE9GjB+Fm2ftQYFInbMTupFEEp006VpSu7b7Xb9wfTnGTQikxmggYdBVUQkQQFMVf4AXFS+IshkRqSpaj6yzr8NAo/y4DMa1BPow9E+K7A/IKH8356Eis+IHNVSMMAkq1PiUc9JoYSizUwj6N5FBlOZ53X5ydbnGTTz8DYSkXmcPcJURDSlBpxIHA18PpzIVTUqLWU+usSqT/GZkQ7DoeZyZeKqvTJTCAHy8ZwkoaIoSPiBPGcgjlFTFboPN1Upt1c1661lguk5kUvvM7KZ7R692dcvX5bU5xkUQMpROQdTIKKF2VEEAVALoz1MRoEGZjX1Aj3C6KPYru7o00BAhB+B4Jh1ymFT0eoIBZX9JUghCnDGZz0fwXxMYx79mkAFTSQ8IzMEqDieSXfPSFUTJoil90i73sYTDSo1kqRHmB5oGoucgQyKkCkHaikiUImgiEhA/TizrfXPUikfjzKciFR++WPBn5l1YD/HHHjkNBAKNYXiMxDk7/ABIQiSqoQgWZjeI3wTTVq09OS69G8f9//6L//jX79f9fzleQYVQZFAADqhUC00XSpRHOjnI7CVk1Xtycj06lahtMOBPp207GtmyQCPoVAe+SZNtIb4Rxz4RBCBGkllReoiRrAiqTU1D0+mmhF2tCEBiiyZnk6XGdNMYx/d+pe3L79cx//8+ZdnGrQSd2SmKIQFeUAVqZBkPgqiz1zxgEPrS9bBlEwXRERh5sd/MN2TkfqHRGRqBjEzeqZHRCRAEKIHzlJ9aiaVWX1FfaiqMDWToItYUj3pSVEePYCaamoqg8XJMFWVJzaeQANRHUhEIFKoIlCBFQenMskDnMcD2Dv8CfhsUvGoHI+G6uHTLN+sOtQdKLsZSRWFEoCSSok/VAWfg7XCFKos1Yebk8WmKgqJV98fkYRBQAYYpjCmZDaxxVYVeRS2f2eDUpI1YwgXYaZkHtQvVDkiRQERHrOzBBQF7j6yTT5KQTzQz3Ko1g70hI+m3tQKmgPh6YHC6AARq5OggEpERnrNuVJAFRETUX1EV48pGGahPpMeiYQkWkKJqZIGdJW1taV1xRPrUGk9RYMxgRQ0FZiJNYM1wkTbo5FBwaYIUgGT6qIoAiQOekYBTqzySyXTWmtHQVr8PKh9dqVVsQORgaM0VyQzEVnlXNb8tT7Kz1ojignEat+hImbaqDMyYhzMSdHWpLfetRUE/pzhZxMgGSxOXJKZUqQLEKL4rF/AKkX5GUkP2PeYkRwJmZ/xAJmMiMpLj5ymwRCzKpWKefqYe1KgicxkZHiGZwIsGCUiwWRKEolMUVWFWqSZNU3VrNwayciYQJpqI5uIiqmoigaegdu3jAAFecw1C4rQI0M8OkkgmKz2RDXBFFZBLiSYmmlmAhK/Q5wP0lZxJhTVHWR6ufdBtRNrBv19AFrMrjzi6RGOKwQfBR4ItXJ0bRRP0YAc9UB9fxOFqTJNdWlW1IfnPFqG1yy+qXZFE7UDtH+Y5LM5BCFCkaO34WHOOuZHLoJAYKqf/X8+kHqFEAwGyKba1FD0ZCn6HchEahXBqiZHm4rqdvFoj0iISlaBysKeD9JpzRpOaw+hkCv1cjqf19lwfRqJuUnSTBYTU1sE3bQd2GLimI6wAGB8pvqjieSDkyemViN8ETPV3nplmGQOn4V1BCmJjMzMptqt2dK0mRwMkMdoQIpYpRUnMgNRDZVoEaQLkM5gPW/hi8mMCBCaahCVhHSRdZHzae3WTJ/Ufbameup96aYiXaQf5yuZoGgNQgpQ/yxE/4BlFEaB1rQ1LapMt1aT+qpDRWRwuDuTkcwIRrgqM5Uhrqp2cFTE8hg9JZlqTQQZiYNjwQOnJ4Jwn8xMStS/ZBZhqjWBkoouOgU2ZVlsWexZDoq29nbqrTcF0EWWI2KBB7fp96q4UODHFwdQyTr0x4xMe+s1cZJjrn/0TyADIshK2gB8+tzuBHrv1rqZaet4DFfIjAhSfYwHZMfhEwAhE+JeizZygAZSJR772ikZkgSaaF+kd7WGp42V2trb0q2pKqSLNBUAkgTyWJ/4Q41etacdyKPg6PnT52yitKYizeyzUwLQW68jXL1PqDMz3GdkRtYgGGREBFFoIOpzSzJjzlkg1iyakoiIupi7V/08gx4EYKZNrC8t9Rjjk2xd+mpv7+fnHXlQmuhq1sxUiqKa+e8qjCKJiJhYAeP1pzKUKFiocEQ+xpLlUwASbICaZs2eekTEHLPoC+meHk6BWiIeFUKRxLNC9gyfDBJR4BNkSGREJDI4PUZmiMDUuqViMmquEwmKwfL1y7n39iSDLt16ocgZOIAhfvaOUdQvHh1nRsDsaFcAgCJqailIZmSMMZiMCDNrvQKyqhW+VgW6Uq2JwRqJPXJGIKZYRjEYqnpTqXUczwwmUlMwMsecM8ITxVHOpEdOIk2YnOE5gwhnOhnWtS3b3L59fOdTilAALZPCoiwW4ItalysI798ETSDAmB4QbU2ongllZBa+5pm3vJNsra3L+vL6Zr2rKoEgk+kR7oEEiYCEaojMZGYywiOz2JykgL2bmhWhCYoU2SJuY+zDnVQxEaVIJpz05GTsc1BTFAFOMo1Y9PJ6uey72bOOvEeg6A0EHhV1jTUOZOTgbx75fgJaK38KFSG0ATSISpBjjrEPEMuy3IZbb2qtSleSw716p+NT8sjACHoyGGPOQovBBLK59d5Z+LcyRe4eH2PetzGjAoOJNYg6cw+/z30fe9BZ3FUzmkfzj0jr1tqzDCrHnAfAw35H5/3Akh4GRTFvgwORCTVtZq1GjbWPQI7k5hGZN8+PGTUVgWiQnjHdM9laW5bFzDAjd5+RwZw+tm3LDBVRUMAWtgIQSUgqHbhN/76N632bIe4OUbNOlRm+zfHjftvHRoGaqhlEU/UeMbud3t/laUnJM7VS4IH61L8r8YdtFDEBkinHmmU4abQQyQwPJkFHBuc+9n3PYAA2JiAjIgkeCYyZNVPaASk6lWckOee+bXcwm2pTaYLOBmtVos/IPXHdx7fb/nG735xzeh2b+qj2Oa77fd93Uem9t9atdZrefOLc5TLkWVOlJp84EUCBihJSyfoxpj8IjnFsyaLGeKYSzOGDEyM8MjNyjrnvMzNnpFmjqEd6ZhTXHZJ5cKNFtVUnlOlzDh8MbyJrt6XZopKANhezEHjK5vnjvn+73r9/3H+5bcMPDDQZXt0C6RF9Wc4qbT3Zso50T6fn337+ec4njZXaPj3SBTxw3MTBPK45x7+dX+LI1CKwYHLsM8PDb/fNo/hx4TPcc0yHaEIic8wYGRQ5Clw5GnNT7SIkp4+YrsKl6XldLr3l0kVV3Q0Iyh782Mf36+3XH9dv14+/3Xx6+JxzTvcZDDXrp3Xpy8vb1y9vX15f3iDy67dfIU5w23
aPeJJBh1eWpooC8vsESVS0AJyqsYU1kqylsAz3sY2xj233eb3vGYWbSiQY3Id7hkeOGbv7iKjVm/zDrzDVbqYqETPdm8ppMY+J87k1XUBnhocH7x7Xbf9+vX77/vHbx49vW4zpcTBPKap9Wd++/PTly/vby/tpPTHy9nFNVjzVP7//w8u/3P/28zNo4e0gLzyMeBxx0QJEC/OFNiO9DpYIhdP9ut1++/h+u9+dWfu2zbpKE1FpYtDwyHQ2jcS+zzG9AJV4nAAVGGDVUFT4dDTD8JxOT7EEgBncndvI6xbfruO6wYd4aK1KB+iZ01V37n+7Xj/m6+Uk8H2/osmka5xXfe9teYI1UWWTftpUUXuYR8A76CJgweIJbRaq29i/3W7f7/fryJ02RpD2+vrWzxcRE9VlPZEydu9zeoReb7f5t/3+w91ba3mw8ETJzEhDa21dlmVp69LXpbfWRM2sqVgCam2BtRbuebvd3VPR124BBjIZgEbE9cfHNeJD5NvSepfWZb2clvN6fnsPasYz4PrDoH/w0MegqDqWg+yZEeGZw2N33zNuY35s232M3T1FrC+9nZf1DLEkmlomr/f7HH7fx7bt1+t127aqQPMTuCJUa8NZlmYv59Pby+XltJyXdlnb69rf315MzD1SbBJ7ZBMyfLX2cnm1dQnGfe63sY0amLhLMpGDrmzrcl76au2Ujs1H5rMMWoPdA1SXkqMolBNHTARGxLbve8T1fr/PuUc6AdVmjaoEmnUmtrFdb/fpAbVt339c73NO93APAHlMAMRMTbR0SZRzWZbL5fL+9vrl9eX1cno5LW+X9fW8fn19aWr7mDN5n74Nf1nX98tpPV1e//QPy+nsjB/b9brdd/d9n0g2sSZAejKa6tpP6+n1Y8Ttvs/xrKQ04xhRyMGH56e7kgzmzBwR9znvc1z3sbsnlGZFPJrDx5yK6Z5jzul+38fuPtzv++xLX9fTcu4g55hj7qd+upzPl9NpXZcmsNx768vS17U3hSKa8tTa22l9f3l5OZ88Ynh+bGPM+MefvmSGLcvp9c99PU3G27zc576PuY0yqCLDx8h06+3l5a1fXr/ffxs7n7ZL1x57FCkiSdbRUFOIeORwHz7vc2wx72NOIkUjOcfYho8x9hljDoRGECLarIhJ1paW6H05nc+9dxWJ6Td8fH1//+nL+5++/vT++tqE4rfMjJgAm8ki0oGG1AzMXdduzEVwMn0/r//0lz9dLpcZmXYOqGaKtKWpn9bpiWR6zH2ntd7MTsvy8hbaIyRDBc9Cm7YZEXkMhCAmGhEEqcyM6XMbY9v3+z49MpP79Pu2b/vY9j3LcNoBPa/LclptWbZ9zIxBbvtozdZ17b0vvSu5raevr69/ev/yl59+ent97QLMH2Ps27a5TxWupqtqAzWzCYwpTBVZm556e385r+t62/fvd8YciCl0FTZIa5Ipc04R2NL6svTzWftydSCFqU/brGl71aGfEkH6Ozb8KStwIJvFXI4QkdN6am2BWGtNRBR9Xc+vb2/r5RLg94+Pbx8fARaP2cy6WQO89dfz+f3lfFmXU29NUmFdujHdIOBScyYRYfq+T7VqzJtwUSyGfRs59hxEpCFFFSYJeM0Bm2o/9RLtakYzv+/HmZMnDT7ryONBNlYVNJWoCafi1KVs2/qQLz2St23MCIjVGlhA3N0nX19ef/r6tZ8Wmr69vb5fr3FszGdG+Jyc+2rahcpkzBwbmjSkqshiYYsCzXQpEaOImNPbWLAAgmRXfVmX7b51k1M3kRwxvcQ3QEwHuZg21XVdltM6AtlMSNNmCnvWJLl5UuWxOQkI2JsZD/bWIrL0fllPAaRIQqanJyAy/NEIbduY2Ztk7D4opj6npqugGN2B9KAXLT9m7Pdd0nLauqiGimhrNANgKt3EBOkR4lNH02OJrAkWa6e+zOHRTISmMtKdkRGLSm+lj3Es/0AxNa2hNaku9XkGfdD9gYQY6kUdg1tVEa0N4yAJ8cRMEuKkR973/dZ0HzOSMbfw0Vrvqq0ZDn2IHEhhWNN1sVPrTcUkNKekiKRpt6URCJ8lISNkBqdM3VHiS02tW3bj2vtdVUklTdBNDLZ0q4Xn3sxUgVrqBwFDqsDa85bp2kNhTVELHE1Vpde7hqiKWVO1LDg4c0TOJEUS6szF5NR0H77tY4wR6ZowaGTEjBkBsAEvl9OpL63m7Myl98vldD6dlnQzUzOSE3SfGRECLeURCXdnpGnvbVlTSl6KOdMjGaJYuvVlERUFmmqzGjAGRYxpwnL5x+7d39+go8gCSHlQOEtuQIUqUEgX9GaqOmfuPiVThbXlqkhtttppdF+ajCbTvTaR5oRLhqSJnk7raelL7wAErbe2rsuy9m52klNGxO9LNEh3PGhWTGUW3b7o4Q99QxVTEUoKK0qUX5vQAJUa3YuQ3WRt2k3a0wBmkpk8qEYHbg/woNOoqEHkoZbWC+YDApU80xQQWbStTWLtRb0LD1eJrkyqHsC+KXtrp9N6Pp/XpZspSZ0xyBqy12AVqsJUNsb8d06VmTV5Pa29nxaCHpFgb9rMJGFarIxUE5KS3lUWk7XZ+rSp5z4PLY+qcD73t0iIFZeIPvao5QTIYlJLlwks2kgmwSaiHUBG7vu+jxEGsh3PlFCVpfeXy/nlcj6dVlVlhHvM9OLHFiv/0dAcRzQiagmhr7K2ZSYm0e77SUR7VxNneniVukogwn1EwFPpUxnK7Kqvl/PL+fwsg8ahCqpA1toPkcGQqC1jrfV5/WxPpT223iBCIiODkQhRTRFJUeoUK04NINZb73Y5X17Ol/NpaaZMurOwfDA/A5yqJARxsHAz0z2m+5JsvbWWp76c+hLcrPF0OWnTiGzWuzVkxpwTsQcImulq1g0vl7Oez9+d+L+fYlA+WIapevBvICnMZERSUkSaqZmKHbuAR++vWsvEbCSNSBJOdMU02+bwONRYRPR0Ol3O51Pvpc8oSQObSClCJEULIZSS0CmlA5BwD3fPDAAm0lvrvatvYDaVdV0PiIzCFGNKdqbnCA0qYOXn4TGfhtgfi4JIEsz0FJOQ0qZJlQdfNKQJKtbWSrU1bf9GboTuvo3BoIPN1Hqz1ltrUHk5n5fWkczpjJBMEEa0pgjOKHbFA9j+nWxfDNMDj1VRM1ozVUmG+1iyt94PeiMhqhQZxQ7aB2gqypjXHz9u16ep4hBJ6pFLJUl5aBmSFJXS5yKQyPZg7BSTTJuZSOk6gFlO10TS1LqJGVqr5YwiippI703MEOFjesQnt0kSBANx6GYc2woHzbHESU1TkwoIM2LMoV5PpyYEKAQcTBCmqZopKZKUfY59f5YG84go9qw+NuJI1OiXrBUDlUCUogMka/SGFEpIQLR4hsd6O1o3USVEUFRsEYWqA8gi5lHplDTsSI6IncKHIC2oKilZ9EcTBWREFkNUAGWaJNN9TBPLHhRKEzDx2HMmWIeIkARg2nrTp5VNI+m1NykUcEha6cxChFRCKJLHyU4wELBPNiOa6rG1EEpRgJ/c1oMgexxdKRLzQzLCRCE60xCC4ZFkr
SKQ4swE9ZjACKCZx/7ddEc+iLuR7uktzJKRGZ4MPvIlBKKaDALarC/9SQY9NnkhmZx0QEqVhWowlRqiU0j9nDIeMbcotVq1gAjzc5KiNex/LHLVNP9gnGVJ08accwwf4bex73PG77uMRfpWFcGnuBEp0N6XOVOlqbZav8mge7ZWxnXQeTBd+IlOROaMeNYEBG13Dw9liSSIA0Qe/GPYoffVKhMo8JB8kFoURvmdHtILMDk2m47sXtWAVuJGFZiZHGPe7vfb7f5jbNu+HYwnSBXCptZEQlFHpFZCtBglvffel7b0tjzY9XA/FF/wYK9n0fMAiiaxjXHf9ycZdHpGUEF5KDBSxA5+E/nggzO1tmDUDnAqa4nmELS03vQAUx7LgGVYlWIs5OcUIiLH8Ovt/vHx8dv9GswKugQQaWqpSdEEmoqIZWa4R0RthZhqa920RZRQec45k0GkkB6ZiXAyGUFVg9p0zqcN6SrY8CAtZ2SYgigN+Fp7K4xEwBSKkSyayYMDUkvy0uxzcoqHLFktytSBLjpnRGxjv9632+1+vd23fWu9995ErZJj7fjU6pSY9kWZkeHMCkUQhaqo2PA9DuItyVRJID9nqxHiM2vZInhIzj7DoDNTRFnqs5ngQWYlUVJ3Wnx7Uh9iAYfqw0G3tULYPvUxWOR6kERCin9bKkDD5/V2+7jeb9v24+P2/XYXwWr9vJzUehBFVhImPJJ5CISi4mOqVkSBaalnwz1Ep9IgoBzHqRbuH9GAEUiq2rOSEkmP0BQrLb/fV46PdcDSInnMPsQzNWr6dLSf+PyRg/l57DzwELGppa5IxrbtP6637x/X63b/uN+3Md9Op9WWRRezXovlEUH3wAzIkc5ARmRmEzCDGcXkV7OI4ByGpqooryhpp0QNPrKqalG1rg/Jg7+vQUeEh9c2bNMCwpCAkHrI7lbhd6iugF5tCcxMq91GJgtRZnU4BwpYiopkYE735H0ft22/btvHfbuPmRDVZmiSioSZmbZEOo4lXpVjHTwZEdNdpu8eE5LFNZtz95mLQqRF4HHMMoJBJCyTRbYXNVHF319/vc2kNDOoZHzqISuJJJQdknK0pZWoUrM04hUSSCRCQlUyDQ8E8JO2l8FIRuScMSM/brdvH9frtt3H3MPX04liQQkKxFRMWoNEhiYlS4wrqSZqFjklJDPIiPSR0zP2McXCuhkJqYiRc6ZHzpAZ8IAHk5SnDekOCFTyseheXUZt+YGkz4mHGKi7C4DWRMJUBZGRCohKZlS5lI9NEJIMBmXO6cHpcdv3j+22DfdMqqTIJLaI8KmgeOicRGZEIgmWxiEDFv7jvi3h0+c+x5g+I4bP4dPQIiNpOLj63OYMyPAYkTPk2HpKV5EnACRtRj4SDgVqD5i5gM5gSlWXAI6tSwaZmT6DSoPWdnUcdMeSpSymPCOZgI+Y4ds+r/f7fd89U/uy2JLJPYM+JSNrR7x4QExBmqAJTRCq4hPbtmTLzDF9c98jPu632/2+LKuaZVI/F7hF9jkCAlt8+jbu9/2W+aQZSKsVbeHjIopDOr54V6xNWJbmTOkEQDLhMxluJQlqDYCa+UPAqiDBjIwjjGIf4+N2//FxvW9bX9bz5WK9//bt+23sd/fhfr3ftzEo2lpTxdJt7W0xXZuds+2ZG7nOhcg5fXh87P7z9+9jzhcCZktkM1ORZNzHdHIC13G/T5cmr+/n+4/tOct0bUZGLaCjFNAeK8jHocu6CCRRrVH1RbUJnCnaaCIJkRhRBi1Q4BANASK4j3HftuvtPny01t/eXr/+9KdA/vrLb98+rjPzvu+//vb952+/JXRZlr6083k9r8vL6fT+cnnN0zL9klhnyKHyHr98XH/+8UMgy3r2oCBImOqIOTInOYIOSpP1df3pbfl5+5fn4CPHkX84n5SGeZG2a7eD8lk5sRpNo1JBKkoiKyQT0IjMSEIFoJfcJxDu1/v9dr3e7zshl5fL2/v768vLPsfSzef89uPjum3Xfffgx3aL601VrMl5Xb++v+2ZE3Ja+kbtY1YnHOTPPz4+9nFe1oQGRbPm4PREAJM5mSnYY367fcf6Skw85dA3ALWMkJBSAT8YeGoPOrMSStTpZargIWhRYlPwGuNlDSoLlyyJWkhlJB/hgVz66fX19e3ysrS+LMt/+g//cU7e77ubv/z57a9/XX69fuw+Z+Q+9m27/+uvv4Vqqp7m2oYr6e6ZFJURQ5a1nc40y9IULd1RwJkO7jlHMoXaBZ2X1+U5u1/NI1WtVtJCqRATEDWhE5BQoQBHqSwAAgUJH/pYVS17aTswC7vy9IhU1el+37Y5p6ldLufL5dz7Aoayvb28/vM//YdFl++3D7a2J3VZZFlS5b5tP//886/ffvv+cYfo6XxuZhlHZ6lNXy6nl/cvL6ezivEo4OmMFEaEI4PhoPZ27qeN+PNfvlal/Hc36Mzc3E9qao9rtADFobDESB63PxyK1oAkRZK1YqhVIxEuoBw3/RDwjJKx3ca+bRvJ0+X09vb2cjkvvdW4pam+XS7z/R3kAJScmffMmGHa3r989eT379/+9utvy+1WkK1aUxEzW8+n1/f3l+XM6fAgSpXLaXBw9+k1hxBGzuu2//mvf+3LMybJDUR4ukFE7NHeQOShjJHHlUokmSJqBARxgPQHlkdQuxYhIRPJnOFzekbu93tEnk+nL6/v7y+vp2XtarN2i8AMh1C7sUp2A5mqUGkAzufzj+v1tt83JkoUl2aCk52SEDFdmqfPiKgaT0kVp6bo7uNG35tOgdjw/FB7Sgydmbe5q3ahKFXzQD3qOB+h/iFgIZks2U4ARIjUvX0ke4wZrtoeoFXe79vcd5+zaX85vby/vr+czqaFWovndPdt3+77tsfYMnbmHmNEEFKyoefT6Xy5hHCPUfJxdc8YhSS3OZbZ66OOqs4ETLr7CBc7Sv3XL28/vV6+3a58CoLXAM7MrPFG5jzuCTiay4Liarm7tEDIQy1RPmWvAAAURkCYQY7p277f7pu7q8jr5eXl7X05nbUvEPFkQjy5Db9t2227f9yuG2Kqjjn26RBtja310+n0/v5qi133O8G6ZbAubIuIfd9Hb0aSLswinTgoZhKR6WOfv92v2PbxrzGI6c+Qw2oeVDnk5txdSDOrmUf7hDcfD5EKkwfE9IkZkwwxT9bdUmPGffjdQ1KWZTm9vp1e32xZUzQzx5wk933exriPcRvb5vsAhmLGjMd1GKJoqm+vL33t61w9ShMjkWxqnjl87mNvAoQb0M3ElIWRgNMDkMvlJdfTt19//fqPfzmdfgFuf3+DZo4aeAlKOkRJU4UB+lDDKPCdR293jIh+h+kIwBNO8eT0vA/fhs+U1szWcztdXPQ6huyjPMsjMtMjdqYrUiXBmZPCvljvq2kDmemn3tRkXfvwOef06UlvagQjYp/DQSXXEn+qBSsRqGprvS0wi3XJX76DHXhK2QRgrzsNBGKWLEXzasetOlEBUDje41aPGoHI45IuEfFkUCIxIu9j7l5Xm6z9fKLp7r7tO8m5Dw8vkczM3HwOpCsmGWTr
TWt8xXT3DJpIh5gYkJkzPCVqIsjpvm1UcDFtpgdDCkLRFA0axaytp8vb169Tirz/HIOWUs+x0lkjYQgeV++U+HmV+Mdgs1z0IbFWKkBBBjEjtjGGO1T72pfzqS2LM+9jT3cmx76ramRAZPq8j33LGGSN+0nMMRh7resL1UXMmpCWUIc4lZIjuMo+Z7gbkkszE+1GgiIhMoO3bdwTCLmsOK1vm+dzLvdsAIZHFE+D8KxVLgrVpa7HVCAfZ7+6p6M7PUjkxx0odI8xxhgjM9fTup7WvjQKtrnXxdoCzPAuPcDM3N139+mMRCDnDErMOX1G03Za1t5s7UuzxSOQs7clA5l5v++UCXCCQs6pJPu6tr4EMT234bf7+LbN+WPoLRzq1OnPaj2Hx3CnakIpUIixrktjKYsxJVVNPkXDj0uURCRFApEeGek+3AcZKljW3lojOH3sW8Gm0lor0k5pZYwxZzBTwrn73O47TCv8mMCsXc6v5/NFRPcxR1CM/WS7+9xGTE+Bgem+kyC/fP3alpPPuc/pAW1rW/o2c7/7deQIzPkUMUEA1/t267q0tvQGNBMlHgJCydqlP0SuqzqBotTDBVSJECemj4gBSbW671PmHAxB0udMz2ZtWRZVm9O3fUyvvSfJFJ8ce8Qk8mClQswTTuweAG/b/u3jVljqNmdmkL5vdxF007Xb8Lxv4/Ju9ExYSqOqNOSY19sIWU7n196fIb7eAFy3/bZY9GS1H8pFH/DNIeiAkqyKuhM2XWl2TDkFIiXGHGRkALTeknHfR+t927bwaNoIlaBkzozazRFVFfPMMekBoGUWIKsz4Pd9n0lCRW+3+/ePj22MSaagLx1z8znW3uy0eEhkBvBxvae2oAznPmKGJgxiQRWX52X56TE9VNQsLEND+6J5UDuL+8A4LqjSx+sqVA+PKZykwHH8JT0GCYhn7mOObYp6t7SRqk2b7Y5tHp1ZzLjN2HefPp2pNd/KPMS1POgxxtzHHOEUkaXZmAumCqy1Zq3m1pEY7mitLt+JhAdIExUJTWpS/j+M8f+XQceMGWyNIwJjquj0QGvHpLhKzyAyqprSuhOMx+JIElBJVSccJGS6h0fri2/z+4/77XY37acTeyfhEP1xv9/ue5AJXu/XuijoEGs/NODApJIxI8ZIz0yIiS2tQX3Odm6X03o+n5ZmQEJkeqyiY8bwSAhhQXqSbFpivU+Z0zUApSoDMCLDj1t3SFQdipIMFEhNd2u1oPZYSK9bGkSpVqpWgZyeM2Khbtv+cb3dr5vp3O9Oyn0bM7nNuXsQQsOWu88QQB4KuglkJJLndY1txEwjTt1eLufL2+t6Pm1z9JOdT701E6JE2dwjEvd930dJMiOSXqIeZjy0KZ5i0BGcHgIVUWhs+4DWILkvZk1hgClMJCKkGCUHogvPECgEMIO1GL67u2dEgjM917bq2RBwz7GN++2+l1odQYiDGybAbpJg+idIwLrZD86Tytvp/PX97f3L++v7W1uXYOzcg35IGYJmqk2v99t9j+GYIR6cnpmamsBDAvU5Bo1jUS4jU9McMYfX0ixaZ6tbdVjDz7p0ZWaEVyRIVRaDHGIUrXt1BcrI87J+Ob8pNT2vH/dxnl9e/T7nNuc2fJ9zxOwi59P6ejkDxRSHmCYYHpzeX+V1Wb+8vL6cT5eX83JeApwpc+x1GVvdSLSua4RfP7aRcF08dXjMoOiK2ibB72y1v7tBS/1nRqqGmgV0DC9CokJUmlo1mmKlych8SMuWvJNk5mAGE8clMinQJvp+eXl/eT21E4PbfRdRiu3u32+37x8f1/t9n4Pwl/Pl5fLSrEGUKglsc9zud4OcWrss66JqNUsQqgEeqmy9AYzwktedY0z3PUgTynIMDFuJAHhRyZ9k0Dh0B3KKtJaejJLjZkVMCkyYhl4Y7QEj10wESqRn7mPMCGYqxCME2ft6WfrLsp6WddGel0tvi7UlVT+27cftdt/3fd/hs6n1vpg1iFBlZN7Gfr9svfXzsmhmzhE+HeExxIxy8HxBjVCS+75Pd/fcdk9DGnx6uEjLLM7ap7rfEww6PTzDUJva7RAay+NuRwXBpqDhMbbn0WsCMNUEI2PMsY9ZY6j02Vq/nE9vL2+vl4sRktlKqBp5Xtbz6fTl/W34jDl1n+6HynKSzhyZ76dT6heIqGBs9w1BBOcMj97NoF1bIKkQTXffNt8iAk2WVW09v/70f/yXf5Tl8u1j+2//8+e2nO/buP32NA/NmBEhEpk6XaxlflKOpZiCTSXJxpIkOFS5zSwYSPF0d9/3ncxu2s3eXi9f3i7rApEBlHxF5t17XxDnvqyvrUu31O6ErAKg7mGPTCdaay7cw+9j+2Eyazp0iAkLnZ+hJSkBddJTpPfl9JK6tNPp65/+NFJ//X7/z//8z9D+//63/46nxVAXHSKSaRCJoCiThozwdKSCiqWb+FzYeo0ieBDw3Gd59gifPplp0pfWlt5bs+OK+EOONXcf29hv96335XS+nE4nU2tLE0i1DZEpKqdlnZFmFj6G+3CfGV5cNpNgzvCQrL8kKSq1r9asLeuJbbVlKSm8Hx8//vLXv377cXWfdanjMwxqZn1ZkamZAdz3rUT75xSfylwVzGwlgVvL4Hzc8eXTM1lKq3WnBURa77Wh4xHIrD2bcq+ImJ7bnPc5l21rZl27mtUFqiSttZV53/dAOnjdbz/uP277tsdMoC2LNAGUwXAGahCP4XMmVFRa7+eL9NWTfT1Zb2OMX3755cfHD3laUhoe25jCLMdT97X13iwFHtzHFKGHFZOJhOnRQLVEPNSXPeKIqa31ZVHVGu0CSESRKRKkllC6jxG3udcUcFmWcHcPU1vWtfk+w29jv+/7j/t1D4cKVGmSKiPDyRG+z30fc0RumfvM0B6CkenTkSPtri1Pl/Ov3779uH6MsU9/hkBjA+CR0wOgiYgaOZ0psAakYGTk3XeVtTcSHtmaFXYHUYGRmYmgQLT11pe1r4u2WnQqHYhjmeTYuBMk6Hlcw8JMnduYc993EOXdntzmvs8ZYF/XZT1pt1pkighn7D62fd/dE6badJHh/OW3H/eff0wqrIstH7f95f2rSKs1nedINzUAB6WJGZAuptZQshgJPLghGiQIVaoEgBrWiy2LpYgXCUeg1nrvbeml/MTHbXdCRkYmCjhnAamlGSCy+djnuG236V7kXy9xbaCvaz8tfe1iZqYQ7GOnzyRLPa5uwqAYBY7MAygRgMtyysjpW+2LPMOcwP8CdBbcY3UBMa0AAAAASUVORK5CYII=\n",
279
+ "text/plain": [
280
+ "<PIL.Image.Image image mode=RGB size=112x112 at 0x7FCAB5CB4438>"
281
+ ]
282
+ },
283
+ "execution_count": 43,
284
+ "metadata": {},
285
+ "output_type": "execute_result"
286
+ }
287
+ ],
288
+ "source": [
289
+ "Image.fromarray(dst_img[...,::-1])"
290
+ ]
291
+ }
292
+ ],
293
+ "metadata": {
294
+ "hide_input": false,
295
+ "kernelspec": {
296
+ "display_name": "Python 3",
297
+ "language": "python",
298
+ "name": "python3"
299
+ },
300
+ "language_info": {
301
+ "codemirror_mode": {
302
+ "name": "ipython",
303
+ "version": 3
304
+ },
305
+ "file_extension": ".py",
306
+ "mimetype": "text/x-python",
307
+ "name": "python",
308
+ "nbconvert_exporter": "python",
309
+ "pygments_lexer": "ipython3",
310
+ "version": "3.6.4"
311
+ }
312
+ },
313
+ "nbformat": 4,
314
+ "nbformat_minor": 2
315
+ }
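Taken together, the cells above walk through one complete alignment pass: decode an image from the MXNet record, detect faces and landmarks with MTCNN, reshape the flat landmark row into five (x, y) pairs, and warp the face to a square 112x112 crop. The sketch below condenses that flow for an ordinary image file instead of a record; the file path is a placeholder, the import paths are assumed from the package layout, and it warps the full image with absolute landmark coordinates rather than cropping the detection box first, which gives essentially the same aligned output.

import numpy as np
from PIL import Image

from mtcnn_pytorch.src import detect_faces
from mtcnn_pytorch.src.align_trans import get_reference_facial_points, warp_and_crop_face

image = Image.open("face.jpg").convert("RGB")                       # placeholder input
bounding_boxes, landmarks = detect_faces(image)                     # one row per detected face

reference_pts = get_reference_facial_points(default_square=True)    # square 112x112 template
img_bgr = np.array(image)[..., ::-1]                                # the notebook warps BGR arrays

aligned_faces = []
for landmark in landmarks:
    # detect_faces returns each landmark row flattened as [x1..x5, y1..y5]
    facial5points = [[landmark[j], landmark[j + 5]] for j in range(5)]
    face_112 = warp_and_crop_face(img_bgr, facial5points, reference_pts, crop_size=(112, 112))
    aligned_faces.append(Image.fromarray(face_112[..., ::-1]))      # back to RGB for PIL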
src/third_party/edgeface/face_alignment/mtcnn_pytorch/src/__init__.py ADDED
@@ -0,0 +1,2 @@
1
+ from .visualization_utils import show_bboxes
2
+ from .detector import detect_faces
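The two re-exports above are the package's public entry points: detect_faces runs the MTCNN cascade and show_bboxes draws its detections back onto the image. A small usage sketch, with the image path as a placeholder and the show_bboxes signature assumed from the usual mtcnn_pytorch visualization utilities:

from PIL import Image
from mtcnn_pytorch.src import detect_faces, show_bboxes

img = Image.open("example.jpg")                 # placeholder image
boxes, landmarks = detect_faces(img)            # Nx5 boxes (x1, y1, x2, y2, score), Nx10 landmarks
annotated = show_bboxes(img, boxes, landmarks)  # assumed to return a copy with boxes and landmarks drawn
annotated.show()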
src/third_party/edgeface/face_alignment/mtcnn_pytorch/src/align_trans.py ADDED
@@ -0,0 +1,304 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Created on Mon Apr 24 15:43:29 2017
4
+ @author: zhaoy
5
+ """
6
+ import numpy as np
7
+ import cv2
8
+
9
+ # from scipy.linalg import lstsq
10
+ # from scipy.ndimage import geometric_transform # , map_coordinates
11
+
12
+ from mtcnn_pytorch.src.matlab_cp2tform import get_similarity_transform_for_cv2
13
+
14
+ # reference facial points, a list of coordinates (x,y)
15
+ REFERENCE_FACIAL_POINTS = [
16
+ [30.29459953, 51.69630051],
17
+ [65.53179932, 51.50139999],
18
+ [48.02519989, 71.73660278],
19
+ [33.54930115, 92.3655014],
20
+ [62.72990036, 92.20410156]
21
+ ]
22
+
23
+ DEFAULT_CROP_SIZE = (96, 112)
24
+
25
+
26
+ class FaceWarpException(Exception):
27
+ def __str__(self):
28
+ return 'In File {}:{}'.format(
29
+ __file__, super().__str__())
30
+
31
+
32
+ def get_reference_facial_points(output_size=None,
33
+ inner_padding_factor=0.0,
34
+ outer_padding=(0, 0),
35
+ default_square=False):
36
+ """
37
+ Function:
38
+ ----------
39
+ get reference 5 key points according to crop settings:
40
+ 0. Set default crop_size:
41
+ if default_square:
42
+ crop_size = (112, 112)
43
+ else:
44
+ crop_size = (96, 112)
45
+ 1. Pad the crop_size by inner_padding_factor in each side;
46
+ 2. Resize crop_size into (output_size - outer_padding*2),
47
+ pad into output_size with outer_padding;
48
+ 3. Output reference_5point;
49
+ Parameters:
50
+ ----------
51
+ @output_size: (w, h) or None
52
+ size of aligned face image
53
+ @inner_padding_factor: (w_factor, h_factor)
54
+ padding factor for inner (w, h)
55
+ @outer_padding: (w_pad, h_pad)
56
+ extra padding (in pixels) added outside the resized crop, (w_pad, h_pad)
57
+ @default_square: True or False
58
+ if True:
59
+ default crop_size = (112, 112)
60
+ else:
61
+ default crop_size = (96, 112);
62
+ !!! make sure, if output_size is not None:
63
+ (output_size - outer_padding)
64
+ = some_scale * (default crop_size * (1.0 + inner_padding_factor))
65
+ Returns:
66
+ ----------
67
+ @reference_5point: 5x2 np.array
68
+ each row is a pair of transformed coordinates (x, y)
69
+ """
70
+ #print('\n===> get_reference_facial_points():')
71
+
72
+ #print('---> Params:')
73
+ #print(' output_size: ', output_size)
74
+ #print(' inner_padding_factor: ', inner_padding_factor)
75
+ #print(' outer_padding:', outer_padding)
76
+ #print(' default_square: ', default_square)
77
+
78
+ tmp_5pts = np.array(REFERENCE_FACIAL_POINTS)
79
+ tmp_crop_size = np.array(DEFAULT_CROP_SIZE)
80
+
81
+ # 0) make the inner region a square
82
+ if default_square:
83
+ size_diff = max(tmp_crop_size) - tmp_crop_size
84
+ tmp_5pts += size_diff / 2
85
+ tmp_crop_size += size_diff
86
+
87
+ #print('---> default:')
88
+ #print(' crop_size = ', tmp_crop_size)
89
+ #print(' reference_5pts = ', tmp_5pts)
90
+
91
+ if (output_size and
92
+ output_size[0] == tmp_crop_size[0] and
93
+ output_size[1] == tmp_crop_size[1]):
94
+ #print('output_size == DEFAULT_CROP_SIZE {}: return default reference points'.format(tmp_crop_size))
95
+ return tmp_5pts
96
+
97
+ if (inner_padding_factor == 0 and
98
+ outer_padding == (0, 0)):
99
+ if output_size is None:
100
+ #print('No paddings to do: return default reference points')
101
+ return tmp_5pts
102
+ else:
103
+ raise FaceWarpException(
104
+ 'No paddings to do, output_size must be None or {}'.format(tmp_crop_size))
105
+
106
+ # check output size
107
+ if not (0 <= inner_padding_factor <= 1.0):
108
+ raise FaceWarpException('Not (0 <= inner_padding_factor <= 1.0)')
109
+
110
+ if ((inner_padding_factor > 0 or outer_padding[0] > 0 or outer_padding[1] > 0)
111
+ and output_size is None):
112
+ output_size = (tmp_crop_size *
113
+ (1 + inner_padding_factor * 2)).astype(np.int32)
114
+ output_size += np.array(outer_padding)
115
+ #print(' deduced from paddings, output_size = ', output_size)
116
+
117
+ if not (outer_padding[0] < output_size[0]
118
+ and outer_padding[1] < output_size[1]):
119
+ raise FaceWarpException('Not (outer_padding[0] < output_size[0]'
120
+ 'and outer_padding[1] < output_size[1])')
121
+
122
+ # 1) pad the inner region according inner_padding_factor
123
+ #print('---> STEP1: pad the inner region according inner_padding_factor')
124
+ if inner_padding_factor > 0:
125
+ size_diff = tmp_crop_size * inner_padding_factor * 2
126
+ tmp_5pts += size_diff / 2
127
+ tmp_crop_size += np.round(size_diff).astype(np.int32)
128
+
129
+ #print(' crop_size = ', tmp_crop_size)
130
+ #print(' reference_5pts = ', tmp_5pts)
131
+
132
+ # 2) resize the padded inner region
133
+ #print('---> STEP2: resize the padded inner region')
134
+ size_bf_outer_pad = np.array(output_size) - np.array(outer_padding) * 2
135
+ #print(' crop_size = ', tmp_crop_size)
136
+ #print(' size_bf_outer_pad = ', size_bf_outer_pad)
137
+
138
+ if size_bf_outer_pad[0] * tmp_crop_size[1] != size_bf_outer_pad[1] * tmp_crop_size[0]:
139
+ raise FaceWarpException('Must have (output_size - outer_padding)'
140
+ '= some_scale * (crop_size * (1.0 + inner_padding_factor)')
141
+
142
+ scale_factor = size_bf_outer_pad[0].astype(np.float32) / tmp_crop_size[0]
143
+ #print(' resize scale_factor = ', scale_factor)
144
+ tmp_5pts = tmp_5pts * scale_factor
145
+ # size_diff = tmp_crop_size * (scale_factor - min(scale_factor))
146
+ # tmp_5pts = tmp_5pts + size_diff / 2
147
+ tmp_crop_size = size_bf_outer_pad
148
+ #print(' crop_size = ', tmp_crop_size)
149
+ #print(' reference_5pts = ', tmp_5pts)
150
+
151
+ # 3) add outer_padding to make output_size
152
+ reference_5point = tmp_5pts + np.array(outer_padding)
153
+ tmp_crop_size = output_size
154
+ #print('---> STEP3: add outer_padding to make output_size')
155
+ #print(' crop_size = ', tmp_crop_size)
156
+ #print(' reference_5pts = ', tmp_5pts)
157
+
158
+ #print('===> end get_reference_facial_points\n')
159
+
160
+ return reference_5point
161
+
162
+
163
+ def get_affine_transform_matrix(src_pts, dst_pts):
164
+ """
165
+ Function:
166
+ ----------
167
+ get affine transform matrix 'tfm' from src_pts to dst_pts
168
+ Parameters:
169
+ ----------
170
+ @src_pts: Kx2 np.array
171
+ source points matrix, each row is a pair of coordinates (x, y)
172
+ @dst_pts: Kx2 np.array
173
+ destination points matrix, each row is a pair of coordinates (x, y)
174
+ Returns:
175
+ ----------
176
+ @tfm: 2x3 np.array
177
+ transform matrix from src_pts to dst_pts
178
+ """
179
+
180
+ tfm = np.float32([[1, 0, 0], [0, 1, 0]])
181
+ n_pts = src_pts.shape[0]
182
+ ones = np.ones((n_pts, 1), src_pts.dtype)
183
+ src_pts_ = np.hstack([src_pts, ones])
184
+ dst_pts_ = np.hstack([dst_pts, ones])
185
+
186
+ # #print(('src_pts_:\n' + str(src_pts_))
187
+ # #print(('dst_pts_:\n' + str(dst_pts_))
188
+
189
+ A, res, rank, s = np.linalg.lstsq(src_pts_, dst_pts_, rcond=None)
190
+
191
+ # #print(('np.linalg.lstsq return A: \n' + str(A))
192
+ # #print(('np.linalg.lstsq return res: \n' + str(res))
193
+ # #print(('np.linalg.lstsq return rank: \n' + str(rank))
194
+ # #print(('np.linalg.lstsq return s: \n' + str(s))
195
+
196
+ if rank == 3:
197
+ tfm = np.float32([
198
+ [A[0, 0], A[1, 0], A[2, 0]],
199
+ [A[0, 1], A[1, 1], A[2, 1]]
200
+ ])
201
+ elif rank == 2:
202
+ tfm = np.float32([
203
+ [A[0, 0], A[1, 0], 0],
204
+ [A[0, 1], A[1, 1], 0]
205
+ ])
206
+
207
+ return tfm
208
+
209
+
210
+ def warp_and_crop_face(src_img,
211
+ facial_pts,
212
+ reference_pts=None,
213
+ crop_size=(96, 112),
214
+ align_type='similarity'):
215
+ """
216
+ Function:
217
+ ----------
218
+ warp src_img so that facial_pts line up with reference_pts, then crop to crop_size
219
+ Parameters:
220
+ ----------
221
+ @src_img: HxWxC np.array
222
+ input image
223
+ @facial_pts: could be
224
+ 1)a list of K coordinates (x,y)
225
+ or
226
+ 2) Kx2 or 2xK np.array
227
+ each row or col is a pair of coordinates (x, y)
228
+ @reference_pts: could be
229
+ 1) a list of K coordinates (x,y)
230
+ or
231
+ 2) Kx2 or 2xK np.array
232
+ each row or col is a pair of coordinates (x, y)
233
+ or
234
+ 3) None
235
+ if None, use default reference facial points
236
+ @crop_size: (w, h)
237
+ output face image size
238
+ @align_type: transform type, could be one of
239
+ 1) 'similarity': use similarity transform
240
+ 2) 'cv2_affine': use the first 3 points to do affine transform,
241
+ by calling cv2.getAffineTransform()
242
+ 3) 'affine': use all points to do affine transform
243
+ Returns:
244
+ ----------
245
+ @face_img: output face image with size (w, h) = @crop_size
246
+ """
247
+
248
+ if reference_pts is None:
249
+ if crop_size[0] == 96 and crop_size[1] == 112:
250
+ reference_pts = REFERENCE_FACIAL_POINTS
251
+ else:
252
+ default_square = False
253
+ inner_padding_factor = 0
254
+ outer_padding = (0, 0)
255
+ output_size = crop_size
256
+
257
+ reference_pts = get_reference_facial_points(output_size,
258
+ inner_padding_factor,
259
+ outer_padding,
260
+ default_square)
261
+
262
+ ref_pts = np.float32(reference_pts)
263
+ ref_pts_shp = ref_pts.shape
264
+ if max(ref_pts_shp) < 3 or min(ref_pts_shp) != 2:
265
+ raise FaceWarpException(
266
+ 'reference_pts.shape must be (K,2) or (2,K) and K>2')
267
+
268
+ if ref_pts_shp[0] == 2:
269
+ ref_pts = ref_pts.T
270
+
271
+ src_pts = np.float32(facial_pts)
272
+ src_pts_shp = src_pts.shape
273
+ if max(src_pts_shp) < 3 or min(src_pts_shp) != 2:
274
+ raise FaceWarpException(
275
+ 'facial_pts.shape must be (K,2) or (2,K) and K>2')
276
+
277
+ if src_pts_shp[0] == 2:
278
+ src_pts = src_pts.T
279
+
280
+ # #print('--->src_pts:\n', src_pts
281
+ # #print('--->ref_pts\n', ref_pts
282
+
283
+ if src_pts.shape != ref_pts.shape:
284
+ raise FaceWarpException(
285
+ 'facial_pts and reference_pts must have the same shape')
286
+
287
+ if align_type == 'cv2_affine':
288
+ tfm = cv2.getAffineTransform(src_pts[0:3], ref_pts[0:3])
289
+ # #print(('cv2.getAffineTransform() returns tfm=\n' + str(tfm))
290
+ elif align_type == 'affine':
291
+ tfm = get_affine_transform_matrix(src_pts, ref_pts)
292
+ # #print(('get_affine_transform_matrix() returns tfm=\n' + str(tfm))
293
+ else:
294
+ tfm = get_similarity_transform_for_cv2(src_pts, ref_pts)
295
+ # #print(('get_similarity_transform_for_cv2() returns tfm=\n' + str(tfm))
296
+
297
+ # #print('--->Transform matrix: '
298
+ # #print(('type(tfm):' + str(type(tfm)))
299
+ # #print(('tfm.dtype:' + str(tfm.dtype))
300
+ # #print( tfm
301
+
302
+ face_img = cv2.warpAffine(src_img, tfm, (crop_size[0], crop_size[1]))
303
+
304
+ return face_img
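As a quick sanity check on the geometry above: with default_square=True and zero padding, get_reference_facial_points simply centres the 96x112 template in a 112x112 square, so every reference x-coordinate shifts by (112 - 96) / 2 = 8 pixels while the y-coordinates are unchanged. A short verification sketch (import path assumed from the package layout):

import numpy as np
from mtcnn_pytorch.src.align_trans import REFERENCE_FACIAL_POINTS, get_reference_facial_points

square_pts = get_reference_facial_points(default_square=True)
expected = np.array(REFERENCE_FACIAL_POINTS) + np.array([8.0, 0.0])  # x shifted by (112 - 96) / 2
assert np.allclose(square_pts, expected)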