diff --git a/.gitattributes b/.gitattributes index a6344aac8c09253b3b630fb776ae94478aa0275b..37052978977a6418dce6eea529a5353b380fd064 100644 --- a/.gitattributes +++ b/.gitattributes @@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text *.zip filter=lfs diff=lfs merge=lfs -text *.zst filter=lfs diff=lfs merge=lfs -text *tfevents* filter=lfs diff=lfs merge=lfs -text +assets/demo2.gif filter=lfs diff=lfs merge=lfs -text diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..fc6039f2c5a753a7ce0b264b019ed26692ec6973 --- /dev/null +++ b/.gitignore @@ -0,0 +1,4 @@ +*.pyc +*.pt +!dataset/*/data_stats.pth +dataset \ No newline at end of file diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000000000000000000000000000000000..3232ed665566ec047ce55a929db1581dbda266a1 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,80 @@ +# Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to make participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, sex characteristics, gender identity and expression, +level of experience, education, socio-economic status, nationality, personal +appearance, race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or +advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic +address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a +professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies within all project spaces, and it also applies when +an individual is representing the project or its community in public spaces. +Examples of representing a project or community include using an official +project e-mail address, posting via an official social media account, or acting +as an appointed representative at an online or offline event. Representation of +a project may be further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when there is a +reasonable belief that an individual's behavior may have a negative impact on +the project or its community. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at . All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see +https://www.contributor-covenant.org/faq diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000000000000000000000000000000000000..32bbaee9d2b29b9d8cd19bee046b93f685db47fd --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,31 @@ +# Contributing to audio2photoreal +We want to make contributing to this project as easy and transparent as +possible. + +## Pull Requests +We actively welcome your pull requests. + +1. Fork the repo and create your branch from `main`. +2. If you've added code that should be tested, add tests. +3. If you've changed APIs, update the documentation. +4. Ensure the test suite passes. +5. Make sure your code lints. +6. If you haven't already, complete the Contributor License Agreement ("CLA"). + +## Contributor License Agreement ("CLA") +In order to accept your pull request, we need you to submit a CLA. You only need +to do this once to work on any of Meta's open source projects. + +Complete your CLA here: + +## Issues +We use GitHub issues to track public bugs. Please ensure your description is +clear and has sufficient instructions to be able to reproduce the issue. + +Meta has a [bounty program](https://www.facebook.com/whitehat/) for the safe +disclosure of security bugs. In those cases, please go through the process +outlined on that page and do not file a public issue. + +## License +By contributing to audio2photoreal, you agree that your contributions will be licensed +under the LICENSE file in the root directory of this source tree. diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..320e3396e0f4867fc209f5c7b5cfe94a84f15dad --- /dev/null +++ b/LICENSE @@ -0,0 +1,400 @@ +Attribution-NonCommercial 4.0 International + +======================================================================= + +Creative Commons Corporation ("Creative Commons") is not a law firm and +does not provide legal services or legal advice. Distribution of +Creative Commons public licenses does not create a lawyer-client or +other relationship. Creative Commons makes its licenses and related +information available on an "as-is" basis. Creative Commons gives no +warranties regarding its licenses, any material licensed under their +terms and conditions, or any related information. Creative Commons +disclaims all liability for damages resulting from their use to the +fullest extent possible. 
+ +Using Creative Commons Public Licenses + +Creative Commons public licenses provide a standard set of terms and +conditions that creators and other rights holders may use to share +original works of authorship and other material subject to copyright +and certain other rights specified in the public license below. The +following considerations are for informational purposes only, are not +exhaustive, and do not form part of our licenses. + + Considerations for licensors: Our public licenses are + intended for use by those authorized to give the public + permission to use material in ways otherwise restricted by + copyright and certain other rights. Our licenses are + irrevocable. Licensors should read and understand the terms + and conditions of the license they choose before applying it. + Licensors should also secure all rights necessary before + applying our licenses so that the public can reuse the + material as expected. Licensors should clearly mark any + material not subject to the license. This includes other CC- + licensed material, or material used under an exception or + limitation to copyright. More considerations for licensors: + wiki.creativecommons.org/Considerations_for_licensors + + Considerations for the public: By using one of our public + licenses, a licensor grants the public permission to use the + licensed material under specified terms and conditions. If + the licensor's permission is not necessary for any reason--for + example, because of any applicable exception or limitation to + copyright--then that use is not regulated by the license. Our + licenses grant only permissions under copyright and certain + other rights that a licensor has authority to grant. Use of + the licensed material may still be restricted for other + reasons, including because others have copyright or other + rights in the material. A licensor may make special requests, + such as asking that all changes be marked or described. + Although not required by our licenses, you are encouraged to + respect those requests where reasonable. More_considerations + for the public: + wiki.creativecommons.org/Considerations_for_licensees + +======================================================================= + +Creative Commons Attribution-NonCommercial 4.0 International Public +License + +By exercising the Licensed Rights (defined below), You accept and agree +to be bound by the terms and conditions of this Creative Commons +Attribution-NonCommercial 4.0 International Public License ("Public +License"). To the extent this Public License may be interpreted as a +contract, You are granted the Licensed Rights in consideration of Your +acceptance of these terms and conditions, and the Licensor grants You +such rights in consideration of benefits the Licensor receives from +making the Licensed Material available under these terms and +conditions. + +Section 1 -- Definitions. + + a. Adapted Material means material subject to Copyright and Similar + Rights that is derived from or based upon the Licensed Material + and in which the Licensed Material is translated, altered, + arranged, transformed, or otherwise modified in a manner requiring + permission under the Copyright and Similar Rights held by the + Licensor. For purposes of this Public License, where the Licensed + Material is a musical work, performance, or sound recording, + Adapted Material is always produced where the Licensed Material is + synched in timed relation with a moving image. + + b. 
Adapter's License means the license You apply to Your Copyright + and Similar Rights in Your contributions to Adapted Material in + accordance with the terms and conditions of this Public License. + + c. Copyright and Similar Rights means copyright and/or similar rights + closely related to copyright including, without limitation, + performance, broadcast, sound recording, and Sui Generis Database + Rights, without regard to how the rights are labeled or + categorized. For purposes of this Public License, the rights + specified in Section 2(b)(1)-(2) are not Copyright and Similar + Rights. + d. Effective Technological Measures means those measures that, in the + absence of proper authority, may not be circumvented under laws + fulfilling obligations under Article 11 of the WIPO Copyright + Treaty adopted on December 20, 1996, and/or similar international + agreements. + + e. Exceptions and Limitations means fair use, fair dealing, and/or + any other exception or limitation to Copyright and Similar Rights + that applies to Your use of the Licensed Material. + + f. Licensed Material means the artistic or literary work, database, + or other material to which the Licensor applied this Public + License. + + g. Licensed Rights means the rights granted to You subject to the + terms and conditions of this Public License, which are limited to + all Copyright and Similar Rights that apply to Your use of the + Licensed Material and that the Licensor has authority to license. + + h. Licensor means the individual(s) or entity(ies) granting rights + under this Public License. + + i. NonCommercial means not primarily intended for or directed towards + commercial advantage or monetary compensation. For purposes of + this Public License, the exchange of the Licensed Material for + other material subject to Copyright and Similar Rights by digital + file-sharing or similar means is NonCommercial provided there is + no payment of monetary compensation in connection with the + exchange. + + j. Share means to provide material to the public by any means or + process that requires permission under the Licensed Rights, such + as reproduction, public display, public performance, distribution, + dissemination, communication, or importation, and to make material + available to the public including in ways that members of the + public may access the material from a place and at a time + individually chosen by them. + + k. Sui Generis Database Rights means rights other than copyright + resulting from Directive 96/9/EC of the European Parliament and of + the Council of 11 March 1996 on the legal protection of databases, + as amended and/or succeeded, as well as other essentially + equivalent rights anywhere in the world. + + l. You means the individual or entity exercising the Licensed Rights + under this Public License. Your has a corresponding meaning. + +Section 2 -- Scope. + + a. License grant. + + 1. Subject to the terms and conditions of this Public License, + the Licensor hereby grants You a worldwide, royalty-free, + non-sublicensable, non-exclusive, irrevocable license to + exercise the Licensed Rights in the Licensed Material to: + + a. reproduce and Share the Licensed Material, in whole or + in part, for NonCommercial purposes only; and + + b. produce, reproduce, and Share Adapted Material for + NonCommercial purposes only. + + 2. Exceptions and Limitations. 
For the avoidance of doubt, where + Exceptions and Limitations apply to Your use, this Public + License does not apply, and You do not need to comply with + its terms and conditions. + + 3. Term. The term of this Public License is specified in Section + 6(a). + + 4. Media and formats; technical modifications allowed. The + Licensor authorizes You to exercise the Licensed Rights in + all media and formats whether now known or hereafter created, + and to make technical modifications necessary to do so. The + Licensor waives and/or agrees not to assert any right or + authority to forbid You from making technical modifications + necessary to exercise the Licensed Rights, including + technical modifications necessary to circumvent Effective + Technological Measures. For purposes of this Public License, + simply making modifications authorized by this Section 2(a) + (4) never produces Adapted Material. + + 5. Downstream recipients. + + a. Offer from the Licensor -- Licensed Material. Every + recipient of the Licensed Material automatically + receives an offer from the Licensor to exercise the + Licensed Rights under the terms and conditions of this + Public License. + + b. No downstream restrictions. You may not offer or impose + any additional or different terms or conditions on, or + apply any Effective Technological Measures to, the + Licensed Material if doing so restricts exercise of the + Licensed Rights by any recipient of the Licensed + Material. + + 6. No endorsement. Nothing in this Public License constitutes or + may be construed as permission to assert or imply that You + are, or that Your use of the Licensed Material is, connected + with, or sponsored, endorsed, or granted official status by, + the Licensor or others designated to receive attribution as + provided in Section 3(a)(1)(A)(i). + + b. Other rights. + + 1. Moral rights, such as the right of integrity, are not + licensed under this Public License, nor are publicity, + privacy, and/or other similar personality rights; however, to + the extent possible, the Licensor waives and/or agrees not to + assert any such rights held by the Licensor to the limited + extent necessary to allow You to exercise the Licensed + Rights, but not otherwise. + + 2. Patent and trademark rights are not licensed under this + Public License. + + 3. To the extent possible, the Licensor waives any right to + collect royalties from You for the exercise of the Licensed + Rights, whether directly or through a collecting society + under any voluntary or waivable statutory or compulsory + licensing scheme. In all other cases the Licensor expressly + reserves any right to collect such royalties, including when + the Licensed Material is used other than for NonCommercial + purposes. + +Section 3 -- License Conditions. + +Your exercise of the Licensed Rights is expressly made subject to the +following conditions. + + a. Attribution. + + 1. If You Share the Licensed Material (including in modified + form), You must: + + a. retain the following if it is supplied by the Licensor + with the Licensed Material: + + i. identification of the creator(s) of the Licensed + Material and any others designated to receive + attribution, in any reasonable manner requested by + the Licensor (including by pseudonym if + designated); + + ii. a copyright notice; + + iii. a notice that refers to this Public License; + + iv. a notice that refers to the disclaimer of + warranties; + + v. a URI or hyperlink to the Licensed Material to the + extent reasonably practicable; + + b. 
indicate if You modified the Licensed Material and + retain an indication of any previous modifications; and + + c. indicate the Licensed Material is licensed under this + Public License, and include the text of, or the URI or + hyperlink to, this Public License. + + 2. You may satisfy the conditions in Section 3(a)(1) in any + reasonable manner based on the medium, means, and context in + which You Share the Licensed Material. For example, it may be + reasonable to satisfy the conditions by providing a URI or + hyperlink to a resource that includes the required + information. + + 3. If requested by the Licensor, You must remove any of the + information required by Section 3(a)(1)(A) to the extent + reasonably practicable. + + 4. If You Share Adapted Material You produce, the Adapter's + License You apply must not prevent recipients of the Adapted + Material from complying with this Public License. + +Section 4 -- Sui Generis Database Rights. + +Where the Licensed Rights include Sui Generis Database Rights that +apply to Your use of the Licensed Material: + + a. for the avoidance of doubt, Section 2(a)(1) grants You the right + to extract, reuse, reproduce, and Share all or a substantial + portion of the contents of the database for NonCommercial purposes + only; + + b. if You include all or a substantial portion of the database + contents in a database in which You have Sui Generis Database + Rights, then the database in which You have Sui Generis Database + Rights (but not its individual contents) is Adapted Material; and + + c. You must comply with the conditions in Section 3(a) if You Share + all or a substantial portion of the contents of the database. + +For the avoidance of doubt, this Section 4 supplements and does not +replace Your obligations under this Public License where the Licensed +Rights include other Copyright and Similar Rights. + +Section 5 -- Disclaimer of Warranties and Limitation of Liability. + + a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE + EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS + AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF + ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS, + IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION, + WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR + PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, + ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT + KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT + ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU. + + b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE + TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, + NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT, + INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES, + COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR + USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN + ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR + DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR + IN PART, THIS LIMITATION MAY NOT APPLY TO YOU. + + c. The disclaimer of warranties and limitation of liability provided + above shall be interpreted in a manner that, to the extent + possible, most closely approximates an absolute disclaimer and + waiver of all liability. + +Section 6 -- Term and Termination. + + a. This Public License applies for the term of the Copyright and + Similar Rights licensed here. 
However, if You fail to comply with + this Public License, then Your rights under this Public License + terminate automatically. + + b. Where Your right to use the Licensed Material has terminated under + Section 6(a), it reinstates: + + 1. automatically as of the date the violation is cured, provided + it is cured within 30 days of Your discovery of the + violation; or + + 2. upon express reinstatement by the Licensor. + + For the avoidance of doubt, this Section 6(b) does not affect any + right the Licensor may have to seek remedies for Your violations + of this Public License. + + c. For the avoidance of doubt, the Licensor may also offer the + Licensed Material under separate terms or conditions or stop + distributing the Licensed Material at any time; however, doing so + will not terminate this Public License. + + d. Sections 1, 5, 6, 7, and 8 survive termination of this Public + License. + +Section 7 -- Other Terms and Conditions. + + a. The Licensor shall not be bound by any additional or different + terms or conditions communicated by You unless expressly agreed. + + b. Any arrangements, understandings, or agreements regarding the + Licensed Material not stated herein are separate from and + independent of the terms and conditions of this Public License. + +Section 8 -- Interpretation. + + a. For the avoidance of doubt, this Public License does not, and + shall not be interpreted to, reduce, limit, restrict, or impose + conditions on any use of the Licensed Material that could lawfully + be made without permission under this Public License. + + b. To the extent possible, if any provision of this Public License is + deemed unenforceable, it shall be automatically reformed to the + minimum extent necessary to make it enforceable. If the provision + cannot be reformed, it shall be severed from this Public License + without affecting the enforceability of the remaining terms and + conditions. + + c. No term or condition of this Public License will be waived and no + failure to comply consented to unless expressly agreed to by the + Licensor. + + d. Nothing in this Public License constitutes or may be interpreted + as a limitation upon, or waiver of, any privileges and immunities + that apply to the Licensor or You, including from the legal + processes of any jurisdiction or authority. + +======================================================================= + +Creative Commons is not a party to its public +licenses. Notwithstanding, Creative Commons may elect to apply one of +its public licenses to material it publishes and in those instances +will be considered the “Licensor.” The text of the Creative Commons +public licenses is dedicated to the public domain under the CC0 Public +Domain Dedication. Except for the limited purpose of indicating that +material is shared under a Creative Commons public license or as +otherwise permitted by the Creative Commons policies published at +creativecommons.org/policies, Creative Commons does not authorize the +use of the trademark "Creative Commons" or any other trademark or logo +of Creative Commons without its prior written consent including, +without limitation, in connection with any unauthorized modifications +to any of its public licenses or any other arrangements, +understandings, or agreements concerning use of licensed material. For +the avoidance of doubt, this paragraph does not form part of the +public licenses. + +Creative Commons may be contacted at creativecommons.org. 
+ diff --git a/README.md b/README.md index c6c4c199cc4e9e4b5f9627ee09ba60cdfabc79cb..0e3bfc46f4bbf3932c208d001d0fdbfc5257ae88 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,378 @@ --- -title: Test Virtual -emoji: 📈 -colorFrom: gray -colorTo: indigo +title: test_virtual +app_file: ./demo/demo.py sdk: gradio sdk_version: 4.38.1 -app_file: app.py -pinned: false --- +# From Audio to Photoreal Embodiment: Synthesizing Humans in Conversations +This repository contains a pytorch implementation of ["From Audio to Photoreal Embodiment: Synthesizing Humans in Conversations"](https://people.eecs.berkeley.edu/~evonne_ng/projects/audio2photoreal/) -Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference +:hatching_chick: **Try out our demo [here](https://colab.research.google.com/drive/1lnX3d-3T3LaO3nlN6R8s6pPvVNAk5mdK?usp=sharing)** or continue following the steps below to run code locally! +And thanks everyone for the support via contributions/comments/issues! + +https://github.com/facebookresearch/audio2photoreal/assets/17986358/5cba4079-275e-48b6-aecc-f84f3108c810 + +This codebase provides: +- train code +- test code +- pretrained motion models +- access to dataset + +If you use the dataset or code, please cite our [Paper](https://arxiv.org/abs/2401.01885) + +``` +@inproceedings{ng2024audio2photoreal, + title={From Audio to Photoreal Embodiment: Synthesizing Humans in Conversations}, + author={Ng, Evonne and Romero, Javier and Bagautdinov, Timur and Bai, Shaojie and Darrell, Trevor and Kanazawa, Angjoo and Richard, Alexander}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition}, + year={2024} +} +``` + +### Repository Contents + +- [**Quickstart:**](#quickstart) easy gradio demo that lets you record audio and render a video +- [**Installation:**](#installation) environment setup and installation (for more details on the rendering pipeline, please refer to [Codec Avatar Body](https://github.com/facebookresearch/ca_body)) +- [**Download data and models:**](#download-data-and-models) download annotations and pre-trained models + - [Dataset desc.](#dataset): description of dataset annotations + - [Visualize Dataset](#visualize-ground-truth): script for visualizing ground truth annotations + - [model desc.](#pretrained-models): description of pretrained models +- [**Running the pretrained models:**](#running-the-pretrained-models) how to generate results files and visualize the results using the rendering pipeline. + - [Face generation](#face-generation): commands to generate the results file for the faces + - [Body generation](#body-generation): commands to generate the results file for the bodies + - [Visualization](#visualization): how to call into the rendering api. For full details, please refer to [this repo](https://github.com/facebookresearch/ca_body). +- [**Training from scratch (3 models):**](#training-from-scratch) scripts to get the training pipeline running from scratch for face, guide poses, and body models. + - [Face diffusion model](#1-face-diffusion-model) + - [Body diffusion](#2-body-diffusion-model) + - [Body vq vae](#3-body-vq-vae) + - [Body guide transformer](#4-body-guide-transformer) + +We annotate code that you can directly copy and paste into your terminal using the :point_down: icon. + +# Quickstart +With this demo, you can record an audio clip and select the number of samples you want to generate. 
+
+Make sure you have CUDA 11.7 and gcc/g++ 9.0 for pytorch3d compatibility.
+
+:point_down: Install necessary components. This will do the environment configuration and install the corresponding rendering assets, prerequisite models, and pretrained models:
+```
+conda create --name a2p_env python=3.9
+conda activate a2p_env
+sh demo/install.sh
+```
+:point_down: Run the demo. You can record your audio and then render corresponding results!
+```
+python -m demo.demo
+```
+
+:microphone: First, record your audio
+
+![](assets/demo1.gif)
+
+:hourglass: Hold tight because the rendering can take a while!
+
+You can change the number of samples (1-10) you want to generate, and download your favorite video by clicking on the download button on the top right of each video.
+
+![](assets/demo2.gif)
+
+# Installation
+The code has been tested with CUDA 11.7, Python 3.9, and gcc/g++ 9.0.
+
+:point_down: If you haven't done so already via the demo setup, configure the environments and download prerequisite models:
+```
+conda create --name a2p_env python=3.9
+conda activate a2p_env
+pip install -r scripts/requirements.txt
+sh scripts/download_prereq.sh
+```
+:point_down: To get the rendering working, please also make sure you install [pytorch3d](https://github.com/facebookresearch/pytorch3d/blob/main/INSTALL.md).
+```
+pip install "git+https://github.com/facebookresearch/pytorch3d.git"
+```
+Please see [CA Bodies repo](https://github.com/facebookresearch/ca_body) for more details on the renderer.
+
+# Download data and models
+To download any of the datasets, you can find them at `https://github.com/facebookresearch/audio2photoreal/releases/download/v1.0/<person_id>.zip`, where you can replace `<person_id>` with any of `PXB184`, `RLW104`, `TXB805`, or `GQS883`.
+Downloading over the command line can be done with the following commands.
+```
+curl -L https://github.com/facebookresearch/audio2photoreal/releases/download/v1.0/<person_id>.zip -o <person_id>.zip
+unzip <person_id>.zip -d dataset/
+rm <person_id>.zip
+```
+:point_down: To download *all* of the datasets, you can simply run the following, which will download and unpack all of them.
+```
+sh scripts/download_alldatasets.sh
+```
+
+Similarly, to download any of the models, you can find them at `http://audio2photoreal_models.berkeleyvision.org/<person_id>_models.tar`.
+```
+# download the motion generation
+wget http://audio2photoreal_models.berkeleyvision.org/<person_id>_models.tar
+tar xvf <person_id>_models.tar
+rm <person_id>_models.tar
+
+# download the body decoder/rendering assets and place them in the right place
+mkdir -p checkpoints/ca_body/data/
+wget https://github.com/facebookresearch/ca_body/releases/download/v0.0.1-alpha/<person_id>.tar.gz
+tar xvf <person_id>.tar.gz --directory checkpoints/ca_body/data/
+rm <person_id>.tar.gz
+```
+:point_down: You can also download all of the models with this script:
+```
+sh scripts/download_allmodels.sh
+```
+The above model script will download *both* the models for motion generation and the body decoder/rendering models. Please view the script for more details.
+
+### Dataset
+Once the dataset is downloaded and unzipped (via `scripts/download_alldatasets.sh`), it should unfold into the following directory structure:
+```
+|-- dataset/
+    |-- PXB184/
+        |-- data_stats.pth
+        |-- scene01_audio.wav
+        |-- scene01_body_pose.npy
+        |-- scene01_face_expression.npy
+        |-- scene01_missing_face_frames.npy
+        |-- ...
+        |-- scene30_audio.wav
+        |-- scene30_body_pose.npy
+        |-- scene30_face_expression.npy
+        |-- scene30_missing_face_frames.npy
+    |-- RLW104/
+    |-- TXB805/
+    |-- GQS883/
+```
+Each of the four participants (`PXB184`, `RLW104`, `TXB805`, `GQS883`) should have independent "scenes" (1 to 26 or so).
+For each scene, there are 3 types of data annotations that we save.
+```
+*audio.wav: wavefile containing the raw audio (two channels, 1600*T samples) at 48kHz; channel 0 is the audio associated with the current person, channel 1 is the audio associated with their conversational partner.
+
+*body_pose.npy: (T x 104) array of joint angles in a kinematic skeleton. Not all of the joints are represented with 3DoF. Each 104-d vector can be used to reconstruct a full-body skeleton.
+
+*face_expression.npy: (T x 256) array of facial codes, where each 256-d vector reconstructs a face mesh.
+
+*missing_face_frames.npy: List of indices (t) where the facial code is missing or corrupted.
+
+data_stats.pth: carries the mean and std for each modality of each person.
+```
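+
+:point_down: As a quick sanity check after downloading, you can load one scene and verify the shapes described above. This is a minimal sketch (it assumes `PXB184` has been unpacked to `dataset/PXB184`; the `data_stats.pth` key names are taken from `data_loaders/data.py`):
+```
+import os
+
+import numpy as np
+import torch
+import torchaudio
+
+root = "dataset/PXB184"
+pose = np.load(os.path.join(root, "scene01_body_pose.npy"))        # (T, 104) joint angles
+face = np.load(os.path.join(root, "scene01_face_expression.npy"))  # (T, 256) facial codes
+audio, sr = torchaudio.load(os.path.join(root, "scene01_audio.wav"))
+
+print(pose.shape, face.shape, audio.shape, sr)  # audio is (2, ~1600*T) at 48 kHz
+assert sr // 30 == 1600  # 1600 audio samples per 30 fps motion frame
+
+# data_stats.pth carries the per-modality mean/std used for z-normalization
+stats = torch.load(os.path.join(root, "data_stats.pth"))
+pose_mean = np.asarray(stats["pose_mean"]).reshape(-1)
+pose_std = np.asarray(stats["pose_std"]).reshape(-1)
+pose_z = (pose - pose_mean) / pose_std  # normalized pose, as the dataloader computes it
+```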
+
+For the train/val/test split, the indices are defined in `data_loaders/data.py` as:
+```
+train_idx = list(range(0, len(data_dict["data"]) - 6))
+val_idx = list(range(len(data_dict["data"]) - 6, len(data_dict["data"]) - 4))
+test_idx = list(range(len(data_dict["data"]) - 4, len(data_dict["data"])))
+```
+for any of the four dataset participants we train on. In other words, the last 6 sequences are always held out: 2 for validation and 4 for testing (see the worked example below).
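+
+:point_down: For a hypothetical participant with 30 sequences, this works out to:
+```
+n = 30                               # hypothetical number of sequences for one participant
+train_idx = list(range(0, n - 6))    # sequences 0..23
+val_idx = list(range(n - 6, n - 4))  # sequences 24..25
+test_idx = list(range(n - 4, n))     # sequences 26..29
+```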
+
+### Visualize ground truth
+If you've properly installed the rendering requirements, you can then visualize the full dataset with the following command:
+```
+python -m visualize.render_anno
+    --save_dir <path/to/save/dir>
+    --data_root <path/to/data/root>
+    --max_seq_length <max_seq_length>
+```
+
+The videos will be chunked into lengths according to the specified `--max_seq_length` arg (the default is 600).
+
+:point_down: For example, to visualize ground truth annotations for `PXB184`, you can run the following.
+```
+python -m visualize.render_anno --save_dir vis_anno_test --data_root dataset/PXB184 --max_seq_length 600
+```
+
+### Pretrained models
+We train person-specific models, so each person should have an associated directory. For instance, for `PXB184`, their complete models should unzip into the following structure.
+```
+|-- checkpoints/
+    |-- diffusion/
+        |-- c1_face/
+            |-- args.json
+            |-- model:09d.pt
+        |-- c1_pose/
+            |-- args.json
+            |-- model:09d.pt
+    |-- guide/
+        |-- c1_pose/
+            |-- args.json
+            |-- checkpoints/
+                |-- iter-:07d.pt
+    |-- vq/
+        |-- c1_pose/
+            |-- args.json
+            |-- net_iter:06d.pth
+```
+There are 4 models for each person and each model has an associated `args.json`.
+1. a face diffusion model that outputs 256 facial codes conditioned on audio
+2. a pose diffusion model that outputs 104 joint rotations conditioned on audio and guide poses
+3. a guide vq pose model that outputs vq tokens conditioned on audio at 1 fps
+4. a vq encoder-decoder model that vector quantizes the continuous 104-d pose space.
+
+# Running the pretrained models
+To run the actual models, you will need to run the pretrained models and generate the associated results files before visualizing them.
+
+### Face generation
+To generate the results file for the face, run:
+```
+python -m sample.generate
+    --model_path <path/to/model>
+    --num_samples <num_samples>
+    --num_repetitions <num_repetitions>
+    --timestep_respacing ddim500
+    --guidance_param 10.0
+```
+
+The `<path/to/model>` should be the path to the diffusion model that is associated with generating the face.
+E.g., for participant `PXB184`, the path might be `./checkpoints/diffusion/c1_face/model000155000.pt`.
+The other parameters are:
+```
+--num_samples: number of samples to generate. To sample the full dataset, use 56 (except for TXB805, which is 58).
+--num_repetitions: number of times to repeat the sampling, such that the total number of sequences generated is (num_samples * num_repetitions).
+--timestep_respacing: how many diffusion steps to take. Format will always be ddim<number of steps>.
+--guidance_param: how influential the conditioning is on the results. I usually use a range of 2.0-10.0, and tend towards higher values for the face.
+```
+
+:point_down: A full example of running the face model for `PXB184` with the provided pretrained models would then be:
+```
+python -m sample.generate --model_path checkpoints/diffusion/c1_face/model000155000.pt --num_samples 10 --num_repetitions 5 --timestep_respacing ddim500 --guidance_param 10.0
+```
+This generates 10 samples from the dataset, repeated 5 times (50 sequences total). The output results file will be saved to:
+`./checkpoints/diffusion/c1_face/samples_c1_face_000155000_seed10_/results.npy`
+
+### Body generation
+To generate the corresponding body, it will be very similar to generating the face, except now we have to feed in the model for generating the guide poses as well.
+```
+python -m sample.generate
+    --model_path <path/to/model>
+    --resume_trans <path/to/guide/model>
+    --num_samples <num_samples>
+    --num_repetitions <num_repetitions>
+    --timestep_respacing ddim500
+    --guidance_param 2.0
+```
+
+:point_down: Here, `<path/to/guide/model>` should point to the guide transformer. The full command would be:
+```
+python -m sample.generate --model_path checkpoints/diffusion/c1_pose/model000340000.pt --resume_trans checkpoints/guide/c1_pose/checkpoints/iter-0100000.pt --num_samples 10 --num_repetitions 5 --timestep_respacing ddim500 --guidance_param 2.0
+```
+Similarly, the output will be saved to:
+`./checkpoints/diffusion/c1_pose/samples_c1_pose_000340000_seed10_guide_iter-0100000.pt/results.npy`
+
+### Visualization
+On the body generation side of things, you can also optionally pass in the `--plot` flag in order to render out the photorealistic avatar. You will also need to pass in the corresponding generated face codes with the `--face_codes` flag.
+Optionally, if you already have the poses precomputed, you can also pass in the generated body with the `--pose_codes` flag.
+This will save videos in the same directory as where the body's `results.npy` is stored.
+
+:point_down: An example of the full command with *the three new flags added* is:
+```
+python -m sample.generate --model_path checkpoints/diffusion/c1_pose/model000340000.pt --resume_trans checkpoints/guide/c1_pose/checkpoints/iter-0100000.pt --num_samples 10 --num_repetitions 5 --timestep_respacing ddim500 --guidance_param 2.0 --face_codes ./checkpoints/diffusion/c1_face/samples_c1_face_000155000_seed10_/results.npy --pose_codes ./checkpoints/diffusion/c1_pose/samples_c1_pose_000340000_seed10_guide_iter-0100000.pt/results.npy --plot
+```
+The remaining flags can be the same as before. For the actual rendering API, please see [Codec Avatar Body](https://github.com/facebookresearch/ca_body) for installation etc.
+*Important: in order to visualize the full photorealistic avatar, you will need to run the face codes first, then pass them into the body generation code.* It will not work if you try to call generate with `--plot` for the face codes.
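+
+:point_down: If you want to peek at a generated `results.npy` before rendering, you can load it in Python. This is a minimal sketch; treating the file as a pickled dict is an assumption about the output format of `sample.generate`, not a documented interface:
+```
+import numpy as np
+
+path = "checkpoints/diffusion/c1_face/samples_c1_face_000155000_seed10_/results.npy"
+results = np.load(path, allow_pickle=True)  # allow_pickle in case a dict was saved
+# if a dict was pickled, list its keys; otherwise just report the array shape
+print(results.item().keys() if results.dtype == object else results.shape)
+```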
+
+# Training from scratch
+There are four possible models you will need to train: 1) the face diffusion model, 2) the body diffusion model, 3) the body VQ VAE, 4) the body guide transformer.
+The only dependency is that 3) is needed for 4). All other models can be trained in parallel.
+
+### 1) Face diffusion model
+To train the face model, you will need to run the following script:
+```
+python -m train.train_diffusion
+    --save_dir <path/to/save/dir>
+    --data_root <path/to/data/root>
+    --batch_size <batch_size>
+    --dataset social
+    --data_format face
+    --layers 8
+    --heads 8
+    --timestep_respacing ''
+    --max_seq_length 600
+```
+Importantly, a few of the flags are as follows:
+```
+--save_dir: path to directory where all outputs are stored
+--data_root: path to the directory of where to load the data from
+--dataset: name of dataset to load; right now we only support the 'social' dataset
+--data_format: set to 'face' for the face, as opposed to pose
+--timestep_respacing: set to '', which uses the default spacing of 1k diffusion steps
+--max_seq_length: the maximum number of frames for a given sequence to train on
+```
+:point_down: A full example for training on person `PXB184` is:
+```
+python -m train.train_diffusion --save_dir checkpoints/diffusion/c1_face_test --data_root ./dataset/PXB184/ --batch_size 4 --dataset social --data_format face --layers 8 --heads 8 --timestep_respacing '' --max_seq_length 600
+```
+
+### 2) Body diffusion model
+Training the body model is similar to the face model, but with the following additional parameters:
+```
+python -m train.train_diffusion
+    --save_dir <path/to/save/dir>
+    --data_root <path/to/data/root>
+    --lambda_vel <lambda_vel>
+    --batch_size <batch_size>
+    --dataset social
+    --add_frame_cond 1
+    --data_format pose
+    --layers 6
+    --heads 8
+    --timestep_respacing ''
+    --max_seq_length 600
+```
+The flags that differ from the face training are as follows:
+```
+--lambda_vel: additional auxiliary loss for training with velocity
+--add_frame_cond: set to '1' for 1 fps. If not specified, it will default to 30 fps.
+--data_format: set to 'pose' for the body, as opposed to face
+```
+:point_down: A full example for training on person `PXB184` is:
+```
+python -m train.train_diffusion --save_dir checkpoints/diffusion/c1_pose_test --data_root ./dataset/PXB184/ --lambda_vel 2.0 --batch_size 4 --dataset social --add_frame_cond 1 --data_format pose --layers 6 --heads 8 --timestep_respacing '' --max_seq_length 600
+```
+
+### 3) Body VQ VAE
+To train a VQ encoder-decoder, you will need to run the following script:
+```
+python -m train.train_vq
+    --out_dir <path/to/out/dir>
+    --data_root <path/to/data/root>
+    --batch_size <batch_size>
+    --lr 1e-3
+    --code_dim 1024
+    --output_emb_width 64
+    --depth 4
+    --dataname social
+    --loss_vel 0.0
+    --add_frame_cond 1
+    --data_format pose
+    --max_seq_length 600
+```
+:point_down: For person `PXB184`, it would be:
+```
+python -m train.train_vq --out_dir checkpoints/vq/c1_vq_test --data_root ./dataset/PXB184/ --lr 1e-3 --code_dim 1024 --output_emb_width 64 --depth 4 --dataname social --loss_vel 0.0 --data_format pose --batch_size 4 --add_frame_cond 1 --max_seq_length 600
+```
+
+### 4) Body guide transformer
+Once you have the VQ VAE trained from 3), you can then pass it in to train the body guide pose transformer:
+```
+python -m train.train_guide
+    --out_dir <path/to/out/dir>
+    --data_root <path/to/data/root>
+    --batch_size <batch_size>
+    --resume_pth <path/to/vq/model>
+    --add_frame_cond 1
+    --layers 6
+    --lr 2e-4
+    --gn
+    --dim 64
+```
+:point_down: For person `PXB184`, it would be:
+```
+python -m train.train_guide --out_dir checkpoints/guide/c1_trans_test --data_root ./dataset/PXB184/ --batch_size 4 --resume_pth checkpoints/vq/c1_vq_test/net_iter300000.pth --add_frame_cond 1 --layers 6 --lr 2e-4 --gn --dim 64
+```
+
+After training these 4 models, you can now follow the ["Running the pretrained models"](#running-the-pretrained-models) section to generate samples and visualize results.
+
+You can also visualize the corresponding ground truth sequences by passing in the `--render_gt` flag.
+
+
+# License
+The code and dataset are released under the [CC-NC 4.0 International license](https://github.com/facebookresearch/audio2photoreal/blob/main/LICENSE).
diff --git a/assets/demo1.gif b/assets/demo1.gif new file mode 100644 index 0000000000000000000000000000000000000000..f7e52943590a488452809ba1b766485de1f8d9db Binary files /dev/null and b/assets/demo1.gif differ diff --git a/assets/demo2.gif b/assets/demo2.gif new file mode 100644 index 0000000000000000000000000000000000000000..9aef0cbf4e0d07faf55d3a27716bf41aef5243a8 --- /dev/null +++ b/assets/demo2.gif @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4d07d3817b4a23bdb0a36869a469d051b9b10fe68d9e6f02f6cc8765cd6f5bc3 +size 1313657 diff --git a/assets/render_defaults_GQS883.pth b/assets/render_defaults_GQS883.pth new file mode 100644 index 0000000000000000000000000000000000000000..fbd96b9f605dc9821bd0be76db9dfd09bdba92ae --- /dev/null +++ b/assets/render_defaults_GQS883.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3ae7ee73849e258bbb8d8a04aa674960896fc1dff8757fefbd2df1685225dd7d +size 71354547 diff --git a/assets/render_defaults_PXB184.pth b/assets/render_defaults_PXB184.pth new file mode 100644 index 0000000000000000000000000000000000000000..dccf18780e13e5ea8320ce902d5586568c5a0c90 --- /dev/null +++ b/assets/render_defaults_PXB184.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c86ba14a58d4829c8d05428f5e601072dc4bab1bdc60bc53ce6c73990e9b97d7 +size 71354547 diff --git a/assets/render_defaults_RLW104.pth b/assets/render_defaults_RLW104.pth new file mode 100644 index 0000000000000000000000000000000000000000..7a24fb9d4c6029bd2f524af2bedb43c7f45f5fcc --- /dev/null +++ b/assets/render_defaults_RLW104.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:808a3fbf33115d3cc132bad48c2e95bfca29bb1847d912b1f72e5e5b4a081db5 +size 71354547 diff --git a/assets/render_defaults_TXB805.pth b/assets/render_defaults_TXB805.pth new file mode 100644 index 0000000000000000000000000000000000000000..4462856fa68ed8d3b872728d7df818b0954908bd --- /dev/null +++ b/assets/render_defaults_TXB805.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d7985c79edfba70f83f560859f2ce214d9779a46031aa8ca6a917d8fd4417e24 +size 71354547 diff --git a/checkpoints/ca_body/data/PXB184/body_dec.ckpt b/checkpoints/ca_body/data/PXB184/body_dec.ckpt new file mode 100644 index 0000000000000000000000000000000000000000..b6cd20e6442e36a35c43bd4731c21bfe4b6aa035 --- /dev/null +++ b/checkpoints/ca_body/data/PXB184/body_dec.ckpt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:26394ae03c1726b7c90b5633696d0eea733a3c5e423893c4e79b490c80c35ddf +size 893279810 diff --git a/checkpoints/ca_body/data/PXB184/config.yml b/checkpoints/ca_body/data/PXB184/config.yml new file mode 100644 index 0000000000000000000000000000000000000000..a9b02cf0fa869cfb48877bb0de43485dec9e4f75 --- /dev/null +++ b/checkpoints/ca_body/data/PXB184/config.yml @@ -0,0 +1,56 @@ + +model: + class_name: ca_body.models.mesh_vae_drivable.AutoEncoder + + encoder: + n_embs: 1024 + noise_std: 1.0 + + encoder_face: + n_embs: 256 + noise_std: 1.0 + + decoder_face: + n_latent: 256 + n_vert_out: 21918 + + decoder: + init_uv_size: 64 + n_init_channels: 64 + n_min_channels: 4 + n_pose_dims: 98 + n_pose_enc_channels: 16 + n_embs: 1024 + n_embs_enc_channels: 32 + n_face_embs: 256 + uv_size: 1024 + + decoder_view: + net_uv_size: 1024 + + upscale_net: + n_ftrs: 4 + + shadow_net: + uv_size: 2048 + shadow_size: 256 + n_dims: 4 + + pose_to_shadow: + n_pose_dims: 104 + uv_size: 2048 + + renderer: + image_height: 2048 + image_width: 1334 + depth_disc_ksize: 3 
+ + cal: + identity_camera: '400143' + + pixel_cal: + image_height: 2048 + image_width: 1334 + ds_rate: 8 + + learn_blur: true \ No newline at end of file diff --git a/checkpoints/diffusion/c1_face/args.json b/checkpoints/diffusion/c1_face/args.json new file mode 100644 index 0000000000000000000000000000000000000000..8f0ec9abe4dcb51500b0dbd2142c305601516230 --- /dev/null +++ b/checkpoints/diffusion/c1_face/args.json @@ -0,0 +1,34 @@ +{ + "add_frame_cond": null, + "batch_size": 4, + "cond_mask_prob": 0.2, + "cuda": true, + "data_format": "face", + "data_root": "./dataset/PXB184/", + "dataset": "social", + "device": 0, + "diffusion_steps": 10, + "heads": 8, + "lambda_vel": 0.0, + "latent_dim": 512, + "layers": 8, + "log_interval": 1000, + "lr": 0.0001, + "lr_anneal_steps": 0, + "max_seq_length": 600, + "noise_schedule": "cosine", + "not_rotary": false, + "num_audio_layers": 3, + "num_steps": 800000, + "overwrite": false, + "resume_checkpoint": "", + "save_dir": "checkpoints/diffusion/c1_face/", + "save_interval": 5000, + "seed": 10, + "sigma_small": true, + "simplify_audio": false, + "timestep_respacing": "", + "train_platform_type": "NoPlatform", + "unconstrained": false, + "weight_decay": 0.0 +} \ No newline at end of file diff --git a/checkpoints/diffusion/c1_pose/args.json b/checkpoints/diffusion/c1_pose/args.json new file mode 100644 index 0000000000000000000000000000000000000000..1b0b1da9c84c010907087ac1996403236a09fa42 --- /dev/null +++ b/checkpoints/diffusion/c1_pose/args.json @@ -0,0 +1,66 @@ +{ + "add_frame_cond": 1.0, + "arch": "trans_enc", + "batch_size": 32, + "clip_body": false, + "clip_use_delta": false, + "clip_use_vae": false, + "cond_mask_prob": 0.1, + "cuda": true, + "data_format": "pose", + "data_root": "./dataset/PXB184/", + "dataset": "social", + "device": 0, + "diffusion_steps": 10, + "emb_trans_dec": false, + "eval_batch_size": 32, + "eval_during_training": false, + "eval_num_samples": 1000, + "eval_rep_times": 3, + "eval_split": "val", + "filter": false, + "heads": 8, + "lambda_fc": 0.0, + "lambda_hands": 0.0, + "lambda_lips": 0.0, + "lambda_rcxyz": 0.0, + "lambda_vel": 2.0, + "lambda_xyz": 0.0, + "lambda_xyz_vel": 0.0, + "latent_dim": 512, + "layers": 6, + "log_interval": 1000, + "lr": 0.0001, + "lr_anneal_steps": 0, + "max_seq_length": 600, + "no_split": false, + "noise_schedule": "cosine", + "not_rotary": false, + "num_frames": 60, + "num_steps": 800000, + "overwrite": false, + "partial": false, + "resume_checkpoint": "", + "save_dir": "checkpoints/diffusion/c1_pose/", + "save_interval": 5000, + "seed": 10, + "sigma_small": true, + "simplify_audio": false, + "split_net": false, + "timestep_respacing": "", + "train_platform_type": "NoPlatform", + "unconstrained": false, + "use_clip": false, + "use_cm": true, + "use_full_dataset": false, + "use_kp": false, + "use_mask": true, + "use_mdm": false, + "use_nort": false, + "use_nort_mdm": false, + "use_pose_pos": false, + "use_resnet": true, + "use_vae": null, + "weight_decay": 0.0, + "z_norm": true +} diff --git a/checkpoints/guide/c1_pose/args.json b/checkpoints/guide/c1_pose/args.json new file mode 100644 index 0000000000000000000000000000000000000000..a2055f15c70c28be4823c3fae66b2710165cf00e --- /dev/null +++ b/checkpoints/guide/c1_pose/args.json @@ -0,0 +1,41 @@ +{ + "add_audio_pe": true, + "add_conv": true, + "add_frame_cond": 1, + "batch_size": 16, + "data_format": "pose", + "data_root": "./dataset/PXB184/", + "dataset": "social", + "dec_layers": null, + "dim": 64, + "enc_layers": null, + "eval_interval": 1000, + 
"filter": false, + "gamma": 0.1, + "gn": true, + "layers": 6, + "log_interval": 1000, + "lr": 0.0001, + "lr_scheduler": [ + 50000, + 400000 + ], + "no_split": false, + "num_audio_layers":2, + "out_dir": "checkpoints/guide/c1_pose", + "partial": false, + "resume_pth": "checkpoints/vq/c1_pose/net_iter300000.pth", + "resume_trans": null, + "save_interval": 5000, + "seed": 10, + "simplify_audio": false, + "total_iter": 1000000, + "use_full_dataset": false, + "use_kp": false, + "use_lstm": false, + "use_nort": false, + "use_nort_mdm": false, + "use_torch": false, + "warm_up_iter": 5000, + "weight_decay": 0.1 +} diff --git a/checkpoints/vq/c1_pose/args.json b/checkpoints/vq/c1_pose/args.json new file mode 100644 index 0000000000000000000000000000000000000000..4880e5a255a4ada34a28b8c77bc810cd7d31b58c --- /dev/null +++ b/checkpoints/vq/c1_pose/args.json @@ -0,0 +1,43 @@ +{ + "add_frame_cond": 1.0, + "batch_size": 16, + "code_dim": 1024, + "commit": 0.02, + "data_format": "pose", + "data_root": "./dataset/PXB184/", + "dataname": "social", + "dataset": "social", + "depth": 4, + "eval_iter": 1000, + "exp_name": "c1_pose", + "filter": false, + "gamma": 0.05, + "loss_vel": 0.0, + "lr": 0.001, + "lr_scheduler": [ + 300000 + ], + "max_seq_length": 600, + "nb_joints": 104, + "no_split": true, + "out_dir": "checkpoints/vq/c1_pose", + "output_emb_width": 64, + "partial": false, + "print_iter": 200, + "results_dir": "visual_results/", + "resume_pth": null, + "seed": 123, + "simplify_audio": false, + "total_iter": 10000000, + "use_full_dataset": false, + "use_kp": false, + "use_linear": false, + "use_nort": false, + "use_nort_mdm": false, + "use_quant": true, + "use_vae": false, + "visual_name": "baseline", + "warm_up_iter": 1000, + "weight_decay": 0.0, + "z_norm": true +} \ No newline at end of file diff --git a/checkpoints/vq/c1_pose/net_iter300000.pth b/checkpoints/vq/c1_pose/net_iter300000.pth new file mode 100644 index 0000000000000000000000000000000000000000..a4afe7cb7448cc6ebbac6ef378d0b4ae92d4e923 --- /dev/null +++ b/checkpoints/vq/c1_pose/net_iter300000.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5649ad5e49e0e1afcd9a7390f0ee79ee66de275a67ecb1cfe7fc691cb4ceb332 +size 3129275 diff --git a/data_loaders/data.py b/data_loaders/data.py new file mode 100644 index 0000000000000000000000000000000000000000..9312ccf7aef858e43a924954e3a82a94e260c2dc --- /dev/null +++ b/data_loaders/data.py @@ -0,0 +1,253 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. 
+""" + +import os +from typing import Dict, Iterable, List, Union + +import numpy as np +import torch +from torch.utils import data + +from utils.misc import prGreen + + +class Social(data.Dataset): + def __init__( + self, + args, + data_dict: Dict[str, Iterable], + split: str = "train", + chunk: bool = False, + add_padding: bool = True, + ) -> None: + if args.data_format == "face": + prGreen("[dataset.py] training face only model") + data_dict["data"] = data_dict["face"] + elif args.data_format == "pose": + prGreen("[dataset.py] training pose only model") + missing = [] + for d in data_dict["data"]: + missing.append(np.ones_like(d)) + data_dict["missing"] = missing + + # set up variables for dataloader + self.data_format = args.data_format + self.add_frame_cond = args.add_frame_cond + self._register_keyframe_step() + self.data_root = args.data_root + self.max_seq_length = args.max_seq_length + if hasattr(args, "curr_seq_length") and args.curr_seq_length is not None: + self.max_seq_length = args.curr_seq_length + prGreen([f"[dataset.py] sequences of {self.max_seq_length}"]) + self.add_padding = add_padding + self.audio_per_frame = 1600 + self.max_audio_length = self.max_seq_length * self.audio_per_frame + self.min_seq_length = 400 + + # set up training/validation splits + train_idx = list(range(0, len(data_dict["data"]) - 6)) + val_idx = list(range(len(data_dict["data"]) - 6, len(data_dict["data"]) - 4)) + test_idx = list(range(len(data_dict["data"]) - 4, len(data_dict["data"]))) + self.split = split + if split == "train": + self._pick_sequences(data_dict, train_idx) + elif split == "val": + self._pick_sequences(data_dict, val_idx) + else: + self._pick_sequences(data_dict, test_idx) + self.chunk = chunk + if split == "test": + print("[dataset.py] chunking data...") + self._chunk_data() + self._load_std() + prGreen( + f"[dataset.py] {split} | {len(self.data)} sequences ({self.data[0].shape}) | total len {self.total_len}" + ) + + def inv_transform( + self, data: Union[np.ndarray, torch.Tensor], data_type: str + ) -> Union[np.ndarray, torch.Tensor]: + if data_type == "pose": + std = self.std + mean = self.mean + elif data_type == "face": + std = self.face_std + mean = self.face_mean + elif data_type == "audio": + std = self.audio_std + mean = self.audio_mean + else: + assert False, f"datatype not defined: {data_type}" + + if torch.is_tensor(data): + return data * torch.tensor( + std, device=data.device, requires_grad=False + ) + torch.tensor(mean, device=data.device, requires_grad=False) + else: + return data * std + mean + + def _pick_sequences(self, data_dict: Dict[str, Iterable], idx: List[int]) -> None: + self.data = np.take(data_dict["data"], idx, axis=0) + self.missing = np.take(data_dict["missing"], idx, axis=0) + self.audio = np.take(data_dict["audio"], idx, axis=0) + self.lengths = np.take(data_dict["lengths"], idx, axis=0) + self.total_len = sum([len(d) for d in self.data]) + + def _load_std(self) -> None: + stats = torch.load(os.path.join(self.data_root, "data_stats.pth")) + print( + f'[dataset.py] loading from... 
{os.path.join(self.data_root, "data_stats.pth")}' + ) + self.mean = stats["pose_mean"].reshape(-1) + self.std = stats["pose_std"].reshape(-1) + self.face_mean = stats["code_mean"] + self.face_std = stats["code_std"] + self.audio_mean = stats["audio_mean"] + self.audio_std = stats["audio_std_flat"] + + def _chunk_data(self) -> None: + chunk_data = [] + chunk_missing = [] + chunk_lengths = [] + chunk_audio = [] + # create sequences of set lengths + for d_idx in range(len(self.data)): + curr_data = self.data[d_idx] + curr_missing = self.missing[d_idx] + curr_audio = self.audio[d_idx] + end_range = len(self.data[d_idx]) - self.max_seq_length + for chunk_idx in range(0, end_range, self.max_seq_length): + chunk_end = chunk_idx + self.max_seq_length + curr_data_chunk = curr_data[chunk_idx:chunk_end, :] + curr_missing_chunk = curr_missing[chunk_idx:chunk_end, :] + curr_audio_chunk = curr_audio[ + chunk_idx * self.audio_per_frame : chunk_end * self.audio_per_frame, + :, + ] + if curr_data_chunk.shape[0] < self.max_seq_length: + # do not add a short chunk to the list + continue + chunk_lengths.append(curr_data_chunk.shape[0]) + chunk_data.append(curr_data_chunk) + chunk_missing.append(curr_missing_chunk) + chunk_audio.append(curr_audio_chunk) + idx = np.random.permutation(len(chunk_data)) + print("==> shuffle", idx) + self.data = np.take(chunk_data, idx, axis=0) + self.missing = np.take(chunk_missing, idx, axis=0) + self.lengths = np.take(chunk_lengths, idx, axis=0) + self.audio = np.take(chunk_audio, idx, axis=0) + self.total_len = len(self.data) + + def _register_keyframe_step(self) -> None: + if self.add_frame_cond == 1: + self.step = 30 + if self.add_frame_cond is None: + self.step = 1 + + def _pad_sequence( + self, sequence: np.ndarray, actual_length: int, max_length: int + ) -> np.ndarray: + sequence = np.concatenate( + ( + sequence, + np.zeros((max_length - actual_length, sequence.shape[-1])), + ), + axis=0, + ) + return sequence + + def _get_idx(self, item: int) -> int: + cumulative_len = 0 + seq_idx = 0 + while item > cumulative_len: + cumulative_len += len(self.data[seq_idx]) + seq_idx += 1 + item = seq_idx - 1 + return item + + def _get_random_subsection( + self, data_dict: Dict[str, Iterable] + ) -> Dict[str, np.ndarray]: + isnonzero = False + while not isnonzero: + start = np.random.randint(0, data_dict["m_length"] - self.max_seq_length) + if self.add_padding: + length = ( + np.random.randint(self.min_seq_length, self.max_seq_length) + if not self.split == "test" + else self.max_seq_length + ) + else: + length = self.max_seq_length + curr_missing = data_dict["missing"][start : start + length] + isnonzero = np.any(curr_missing) + missing = curr_missing + motion = data_dict["motion"][start : start + length, :] + keyframes = motion[:: self.step] + audio = data_dict["audio"][ + start * self.audio_per_frame : (start + length) * self.audio_per_frame, + :, + ] + data_dict["m_length"] = len(motion) + data_dict["k_length"] = len(keyframes) + data_dict["a_length"] = len(audio) + + if data_dict["m_length"] < self.max_seq_length: + motion = self._pad_sequence( + motion, data_dict["m_length"], self.max_seq_length + ) + missing = self._pad_sequence( + missing, data_dict["m_length"], self.max_seq_length + ) + audio = self._pad_sequence( + audio, data_dict["a_length"], self.max_audio_length + ) + max_step_length = len(np.zeros(self.max_seq_length)[:: self.step]) + keyframes = self._pad_sequence( + keyframes, data_dict["k_length"], max_step_length + ) + data_dict["motion"] = motion + 
data_dict["keyframes"] = keyframes + data_dict["audio"] = audio + data_dict["missing"] = missing + return data_dict + + def __len__(self) -> int: + return self.total_len + + def __getitem__(self, item: int) -> Dict[str, np.ndarray]: + # figure out which sequence to randomly sample from + if not self.split == "test": + item = self._get_idx(item) + motion = self.data[item] + audio = self.audio[item] + m_length = self.lengths[item] + missing = self.missing[item] + a_length = len(audio) + # Z Normalization + if self.data_format == "pose": + motion = (motion - self.mean) / self.std + elif self.data_format == "face": + motion = (motion - self.face_mean) / self.face_std + audio = (audio - self.audio_mean) / self.audio_std + keyframes = motion[:: self.step] + k_length = len(keyframes) + data_dict = { + "motion": motion, + "m_length": m_length, + "audio": audio, + "a_length": a_length, + "keyframes": keyframes, + "k_length": k_length, + "missing": missing, + } + if not self.split == "test" and not self.chunk: + data_dict = self._get_random_subsection(data_dict) + if self.data_format == "face": + data_dict["motion"] *= data_dict["missing"] + return data_dict diff --git a/data_loaders/get_data.py b/data_loaders/get_data.py new file mode 100644 index 0000000000000000000000000000000000000000..60ceb57c3608cc2b93d6215ad065dc384da9aca3 --- /dev/null +++ b/data_loaders/get_data.py @@ -0,0 +1,129 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +import os + +from typing import Dict, List + +import numpy as np +import torch +import torchaudio +from data_loaders.data import Social +from data_loaders.tensors import social_collate +from torch.utils.data import DataLoader +from utils.misc import prGreen + + +def get_dataset_loader( + args, + data_dict: Dict[str, np.ndarray], + split: str = "train", + chunk: bool = False, + add_padding: bool = True, +) -> DataLoader: + dataset = Social( + args=args, + data_dict=data_dict, + split=split, + chunk=chunk, + add_padding=add_padding, + ) + loader = DataLoader( + dataset, + batch_size=args.batch_size, + shuffle=not split == "test", + num_workers=8, + drop_last=True, + collate_fn=social_collate, + pin_memory=True, + ) + return loader + + +def _load_pose_data( + all_paths: List[str], audio_per_frame: int, flip_person: bool = False +) -> Dict[str, List]: + data = [] + face = [] + audio = [] + lengths = [] + missing = [] + for _, curr_path_name in enumerate(all_paths): + if not curr_path_name.endswith("_body_pose.npy"): + continue + # load face information and deal with missing codes + curr_code = np.load( + curr_path_name.replace("_body_pose.npy", "_face_expression.npy") + ).astype(float) + # curr_code = np.array(curr_face["codes"], dtype=float) + missing_list = np.load( + curr_path_name.replace("_body_pose.npy", "_missing_face_frames.npy") + ) + if len(missing_list) == len(curr_code): + print("skipping", curr_path_name, curr_code.shape) + continue + curr_missing = np.ones_like(curr_code) + curr_missing[missing_list] = 0.0 + + # load pose information and deal with discontinuities + curr_pose = np.load(curr_path_name) + if "PXB184" in curr_path_name or "RLW104" in curr_path_name: # Capture 1 or 2 + curr_pose[:, 3] = (curr_pose[:, 3] + np.pi) % (2 * np.pi) + curr_pose[:, 3] = (curr_pose[:, 3] + np.pi) % (2 * np.pi) + + # load audio information + curr_audio, _ = torchaudio.load( + curr_path_name.replace("_body_pose.npy", 
"_audio.wav") + ) + curr_audio = curr_audio.T + if flip_person: + prGreen("[get_data.py] flipping the dataset of left right person") + tmp = torch.zeros_like(curr_audio) + tmp[:, 1] = curr_audio[:, 0] + tmp[:, 0] = curr_audio[:, 1] + curr_audio = tmp + + assert len(curr_pose) * audio_per_frame == len( + curr_audio + ), f"motion {curr_pose.shape} vs audio {curr_audio.shape}" + + data.append(curr_pose) + face.append(curr_code) + missing.append(curr_missing) + audio.append(curr_audio) + lengths.append(len(curr_pose)) + + data_dict = { + "data": data, + "face": face, + "audio": audio, + "lengths": lengths, + "missing": missing, + } + return data_dict + + +def load_local_data( + data_root: str, audio_per_frame: int, flip_person: bool = False +) -> Dict[str, List]: + if flip_person: + if "PXB184" in data_root: + data_root = data_root.replace("PXB184", "RLW104") + elif "RLW104" in data_root: + data_root = data_root.replace("RLW104", "PXB184") + elif "TXB805" in data_root: + data_root = data_root.replace("TXB805", "GQS883") + elif "GQS883" in data_root: + data_root = data_root.replace("GQS883", "TXB805") + + all_paths = [os.path.join(data_root, x) for x in os.listdir(data_root)] + all_paths.sort() + return _load_pose_data( + all_paths, + audio_per_frame, + flip_person=flip_person, + ) diff --git a/data_loaders/tensors.py b/data_loaders/tensors.py new file mode 100644 index 0000000000000000000000000000000000000000..a00c495d0bbc1994c7e7ee266c4dc08c94f20d3d --- /dev/null +++ b/data_loaders/tensors.py @@ -0,0 +1,86 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +import torch +from torch.utils.data._utils.collate import default_collate + + +def lengths_to_mask(lengths, max_len): + mask = torch.arange(max_len, device=lengths.device).expand( + len(lengths), max_len + ) < lengths.unsqueeze(1) + return mask + + +def collate_tensors(batch): + dims = batch[0].dim() + max_size = [max([b.size(i) for b in batch]) for i in range(dims)] + size = (len(batch),) + tuple(max_size) + canvas = batch[0].new_zeros(size=size) + for i, b in enumerate(batch): + sub_tensor = canvas[i] + for d in range(dims): + sub_tensor = sub_tensor.narrow(d, 0, b.size(d)) + sub_tensor.add_(b) + return canvas + + +## social collate +def collate_v2(batch): + notnone_batches = [b for b in batch if b is not None] + databatch = [b["inp"] for b in notnone_batches] + missingbatch = [b["missing"] for b in notnone_batches] + audiobatch = [b["audio"] for b in notnone_batches] + lenbatch = [b["lengths"] for b in notnone_batches] + alenbatch = [b["audio_lengths"] for b in notnone_batches] + keyframebatch = [b["keyframes"] for b in notnone_batches] + klenbatch = [b["key_lengths"] for b in notnone_batches] + + databatchTensor = collate_tensors(databatch) + missingbatchTensor = collate_tensors(missingbatch) + audiobatchTensor = collate_tensors(audiobatch) + lenbatchTensor = torch.as_tensor(lenbatch) + alenbatchTensor = torch.as_tensor(alenbatch) + keyframeTensor = collate_tensors(keyframebatch) + klenbatchTensor = torch.as_tensor(klenbatch) + + maskbatchTensor = ( + lengths_to_mask(lenbatchTensor, databatchTensor.shape[-1]) + .unsqueeze(1) + .unsqueeze(1) + ) # unqueeze for broadcasting + motion = databatchTensor + cond = { + "y": { + "missing": missingbatchTensor, + "mask": maskbatchTensor, + "lengths": lenbatchTensor, + "audio": audiobatchTensor, + "alengths": alenbatchTensor, + "keyframes": 
keyframeTensor, + "klengths": klenbatchTensor, + } + } + return motion, cond + + +def social_collate(batch): + adapted_batch = [ + { + "inp": torch.tensor(b["motion"].T).to(torch.float32).unsqueeze(1), + "lengths": b["m_length"], + "audio": b["audio"] + if torch.is_tensor(b["audio"]) + else torch.tensor(b["audio"]).to(torch.float32), + "keyframes": torch.tensor(b["keyframes"]).to(torch.float32), + "key_lengths": b["k_length"], + "audio_lengths": b["a_length"], + "missing": torch.tensor(b["missing"]).to(torch.float32), + } + for b in batch + ] + return collate_v2(adapted_batch) diff --git a/demo/.ipynb_checkpoints/demo-checkpoint.py b/demo/.ipynb_checkpoints/demo-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..f26f536b3f018f67043a544e7380ae4bf32d29ee --- /dev/null +++ b/demo/.ipynb_checkpoints/demo-checkpoint.py @@ -0,0 +1,276 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +import copy +import json +from typing import Dict, Union + +import gradio as gr +import numpy as np +import torch +import torchaudio +from attrdict import AttrDict +from diffusion.respace import SpacedDiffusion +from model.cfg_sampler import ClassifierFreeSampleModel +from model.diffusion import FiLMTransformer +from utils.misc import fixseed +from utils.model_util import create_model_and_diffusion, load_model +from visualize.render_codes import BodyRenderer + + +class GradioModel: + def __init__(self, face_args, pose_args) -> None: + self.face_model, self.face_diffusion, self.device = self._setup_model( + face_args, "checkpoints/diffusion/c1_face/model000155000.pt" + ) + self.pose_model, self.pose_diffusion, _ = self._setup_model( + pose_args, "checkpoints/diffusion/c1_pose/model000340000.pt" + ) + # load standardization stuff + stats = torch.load("dataset/PXB184/data_stats.pth") + stats["pose_mean"] = stats["pose_mean"].reshape(-1) + stats["pose_std"] = stats["pose_std"].reshape(-1) + self.stats = stats + # set up renderer + config_base = f"./checkpoints/ca_body/data/PXB184" + self.body_renderer = BodyRenderer( + config_base=config_base, + render_rgb=True, + ) + + def _setup_model( + self, + args_path: str, + model_path: str, + ) -> (Union[FiLMTransformer, ClassifierFreeSampleModel], SpacedDiffusion): + with open(args_path) as f: + args = json.load(f) + args = AttrDict(args) + args.device = "cuda:0" if torch.cuda.is_available() else "cpu" + print("running on...", args.device) + args.model_path = model_path + args.output_dir = "/tmp/gradio/" + args.timestep_respacing = "ddim100" + if args.data_format == "pose": + args.resume_trans = "checkpoints/guide/c1_pose/checkpoints/iter-0100000.pt" + + ## create model + model, diffusion = create_model_and_diffusion(args, split_type="test") + print(f"Loading checkpoints from [{args.model_path}]...") + state_dict = torch.load(args.model_path, map_location=args.device) + load_model(model, state_dict) + model = ClassifierFreeSampleModel(model) + model.eval() + model.to(args.device) + return model, diffusion, args.device + + def _replace_keyframes( + self, + model_kwargs: Dict[str, Dict[str, torch.Tensor]], + B: int, + T: int, + top_p: float = 0.97, + ) -> torch.Tensor: + with torch.no_grad(): + tokens = self.pose_model.transformer.generate( + model_kwargs["y"]["audio"], + T, + layers=self.pose_model.tokenizer.residual_depth, + n_sequences=B, + top_p=top_p, + ) + tokens = tokens.reshape((B, -1, 
self.pose_model.tokenizer.residual_depth)) + pred = self.pose_model.tokenizer.decode(tokens).detach() + return pred + + def _run_single_diffusion( + self, + model_kwargs: Dict[str, Dict[str, torch.Tensor]], + diffusion: SpacedDiffusion, + model: Union[FiLMTransformer, ClassifierFreeSampleModel], + curr_seq_length: int, + num_repetitions: int = 1, + ) -> (torch.Tensor,): + sample_fn = diffusion.ddim_sample_loop + with torch.no_grad(): + sample = sample_fn( + model, + (num_repetitions, model.nfeats, 1, curr_seq_length), + clip_denoised=False, + model_kwargs=model_kwargs, + init_image=None, + progress=True, + dump_steps=None, + noise=None, + const_noise=False, + ) + return sample + + def generate_sequences( + self, + model_kwargs: Dict[str, Dict[str, torch.Tensor]], + data_format: str, + curr_seq_length: int, + num_repetitions: int = 5, + guidance_param: float = 10.0, + top_p: float = 0.97, + # batch_size: int = 1, + ) -> Dict[str, np.ndarray]: + if data_format == "pose": + model = self.pose_model + diffusion = self.pose_diffusion + else: + model = self.face_model + diffusion = self.face_diffusion + + all_motions = [] + model_kwargs["y"]["scale"] = torch.ones(num_repetitions) * guidance_param + model_kwargs["y"] = { + key: val.to(self.device) if torch.is_tensor(val) else val + for key, val in model_kwargs["y"].items() + } + if data_format == "pose": + model_kwargs["y"]["mask"] = ( + torch.ones((num_repetitions, 1, 1, curr_seq_length)) + .to(self.device) + .bool() + ) + model_kwargs["y"]["keyframes"] = self._replace_keyframes( + model_kwargs, + num_repetitions, + int(curr_seq_length / 30), + top_p=top_p, + ) + sample = self._run_single_diffusion( + model_kwargs, diffusion, model, curr_seq_length, num_repetitions + ) + all_motions.append(sample.cpu().numpy()) + print(f"created {len(all_motions) * num_repetitions} samples") + return np.concatenate(all_motions, axis=0) + + +def generate_results(audio: np.ndarray, num_repetitions: int, top_p: float): + if audio is None: + raise gr.Error("Please record audio to start") + sr, y = audio + # set to mono and perform resampling + y = torch.Tensor(y) + if y.dim() == 2: + dim = 0 if y.shape[0] == 2 else 1 + y = torch.mean(y, dim=dim) + y = torchaudio.functional.resample(torch.Tensor(y), orig_freq=sr, new_freq=48_000) + sr = 48_000 + # make it so that it is 4 seconds long + if len(y) < (sr * 4): + raise gr.Error("Please record at least 4 second of audio") + if num_repetitions is None or num_repetitions <= 0 or num_repetitions > 10: + raise gr.Error( + f"Invalid number of samples: {num_repetitions}. 
Please specify a number between 1-10" + ) + cutoff = int(len(y) / (sr * 4)) + y = y[: cutoff * sr * 4] + curr_seq_length = int(len(y) / sr) * 30 + # create model_kwargs + model_kwargs = {"y": {}} + dual_audio = np.random.normal(0, 0.001, (1, len(y), 2)) + dual_audio[:, :, 0] = y / max(y) + dual_audio = (dual_audio - gradio_model.stats["audio_mean"]) / gradio_model.stats[ + "audio_std_flat" + ] + model_kwargs["y"]["audio"] = ( + torch.Tensor(dual_audio).float().tile(num_repetitions, 1, 1) + ) + face_results = ( + gradio_model.generate_sequences( + model_kwargs, "face", curr_seq_length, num_repetitions=int(num_repetitions) + ) + .squeeze(2) + .transpose(0, 2, 1) + ) + face_results = ( + face_results * gradio_model.stats["code_std"] + gradio_model.stats["code_mean"] + ) + pose_results = ( + gradio_model.generate_sequences( + model_kwargs, + "pose", + curr_seq_length, + num_repetitions=int(num_repetitions), + guidance_param=2.0, + top_p=top_p, + ) + .squeeze(2) + .transpose(0, 2, 1) + ) + pose_results = ( + pose_results * gradio_model.stats["pose_std"] + gradio_model.stats["pose_mean"] + ) + dual_audio = ( + dual_audio * gradio_model.stats["audio_std_flat"] + + gradio_model.stats["audio_mean"] + ) + return face_results, pose_results, dual_audio[0].transpose(1, 0).astype(np.float32) + + +def audio_to_avatar(audio: np.ndarray, num_repetitions: int, top_p: float): + face_results, pose_results, audio = generate_results(audio, num_repetitions, top_p) + # returns: num_rep x T x 104 + B = len(face_results) + results = [] + for i in range(B): + render_data_block = { + "audio": audio, # 2 x T + "body_motion": pose_results[i, ...], # T x 104 + "face_motion": face_results[i, ...], # T x 256 + } + gradio_model.body_renderer.render_full_video( + render_data_block, f"/tmp/sample{i}", audio_sr=48_000 + ) + results += [gr.Video(value=f"/tmp/sample{i}_pred.mp4", visible=True)] + results += [gr.Video(visible=False) for _ in range(B, 10)] + return results + + +gradio_model = GradioModel( + face_args="./checkpoints/diffusion/c1_face/args.json", + pose_args="./checkpoints/diffusion/c1_pose/args.json", +) +demo = gr.Interface( + audio_to_avatar, # function + [ + gr.Audio(sources=["microphone"]), + gr.Number( + value=3, + label="Number of Samples (default = 3)", + precision=0, + minimum=1, + maximum=10, + ), + gr.Number( + value=0.97, + label="Sample Diversity (default = 0.97)", + precision=None, + minimum=0.01, + step=0.01, + maximum=1.00, + ), + ], # input type + [gr.Video(format="mp4", visible=True)] + + [gr.Video(format="mp4", visible=False) for _ in range(9)], # output type + title='"From Audio to Photoreal Embodiment: Synthesizing Humans in Conversations" Demo', + description="You can generate a photorealistic avatar from your voice!
\
+    1) Start by recording your audio. <br/>\
+    2) Specify the number of samples to generate. <br/>\
+    3) Specify how diverse you want the samples to be. This tunes the cumulative probability in nucleus sampling: 0.01 = low diversity, 1.0 = high diversity. <br/>\
+    4) Then, sit back and wait for the rendering to happen! This may take a while (e.g., 30 minutes). <br/>\
+    5) After, you can view the videos and download the ones you like. <br/>\
", + article="Relevant links: [Project Page](https://people.eecs.berkeley.edu/~evonne_ng/projects/audio2photoreal)", # TODO: code and arxiv +) + +if __name__ == "__main__": + fixseed(10) + demo.launch(share=True) diff --git a/demo/demo.py b/demo/demo.py new file mode 100644 index 0000000000000000000000000000000000000000..f26f536b3f018f67043a544e7380ae4bf32d29ee --- /dev/null +++ b/demo/demo.py @@ -0,0 +1,276 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +import copy +import json +from typing import Dict, Union + +import gradio as gr +import numpy as np +import torch +import torchaudio +from attrdict import AttrDict +from diffusion.respace import SpacedDiffusion +from model.cfg_sampler import ClassifierFreeSampleModel +from model.diffusion import FiLMTransformer +from utils.misc import fixseed +from utils.model_util import create_model_and_diffusion, load_model +from visualize.render_codes import BodyRenderer + + +class GradioModel: + def __init__(self, face_args, pose_args) -> None: + self.face_model, self.face_diffusion, self.device = self._setup_model( + face_args, "checkpoints/diffusion/c1_face/model000155000.pt" + ) + self.pose_model, self.pose_diffusion, _ = self._setup_model( + pose_args, "checkpoints/diffusion/c1_pose/model000340000.pt" + ) + # load standardization stuff + stats = torch.load("dataset/PXB184/data_stats.pth") + stats["pose_mean"] = stats["pose_mean"].reshape(-1) + stats["pose_std"] = stats["pose_std"].reshape(-1) + self.stats = stats + # set up renderer + config_base = f"./checkpoints/ca_body/data/PXB184" + self.body_renderer = BodyRenderer( + config_base=config_base, + render_rgb=True, + ) + + def _setup_model( + self, + args_path: str, + model_path: str, + ) -> (Union[FiLMTransformer, ClassifierFreeSampleModel], SpacedDiffusion): + with open(args_path) as f: + args = json.load(f) + args = AttrDict(args) + args.device = "cuda:0" if torch.cuda.is_available() else "cpu" + print("running on...", args.device) + args.model_path = model_path + args.output_dir = "/tmp/gradio/" + args.timestep_respacing = "ddim100" + if args.data_format == "pose": + args.resume_trans = "checkpoints/guide/c1_pose/checkpoints/iter-0100000.pt" + + ## create model + model, diffusion = create_model_and_diffusion(args, split_type="test") + print(f"Loading checkpoints from [{args.model_path}]...") + state_dict = torch.load(args.model_path, map_location=args.device) + load_model(model, state_dict) + model = ClassifierFreeSampleModel(model) + model.eval() + model.to(args.device) + return model, diffusion, args.device + + def _replace_keyframes( + self, + model_kwargs: Dict[str, Dict[str, torch.Tensor]], + B: int, + T: int, + top_p: float = 0.97, + ) -> torch.Tensor: + with torch.no_grad(): + tokens = self.pose_model.transformer.generate( + model_kwargs["y"]["audio"], + T, + layers=self.pose_model.tokenizer.residual_depth, + n_sequences=B, + top_p=top_p, + ) + tokens = tokens.reshape((B, -1, self.pose_model.tokenizer.residual_depth)) + pred = self.pose_model.tokenizer.decode(tokens).detach() + return pred + + def _run_single_diffusion( + self, + model_kwargs: Dict[str, Dict[str, torch.Tensor]], + diffusion: SpacedDiffusion, + model: Union[FiLMTransformer, ClassifierFreeSampleModel], + curr_seq_length: int, + num_repetitions: int = 1, + ) -> (torch.Tensor,): + sample_fn = diffusion.ddim_sample_loop + with torch.no_grad(): + sample = sample_fn( 
+ model, + (num_repetitions, model.nfeats, 1, curr_seq_length), + clip_denoised=False, + model_kwargs=model_kwargs, + init_image=None, + progress=True, + dump_steps=None, + noise=None, + const_noise=False, + ) + return sample + + def generate_sequences( + self, + model_kwargs: Dict[str, Dict[str, torch.Tensor]], + data_format: str, + curr_seq_length: int, + num_repetitions: int = 5, + guidance_param: float = 10.0, + top_p: float = 0.97, + # batch_size: int = 1, + ) -> Dict[str, np.ndarray]: + if data_format == "pose": + model = self.pose_model + diffusion = self.pose_diffusion + else: + model = self.face_model + diffusion = self.face_diffusion + + all_motions = [] + model_kwargs["y"]["scale"] = torch.ones(num_repetitions) * guidance_param + model_kwargs["y"] = { + key: val.to(self.device) if torch.is_tensor(val) else val + for key, val in model_kwargs["y"].items() + } + if data_format == "pose": + model_kwargs["y"]["mask"] = ( + torch.ones((num_repetitions, 1, 1, curr_seq_length)) + .to(self.device) + .bool() + ) + model_kwargs["y"]["keyframes"] = self._replace_keyframes( + model_kwargs, + num_repetitions, + int(curr_seq_length / 30), + top_p=top_p, + ) + sample = self._run_single_diffusion( + model_kwargs, diffusion, model, curr_seq_length, num_repetitions + ) + all_motions.append(sample.cpu().numpy()) + print(f"created {len(all_motions) * num_repetitions} samples") + return np.concatenate(all_motions, axis=0) + + +def generate_results(audio: np.ndarray, num_repetitions: int, top_p: float): + if audio is None: + raise gr.Error("Please record audio to start") + sr, y = audio + # set to mono and perform resampling + y = torch.Tensor(y) + if y.dim() == 2: + dim = 0 if y.shape[0] == 2 else 1 + y = torch.mean(y, dim=dim) + y = torchaudio.functional.resample(torch.Tensor(y), orig_freq=sr, new_freq=48_000) + sr = 48_000 + # make it so that it is 4 seconds long + if len(y) < (sr * 4): + raise gr.Error("Please record at least 4 second of audio") + if num_repetitions is None or num_repetitions <= 0 or num_repetitions > 10: + raise gr.Error( + f"Invalid number of samples: {num_repetitions}. 
Please specify a number between 1-10" + ) + cutoff = int(len(y) / (sr * 4)) + y = y[: cutoff * sr * 4] + curr_seq_length = int(len(y) / sr) * 30 + # create model_kwargs + model_kwargs = {"y": {}} + dual_audio = np.random.normal(0, 0.001, (1, len(y), 2)) + dual_audio[:, :, 0] = y / max(y) + dual_audio = (dual_audio - gradio_model.stats["audio_mean"]) / gradio_model.stats[ + "audio_std_flat" + ] + model_kwargs["y"]["audio"] = ( + torch.Tensor(dual_audio).float().tile(num_repetitions, 1, 1) + ) + face_results = ( + gradio_model.generate_sequences( + model_kwargs, "face", curr_seq_length, num_repetitions=int(num_repetitions) + ) + .squeeze(2) + .transpose(0, 2, 1) + ) + face_results = ( + face_results * gradio_model.stats["code_std"] + gradio_model.stats["code_mean"] + ) + pose_results = ( + gradio_model.generate_sequences( + model_kwargs, + "pose", + curr_seq_length, + num_repetitions=int(num_repetitions), + guidance_param=2.0, + top_p=top_p, + ) + .squeeze(2) + .transpose(0, 2, 1) + ) + pose_results = ( + pose_results * gradio_model.stats["pose_std"] + gradio_model.stats["pose_mean"] + ) + dual_audio = ( + dual_audio * gradio_model.stats["audio_std_flat"] + + gradio_model.stats["audio_mean"] + ) + return face_results, pose_results, dual_audio[0].transpose(1, 0).astype(np.float32) + + +def audio_to_avatar(audio: np.ndarray, num_repetitions: int, top_p: float): + face_results, pose_results, audio = generate_results(audio, num_repetitions, top_p) + # returns: num_rep x T x 104 + B = len(face_results) + results = [] + for i in range(B): + render_data_block = { + "audio": audio, # 2 x T + "body_motion": pose_results[i, ...], # T x 104 + "face_motion": face_results[i, ...], # T x 256 + } + gradio_model.body_renderer.render_full_video( + render_data_block, f"/tmp/sample{i}", audio_sr=48_000 + ) + results += [gr.Video(value=f"/tmp/sample{i}_pred.mp4", visible=True)] + results += [gr.Video(visible=False) for _ in range(B, 10)] + return results + + +gradio_model = GradioModel( + face_args="./checkpoints/diffusion/c1_face/args.json", + pose_args="./checkpoints/diffusion/c1_pose/args.json", +) +demo = gr.Interface( + audio_to_avatar, # function + [ + gr.Audio(sources=["microphone"]), + gr.Number( + value=3, + label="Number of Samples (default = 3)", + precision=0, + minimum=1, + maximum=10, + ), + gr.Number( + value=0.97, + label="Sample Diversity (default = 0.97)", + precision=None, + minimum=0.01, + step=0.01, + maximum=1.00, + ), + ], # input type + [gr.Video(format="mp4", visible=True)] + + [gr.Video(format="mp4", visible=False) for _ in range(9)], # output type + title='"From Audio to Photoreal Embodiment: Synthesizing Humans in Conversations" Demo', + description="You can generate a photorealistic avatar from your voice!
\
+    1) Start by recording your audio. <br/>\
+    2) Specify the number of samples to generate. <br/>\
+    3) Specify how diverse you want the samples to be. This tunes the cumulative probability in nucleus sampling: 0.01 = low diversity, 1.0 = high diversity. <br/>\
+    4) Then, sit back and wait for the rendering to happen! This may take a while (e.g., 30 minutes). <br/>\
+    5) After, you can view the videos and download the ones you like. <br/>\
", + article="Relevant links: [Project Page](https://people.eecs.berkeley.edu/~evonne_ng/projects/audio2photoreal)", # TODO: code and arxiv +) + +if __name__ == "__main__": + fixseed(10) + demo.launch(share=True) diff --git a/demo/install.sh b/demo/install.sh new file mode 100644 index 0000000000000000000000000000000000000000..824249c7728e424603ad3a28b1230c532dafcd73 --- /dev/null +++ b/demo/install.sh @@ -0,0 +1,20 @@ +#!/bin/bash + +# make sure to have cuda 11.7 and gcc 9.0 installed +# install environment +pip install -r scripts/requirements.txt +sh scripts/download_prereq.sh + +# download pytorch3d +pip install "git+https://github.com/facebookresearch/pytorch3d.git" + +# download model stuff +wget http://audio2photoreal_models.berkeleyvision.org/PXB184_models.tar || { echo 'downloading model failed' ; exit 1; } +tar xvf PXB184_models.tar +rm PXB184_models.tar + +# install rendering stuff +mkdir -p checkpoints/ca_body/data/ +wget https://github.com/facebookresearch/ca_body/releases/download/v0.0.1-alpha/PXB184.tar.gz || { echo 'downloading ca body model failed' ; exit 1; } +tar xvf PXB184.tar.gz --directory checkpoints/ca_body/data/ +rm PXB184.tar.gz \ No newline at end of file diff --git a/demo/requirements.txt b/demo/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..1c5759ff4188f6180a82d72feaf596a9b57f0375 --- /dev/null +++ b/demo/requirements.txt @@ -0,0 +1,17 @@ +attrdict +einops==0.7.0 +fairseq==0.12.2 +gradio==4.31.3 +gradio_client==0.7.3 +huggingface-hub==0.19.4 +hydra-core==1.0.7 +mediapy==1.2.0 +numpy==1.26.2 +omegaconf==2.0.6 +opencv-python==4.8.1.78 +protobuf==4.25.1 +tensorboardX==2.6.2.2 +torch==2.0.1 +torchaudio==2.0.2 +torchvision==0.15.2 +tqdm==4.66.3 diff --git a/diffusion/fp16_util.py b/diffusion/fp16_util.py new file mode 100644 index 0000000000000000000000000000000000000000..54556e3bf91cbd69e954075c73d8862a390092cb --- /dev/null +++ b/diffusion/fp16_util.py @@ -0,0 +1,250 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +""" +original code from +https://github.com/GuyTevet/motion-diffusion-model/blob/main/diffusion/gaussian_diffusion.py +under an MIT license +https://github.com/GuyTevet/motion-diffusion-model/blob/main/LICENSE +""" + +""" +Helpers to train with 16-bit precision. +""" + +import numpy as np +import torch as th +import torch.nn as nn +from torch._utils import _flatten_dense_tensors, _unflatten_dense_tensors + +from utils import logger + +INITIAL_LOG_LOSS_SCALE = 20.0 + + +def convert_module_to_f16(l): + """ + Convert primitive modules to float16. + """ + if isinstance(l, (nn.Conv1d, nn.Conv2d, nn.Conv3d)): + l.weight.data = l.weight.data.half() + if l.bias is not None: + l.bias.data = l.bias.data.half() + + +def convert_module_to_f32(l): + """ + Convert primitive modules to float32, undoing convert_module_to_f16(). + """ + if isinstance(l, (nn.Conv1d, nn.Conv2d, nn.Conv3d)): + l.weight.data = l.weight.data.float() + if l.bias is not None: + l.bias.data = l.bias.data.float() + + +def make_master_params(param_groups_and_shapes): + """ + Copy model parameters into a (differently-shaped) list of full-precision + parameters. 
+ """ + master_params = [] + for param_group, shape in param_groups_and_shapes: + master_param = nn.Parameter( + _flatten_dense_tensors( + [param.detach().float() for (_, param) in param_group] + ).view(shape) + ) + master_param.requires_grad = True + master_params.append(master_param) + return master_params + + +def model_grads_to_master_grads(param_groups_and_shapes, master_params): + """ + Copy the gradients from the model parameters into the master parameters + from make_master_params(). + """ + for master_param, (param_group, shape) in zip( + master_params, param_groups_and_shapes + ): + master_param.grad = _flatten_dense_tensors( + [param_grad_or_zeros(param) for (_, param) in param_group] + ).view(shape) + + +def master_params_to_model_params(param_groups_and_shapes, master_params): + """ + Copy the master parameter data back into the model parameters. + """ + # Without copying to a list, if a generator is passed, this will + # silently not copy any parameters. + for master_param, (param_group, _) in zip(master_params, param_groups_and_shapes): + for (_, param), unflat_master_param in zip( + param_group, unflatten_master_params(param_group, master_param.view(-1)) + ): + param.detach().copy_(unflat_master_param) + + +def unflatten_master_params(param_group, master_param): + return _unflatten_dense_tensors(master_param, [param for (_, param) in param_group]) + + +def get_param_groups_and_shapes(named_model_params): + named_model_params = list(named_model_params) + scalar_vector_named_params = ( + [(n, p) for (n, p) in named_model_params if p.ndim <= 1], + (-1), + ) + matrix_named_params = ( + [(n, p) for (n, p) in named_model_params if p.ndim > 1], + (1, -1), + ) + return [scalar_vector_named_params, matrix_named_params] + + +def master_params_to_state_dict( + model, param_groups_and_shapes, master_params, use_fp16 +): + if use_fp16: + state_dict = model.state_dict() + for master_param, (param_group, _) in zip( + master_params, param_groups_and_shapes + ): + for (name, _), unflat_master_param in zip( + param_group, unflatten_master_params(param_group, master_param.view(-1)) + ): + assert name in state_dict + state_dict[name] = unflat_master_param + else: + state_dict = model.state_dict() + for i, (name, _value) in enumerate(model.named_parameters()): + assert name in state_dict + state_dict[name] = master_params[i] + return state_dict + + +def state_dict_to_master_params(model, state_dict, use_fp16): + if use_fp16: + named_model_params = [ + (name, state_dict[name]) for name, _ in model.named_parameters() + ] + param_groups_and_shapes = get_param_groups_and_shapes(named_model_params) + master_params = make_master_params(param_groups_and_shapes) + else: + master_params = [state_dict[name] for name, _ in model.named_parameters()] + return master_params + + +def zero_master_grads(master_params): + for param in master_params: + param.grad = None + + +def zero_grad(model_params): + for param in model_params: + # Taken from https://pytorch.org/docs/stable/_modules/torch/optim/optimizer.html#Optimizer.add_param_group + if param.grad is not None: + param.grad.detach_() + param.grad.zero_() + + +def param_grad_or_zeros(param): + if param.grad is not None: + return param.grad.data.detach() + else: + return th.zeros_like(param) + + +class MixedPrecisionTrainer: + def __init__( + self, + *, + model, + use_fp16=False, + fp16_scale_growth=1e-3, + initial_lg_loss_scale=INITIAL_LOG_LOSS_SCALE, + ): + self.model = model + self.use_fp16 = use_fp16 + self.fp16_scale_growth = fp16_scale_growth + + 
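+        # Handles to the raw model parameters; when use_fp16 is set, the
+        # master_params list below is replaced by flattened fp32 copies that
+        # the optimizer steps on (see _optimize_fp16).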
self.model_params = list(self.model.parameters()) + self.master_params = self.model_params + self.param_groups_and_shapes = None + self.lg_loss_scale = initial_lg_loss_scale + + if self.use_fp16: + self.param_groups_and_shapes = get_param_groups_and_shapes( + self.model.named_parameters() + ) + self.master_params = make_master_params(self.param_groups_and_shapes) + self.model.convert_to_fp16() + + def zero_grad(self): + zero_grad(self.model_params) + + def backward(self, loss: th.Tensor): + if self.use_fp16: + loss_scale = 2**self.lg_loss_scale + (loss * loss_scale).backward() + else: + loss.backward() + + def optimize(self, opt: th.optim.Optimizer): + if self.use_fp16: + return self._optimize_fp16(opt) + else: + return self._optimize_normal(opt) + + def _optimize_fp16(self, opt: th.optim.Optimizer): + logger.logkv_mean("lg_loss_scale", self.lg_loss_scale) + model_grads_to_master_grads(self.param_groups_and_shapes, self.master_params) + grad_norm, param_norm = self._compute_norms(grad_scale=2**self.lg_loss_scale) + if check_overflow(grad_norm): + self.lg_loss_scale -= 1 + logger.log(f"Found NaN, decreased lg_loss_scale to {self.lg_loss_scale}") + zero_master_grads(self.master_params) + return False + + logger.logkv_mean("grad_norm", grad_norm) + logger.logkv_mean("param_norm", param_norm) + + self.master_params[0].grad.mul_(1.0 / (2**self.lg_loss_scale)) + opt.step() + zero_master_grads(self.master_params) + master_params_to_model_params(self.param_groups_and_shapes, self.master_params) + self.lg_loss_scale += self.fp16_scale_growth + return True + + def _optimize_normal(self, opt: th.optim.Optimizer): + grad_norm, param_norm = self._compute_norms() + logger.logkv_mean("grad_norm", grad_norm) + logger.logkv_mean("param_norm", param_norm) + opt.step() + return True + + def _compute_norms(self, grad_scale=1.0): + grad_norm = 0.0 + param_norm = 0.0 + for p in self.master_params: + with th.no_grad(): + param_norm += th.norm(p, p=2, dtype=th.float32).item() ** 2 + if p.grad is not None: + grad_norm += th.norm(p.grad, p=2, dtype=th.float32).item() ** 2 + return np.sqrt(grad_norm) / grad_scale, np.sqrt(param_norm) + + def master_params_to_state_dict(self, master_params): + return master_params_to_state_dict( + self.model, self.param_groups_and_shapes, master_params, self.use_fp16 + ) + + def state_dict_to_master_params(self, state_dict): + return state_dict_to_master_params(self.model, state_dict, self.use_fp16) + + +def check_overflow(value): + return (value == float("inf")) or (value == -float("inf")) or (value != value) diff --git a/diffusion/gaussian_diffusion.py b/diffusion/gaussian_diffusion.py new file mode 100644 index 0000000000000000000000000000000000000000..acda506b27d437ba9ade74eb443882815c2629a4 --- /dev/null +++ b/diffusion/gaussian_diffusion.py @@ -0,0 +1,1273 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. 
+""" + +""" +original code from +https://github.com/GuyTevet/motion-diffusion-model/blob/main/diffusion/gaussian_diffusion.py +under an MIT license +https://github.com/GuyTevet/motion-diffusion-model/blob/main/LICENSE +""" + +import enum +import math +from copy import deepcopy + +import numpy as np +import torch +import torch as th +from diffusion.losses import discretized_gaussian_log_likelihood, normal_kl +from diffusion.nn import mean_flat, sum_flat + + +def get_named_beta_schedule(schedule_name, num_diffusion_timesteps, scale_betas=1.0): + """ + Get a pre-defined beta schedule for the given name. + + The beta schedule library consists of beta schedules which remain similar + in the limit of num_diffusion_timesteps. + Beta schedules may be added, but should not be removed or changed once + they are committed to maintain backwards compatibility. + """ + if schedule_name == "linear": + # Linear schedule from Ho et al, extended to work for any number of + # diffusion steps. + scale = scale_betas * 1000 / num_diffusion_timesteps + beta_start = scale * 0.0001 + beta_end = scale * 0.02 + return np.linspace( + beta_start, beta_end, num_diffusion_timesteps, dtype=np.float64 + ) + elif schedule_name == "cosine": + return betas_for_alpha_bar( + num_diffusion_timesteps, + lambda t: math.cos((t + 0.008) / 1.008 * math.pi / 2) ** 2, + ) + else: + raise NotImplementedError(f"unknown beta schedule: {schedule_name}") + + +def betas_for_alpha_bar(num_diffusion_timesteps, alpha_bar, max_beta=0.999): + """ + Create a beta schedule that discretizes the given alpha_t_bar function, + which defines the cumulative product of (1-beta) over time from t = [0,1]. + + :param num_diffusion_timesteps: the number of betas to produce. + :param alpha_bar: a lambda that takes an argument t from 0 to 1 and + produces the cumulative product of (1-beta) up to that + part of the diffusion process. + :param max_beta: the maximum beta to use; use values lower than 1 to + prevent singularities. + """ + betas = [] + for i in range(num_diffusion_timesteps): + t1 = i / num_diffusion_timesteps + t2 = (i + 1) / num_diffusion_timesteps + betas.append(min(1 - alpha_bar(t2) / alpha_bar(t1), max_beta)) + return np.array(betas) + + +class ModelMeanType(enum.Enum): + """ + Which type of output the model predicts. + """ + + PREVIOUS_X = enum.auto() # the model predicts x_{t-1} + START_X = enum.auto() # the model predicts x_0 + EPSILON = enum.auto() # the model predicts epsilon + + +class ModelVarType(enum.Enum): + """ + What is used as the model's output variance. + + The LEARNED_RANGE option has been added to allow the model to predict + values between FIXED_SMALL and FIXED_LARGE, making its job easier. + """ + + LEARNED = enum.auto() + FIXED_SMALL = enum.auto() + FIXED_LARGE = enum.auto() + LEARNED_RANGE = enum.auto() + + +class LossType(enum.Enum): + MSE = enum.auto() # use raw MSE loss (and KL when learning variances) + RESCALED_MSE = ( + enum.auto() + ) # use raw MSE loss (with RESCALED_KL when learning variances) + KL = enum.auto() # use the variational lower-bound + RESCALED_KL = enum.auto() # like KL, but rescale to estimate the full VLB + + def is_vb(self): + return self == LossType.KL or self == LossType.RESCALED_KL + + +class GaussianDiffusion: + """ + Utilities for training and sampling diffusion models. + + Ported directly from here, and then adapted over time to further experimentation. 
+ https://github.com/hojonathanho/diffusion/blob/1e0dceb3b3495bbe19116a5e1b3596cd0706c543/diffusion_tf/diffusion_utils_2.py#L42 + + :param betas: a 1-D numpy array of betas for each diffusion timestep, + starting at T and going to 1. + :param model_mean_type: a ModelMeanType determining what the model outputs. + :param model_var_type: a ModelVarType determining how variance is output. + :param loss_type: a LossType determining the loss function to use. + :param rescale_timesteps: if True, pass floating point timesteps into the + model so that they are always scaled like in the + original paper (0 to 1000). + """ + + def __init__( + self, + *, + betas, + model_mean_type, + model_var_type, + loss_type, + rescale_timesteps=False, + lambda_vel=0.0, + data_format="pose", + model_path=None, + ): + self.model_mean_type = model_mean_type + self.model_var_type = model_var_type + self.loss_type = loss_type + self.rescale_timesteps = rescale_timesteps + self.data_format = data_format + self.lambda_vel = lambda_vel + if self.lambda_vel > 0.0: + assert ( + self.loss_type == LossType.MSE + ), "Geometric losses are supported by MSE loss type only!" + + # Use float64 for accuracy. + betas = np.array(betas, dtype=np.float64) + self.betas = betas + assert len(betas.shape) == 1, "betas must be 1-D" + assert (betas > 0).all() and (betas <= 1).all() + + self.num_timesteps = int(betas.shape[0]) + + alphas = 1.0 - betas + self.alphas_cumprod = np.cumprod(alphas, axis=0) + self.alphas_cumprod_prev = np.append(1.0, self.alphas_cumprod[:-1]) + self.alphas_cumprod_next = np.append(self.alphas_cumprod[1:], 0.0) + assert self.alphas_cumprod_prev.shape == (self.num_timesteps,) + + # calculations for diffusion q(x_t | x_{t-1}) and others + self.sqrt_alphas_cumprod = np.sqrt(self.alphas_cumprod) + self.sqrt_one_minus_alphas_cumprod = np.sqrt(1.0 - self.alphas_cumprod) + self.log_one_minus_alphas_cumprod = np.log(1.0 - self.alphas_cumprod) + self.sqrt_recip_alphas_cumprod = np.sqrt(1.0 / self.alphas_cumprod) + self.sqrt_recipm1_alphas_cumprod = np.sqrt(1.0 / self.alphas_cumprod - 1) + + # calculations for posterior q(x_{t-1} | x_t, x_0) + self.posterior_variance = ( + betas * (1.0 - self.alphas_cumprod_prev) / (1.0 - self.alphas_cumprod) + ) + # log calculation clipped because the posterior variance is 0 at the + # beginning of the diffusion chain. + self.posterior_log_variance_clipped = np.log( + np.append(self.posterior_variance[1], self.posterior_variance[1:]) + ) + self.posterior_mean_coef1 = ( + betas * np.sqrt(self.alphas_cumprod_prev) / (1.0 - self.alphas_cumprod) + ) + self.posterior_mean_coef2 = ( + (1.0 - self.alphas_cumprod_prev) + * np.sqrt(alphas) + / (1.0 - self.alphas_cumprod) + ) + + self.l2_loss = lambda a, b: (a - b) ** 2 + + def masked_l2(self, a, b, mask): + loss = self.l2_loss(a, b) + loss = sum_flat(loss * mask.float()) + n_entries = a.shape[1] * a.shape[2] + non_zero_elements = sum_flat(mask) * n_entries + mse_loss_val = loss / non_zero_elements + return mse_loss_val + + def q_mean_variance(self, x_start, t): + """ + Get the distribution q(x_t | x_0). + + :param x_start: the [N x C x ...] tensor of noiseless inputs. + :param t: the number of diffusion steps (minus 1). Here, 0 means one step. + :return: A tuple (mean, variance, log_variance), all of x_start's shape. 
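+
+        Concretely, with alpha_bar_t the cumulative product of (1 - beta_s)
+        up to step t, this is the closed form
+
+            q(x_t | x_0) = N(sqrt(alpha_bar_t) * x_0, (1 - alpha_bar_t) * I),
+
+        which the three returned tensors spell out elementwise.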
+ """ + mean = ( + _extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start + ) + variance = _extract_into_tensor(1.0 - self.alphas_cumprod, t, x_start.shape) + log_variance = _extract_into_tensor( + self.log_one_minus_alphas_cumprod, t, x_start.shape + ) + return mean, variance, log_variance + + def q_sample(self, x_start, t, noise=None): + """ + Diffuse the dataset for a given number of diffusion steps. + + In other words, sample from q(x_t | x_0). + + :param x_start: the initial dataset batch. + :param t: the number of diffusion steps (minus 1). Here, 0 means one step. + :param noise: if specified, the split-out normal noise. + :return: A noisy version of x_start. + """ + if noise is None: + noise = th.randn_like(x_start) + assert noise.shape == x_start.shape + return ( + _extract_into_tensor(self.sqrt_alphas_cumprod, t, x_start.shape) * x_start + + _extract_into_tensor(self.sqrt_one_minus_alphas_cumprod, t, x_start.shape) + * noise + ) + + def q_posterior_mean_variance(self, x_start, x_t, t): + """ + Compute the mean and variance of the diffusion posterior: + + q(x_{t-1} | x_t, x_0) + + """ + assert x_start.shape == x_t.shape, f"x_start: {x_start.shape}, x_t: {x_t.shape}" + posterior_mean = ( + _extract_into_tensor(self.posterior_mean_coef1, t, x_t.shape) * x_start + + _extract_into_tensor(self.posterior_mean_coef2, t, x_t.shape) * x_t + ) + posterior_variance = _extract_into_tensor(self.posterior_variance, t, x_t.shape) + posterior_log_variance_clipped = _extract_into_tensor( + self.posterior_log_variance_clipped, t, x_t.shape + ) + assert ( + posterior_mean.shape[0] + == posterior_variance.shape[0] + == posterior_log_variance_clipped.shape[0] + == x_start.shape[0] + ) + return posterior_mean, posterior_variance, posterior_log_variance_clipped + + def p_mean_variance( + self, model, x, t, clip_denoised=True, denoised_fn=None, model_kwargs=None + ): + """ + Apply the model to get p(x_{t-1} | x_t), as well as a prediction of + the initial x, x_0. + + :param model: the model, which takes a signal and a batch of timesteps + as input. + :param x: the [N x C x ...] tensor at time t. + :param t: a 1-D Tensor of timesteps. + :param clip_denoised: if True, clip the denoised signal into [-1, 1]. + :param denoised_fn: if not None, a function which applies to the + x_start prediction before it is used to sample. Applies before + clip_denoised. + :param model_kwargs: if not None, a dict of extra keyword arguments to + pass to the model. This can be used for conditioning. + :return: a dict with the following keys: + - 'mean': the model mean output. + - 'variance': the model variance output. + - 'log_variance': the log of 'variance'. + - 'pred_xstart': the prediction for x_0. + """ + if model_kwargs is None: + model_kwargs = {} + + B, C = x.shape[:2] + assert t.shape == (B,) + model_output = model(x, self._scale_timesteps(t), **model_kwargs) + + model_variance, model_log_variance = { + # for fixedlarge, we set the initial (log-)variance like so + # to get a better decoder log likelihood. 
+ ModelVarType.FIXED_LARGE: ( + np.append(self.posterior_variance[1], self.betas[1:]), + np.log(np.append(self.posterior_variance[1], self.betas[1:])), + ), + ModelVarType.FIXED_SMALL: ( + self.posterior_variance, + self.posterior_log_variance_clipped, + ), + }[self.model_var_type] + + model_variance = _extract_into_tensor(model_variance, t, x.shape) + model_log_variance = _extract_into_tensor(model_log_variance, t, x.shape) + + def process_xstart(x): + if denoised_fn is not None: + x = denoised_fn(x) + if clip_denoised: + return x.clamp(-1, 1) + return x + + pred_xstart = process_xstart(model_output) + pred_xstart = pred_xstart.permute(0, 2, 1).unsqueeze(2) + model_mean, _, _ = self.q_posterior_mean_variance( + x_start=pred_xstart, x_t=x, t=t + ) + + assert ( + model_mean.shape == model_log_variance.shape == pred_xstart.shape == x.shape + ), print( + f"{model_mean.shape} == {model_log_variance.shape} == {pred_xstart.shape} == {x.shape}" + ) + return { + "mean": model_mean, + "variance": model_variance, + "log_variance": model_log_variance, + "pred_xstart": pred_xstart, + } + + def _predict_xstart_from_eps(self, x_t, t, eps): + assert x_t.shape == eps.shape + return ( + _extract_into_tensor(self.sqrt_recip_alphas_cumprod, t, x_t.shape) * x_t + - _extract_into_tensor(self.sqrt_recipm1_alphas_cumprod, t, x_t.shape) * eps + ) + + def _predict_xstart_from_xprev(self, x_t, t, xprev): + assert x_t.shape == xprev.shape + return ( + _extract_into_tensor(1.0 / self.posterior_mean_coef1, t, x_t.shape) * xprev + - _extract_into_tensor( + self.posterior_mean_coef2 / self.posterior_mean_coef1, t, x_t.shape + ) + * x_t + ) + + def _predict_eps_from_xstart(self, x_t, t, pred_xstart): + return ( + _extract_into_tensor(self.sqrt_recip_alphas_cumprod, t, x_t.shape) * x_t + - pred_xstart + ) / _extract_into_tensor(self.sqrt_recipm1_alphas_cumprod, t, x_t.shape) + + def _scale_timesteps(self, t): + if self.rescale_timesteps: + return t.float() * (1000.0 / self.num_timesteps) + return t + + def condition_mean(self, cond_fn, p_mean_var, x, t, model_kwargs=None): + """ + Compute the mean for the previous step, given a function cond_fn that + computes the gradient of a conditional log probability with respect to + x. In particular, cond_fn computes grad(log(p(y|x))), and we want to + condition on y. + + This uses the conditioning strategy from Sohl-Dickstein et al. (2015). + """ + gradient = cond_fn(x, self._scale_timesteps(t), **model_kwargs) + new_mean = ( + p_mean_var["mean"].float() + p_mean_var["variance"] * gradient.float() + ) + return new_mean + + def condition_mean_with_grad(self, cond_fn, p_mean_var, x, t, model_kwargs=None): + """ + Compute the mean for the previous step, given a function cond_fn that + computes the gradient of a conditional log probability with respect to + x. In particular, cond_fn computes grad(log(p(y|x))), and we want to + condition on y. + + This uses the conditioning strategy from Sohl-Dickstein et al. (2015). + """ + gradient = cond_fn(x, t, p_mean_var, **model_kwargs) + new_mean = ( + p_mean_var["mean"].float() + p_mean_var["variance"] * gradient.float() + ) + return new_mean + + def condition_score(self, cond_fn, p_mean_var, x, t, model_kwargs=None): + """ + Compute what the p_mean_variance output would have been, should the + model's score function be conditioned by cond_fn. + + See condition_mean() for details on cond_fn. + + Unlike condition_mean(), this instead uses the conditioning strategy + from Song et al (2020). 
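+
+        In symbols (matching the body below, where cond_fn returns the
+        gradient of log p(y | x) with respect to x):
+
+            eps' = eps - sqrt(1 - alpha_bar_t) * grad_x log p(y | x)
+
+        and the mean is then recomputed from the adjusted pred_xstart.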
+        """
+        alpha_bar = _extract_into_tensor(self.alphas_cumprod, t, x.shape)
+
+        eps = self._predict_eps_from_xstart(x, t, p_mean_var["pred_xstart"])
+        eps = eps - (1 - alpha_bar).sqrt() * cond_fn(
+            x, self._scale_timesteps(t), **model_kwargs
+        )
+
+        out = p_mean_var.copy()
+        out["pred_xstart"] = self._predict_xstart_from_eps(x, t, eps)
+        out["mean"], _, _ = self.q_posterior_mean_variance(
+            x_start=out["pred_xstart"], x_t=x, t=t
+        )
+        return out
+
+    def condition_score_with_grad(self, cond_fn, p_mean_var, x, t, model_kwargs=None):
+        """
+        Compute what the p_mean_variance output would have been, should the
+        model's score function be conditioned by cond_fn.
+
+        See condition_mean() for details on cond_fn.
+
+        Unlike condition_mean(), this instead uses the conditioning strategy
+        from Song et al (2020).
+        """
+        alpha_bar = _extract_into_tensor(self.alphas_cumprod, t, x.shape)
+
+        eps = self._predict_eps_from_xstart(x, t, p_mean_var["pred_xstart"])
+        eps = eps - (1 - alpha_bar).sqrt() * cond_fn(x, t, p_mean_var, **model_kwargs)
+
+        out = p_mean_var.copy()
+        out["pred_xstart"] = self._predict_xstart_from_eps(x, t, eps)
+        out["mean"], _, _ = self.q_posterior_mean_variance(
+            x_start=out["pred_xstart"], x_t=x, t=t
+        )
+        return out
+
+    def p_sample(
+        self,
+        model,
+        x,
+        t,
+        clip_denoised=True,
+        denoised_fn=None,
+        cond_fn=None,
+        model_kwargs=None,
+        const_noise=False,
+    ):
+        """
+        Sample x_{t-1} from the model at the given timestep.
+
+        :param model: the model to sample from.
+        :param x: the current tensor at x_{t-1}.
+        :param t: the value of t, starting at 0 for the first diffusion step.
+        :param clip_denoised: if True, clip the x_start prediction to [-1, 1].
+        :param denoised_fn: if not None, a function which applies to the
+            x_start prediction before it is used to sample.
+        :param cond_fn: if not None, this is a gradient function that acts
+            similarly to the model.
+        :param model_kwargs: if not None, a dict of extra keyword arguments to
+            pass to the model. This can be used for conditioning.
+        :param const_noise: if True, reuse a single noise draw for the whole
+            batch instead of sampling noise per element.
+        :return: a dict containing the following keys:
+                 - 'sample': a random sample from the model.
+                 - 'pred_xstart': a prediction of x_0.
+        """
+        out = self.p_mean_variance(
+            model,
+            x,
+            t,
+            clip_denoised=clip_denoised,
+            denoised_fn=denoised_fn,
+            model_kwargs=model_kwargs,
+        )
+        noise = th.randn_like(x)
+        if const_noise:
+            noise = noise[[0]].repeat(x.shape[0], 1, 1, 1)
+
+        nonzero_mask = (
+            (t != 0).float().view(-1, *([1] * (len(x.shape) - 1)))
+        )  # no noise when t == 0
+        if cond_fn is not None:
+            out["mean"] = self.condition_mean(
+                cond_fn, out, x, t, model_kwargs=model_kwargs
+            )
+        sample = out["mean"] + nonzero_mask * th.exp(0.5 * out["log_variance"]) * noise
+        return {"sample": sample, "pred_xstart": out["pred_xstart"]}
+
+    def p_sample_with_grad(
+        self,
+        model,
+        x,
+        t,
+        clip_denoised=True,
+        denoised_fn=None,
+        cond_fn=None,
+        model_kwargs=None,
+    ):
+        """
+        Sample x_{t-1} from the model at the given timestep.
+
+        :param model: the model to sample from.
+        :param x: the current tensor at x_{t-1}.
+        :param t: the value of t, starting at 0 for the first diffusion step.
+        :param clip_denoised: if True, clip the x_start prediction to [-1, 1].
+        :param denoised_fn: if not None, a function which applies to the
+            x_start prediction before it is used to sample.
+        :param cond_fn: if not None, this is a gradient function that acts
+            similarly to the model.
+        :param model_kwargs: if not None, a dict of extra keyword arguments to
+            pass to the model. This can be used for conditioning.
+        :return: a dict containing the following keys:
+                 - 'sample': a random sample from the model.
+                 - 'pred_xstart': a prediction of x_0.
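+
+        The only difference from p_sample() is that gradients are enabled on
+        x so cond_fn can differentiate through the model's output; the
+        returned 'pred_xstart' is detached before being handed back.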
+ """ + with th.enable_grad(): + x = x.detach().requires_grad_() + out = self.p_mean_variance( + model, + x, + t, + clip_denoised=clip_denoised, + denoised_fn=denoised_fn, + model_kwargs=model_kwargs, + ) + noise = th.randn_like(x) + nonzero_mask = (t != 0).float().view(-1, *([1] * (len(x.shape) - 1))) + if cond_fn is not None: + out["mean"] = self.condition_mean_with_grad( + cond_fn, out, x, t, model_kwargs=model_kwargs + ) + sample = out["mean"] + nonzero_mask * th.exp(0.5 * out["log_variance"]) * noise + return {"sample": sample, "pred_xstart": out["pred_xstart"].detach()} + + def p_sample_loop( + self, + model, + shape, + noise=None, + clip_denoised=True, + denoised_fn=None, + cond_fn=None, + model_kwargs=None, + device=None, + progress=False, + skip_timesteps=0, + init_image=None, + randomize_class=False, + cond_fn_with_grad=False, + dump_steps=None, + const_noise=False, + ): + """ + Generate samples from the model. + + :param model: the model module. + :param shape: the shape of the samples, (N, C, H, W). + :param noise: if specified, the noise from the encoder to sample. + Should be of the same shape as `shape`. + :param clip_denoised: if True, clip x_start predictions to [-1, 1]. + :param denoised_fn: if not None, a function which applies to the + x_start prediction before it is used to sample. + :param cond_fn: if not None, this is a gradient function that acts + similarly to the model. + :param model_kwargs: if not None, a dict of extra keyword arguments to + pass to the model. This can be used for conditioning. + :param device: if specified, the device to create the samples on. + If not specified, use a model parameter's device. + :param progress: if True, show a tqdm progress bar. + :param const_noise: If True, will noise all samples with the same noise throughout sampling + :return: a non-differentiable batch of samples. + """ + final = None + if dump_steps is not None: + dump = [] + + for i, sample in enumerate( + self.p_sample_loop_progressive( + model, + shape, + noise=noise, + clip_denoised=clip_denoised, + denoised_fn=denoised_fn, + cond_fn=cond_fn, + model_kwargs=model_kwargs, + device=device, + progress=progress, + skip_timesteps=skip_timesteps, + init_image=init_image, + randomize_class=randomize_class, + cond_fn_with_grad=cond_fn_with_grad, + const_noise=const_noise, + ) + ): + if dump_steps is not None and i in dump_steps: + dump.append(deepcopy(sample["sample"])) + final = sample + if dump_steps is not None: + return dump + return final["sample"] + + def p_sample_loop_progressive( + self, + model, + shape, + noise=None, + clip_denoised=True, + denoised_fn=None, + cond_fn=None, + model_kwargs=None, + device=None, + progress=False, + skip_timesteps=0, + init_image=None, + randomize_class=False, + cond_fn_with_grad=False, + const_noise=False, + ): + """ + Generate samples from the model and yield intermediate samples from + each timestep of diffusion. + + Arguments are the same as p_sample_loop(). + Returns a generator over dicts, where each dict is the return value of + p_sample(). 
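+
+        A minimal driver sketch (the shape and names are illustrative):
+
+            for out in diffusion.p_sample_loop_progressive(model, (1, C, 1, T)):
+                img = out["sample"]  # x_{t-1} at each step; the last is x_0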
+ """ + if device is None: + device = next(model.parameters()).device + assert isinstance(shape, (tuple, list)) + if noise is not None: + img = noise + else: + img = th.randn(*shape, device=device) + + if skip_timesteps and init_image is None: + init_image = th.zeros_like(img) + + indices = list(range(self.num_timesteps - skip_timesteps))[::-1] + + if init_image is not None: + my_t = th.ones([shape[0]], device=device, dtype=th.long) * indices[0] + img = self.q_sample(init_image, my_t, img) + + if progress: + # Lazy import so that we don't depend on tqdm. + from tqdm.auto import tqdm + + indices = tqdm(indices) + + # number of timestamps to diffuse + for i in indices: + t = th.tensor([i] * shape[0], device=device) + if randomize_class and "y" in model_kwargs: + model_kwargs["y"] = th.randint( + low=0, + high=model.num_classes, + size=model_kwargs["y"].shape, + device=model_kwargs["y"].device, + ) + with th.no_grad(): + sample_fn = ( + self.p_sample_with_grad if cond_fn_with_grad else self.p_sample + ) + out = sample_fn( + model, + img, + t, + clip_denoised=clip_denoised, + denoised_fn=denoised_fn, + cond_fn=cond_fn, + model_kwargs=model_kwargs, + const_noise=const_noise, + ) + yield out + img = out["sample"] + + def ddim_sample( + self, + model, + x, + t, + clip_denoised=True, + denoised_fn=None, + cond_fn=None, + model_kwargs=None, + eta=0.0, + ): + """ + Sample x_{t-1} from the model using DDIM. + + Same usage as p_sample(). + """ + out_orig = self.p_mean_variance( + model, + x, + t, + clip_denoised=clip_denoised, + denoised_fn=denoised_fn, + model_kwargs=model_kwargs, + ) + if cond_fn is not None: + out = self.condition_score( + cond_fn, out_orig, x, t, model_kwargs=model_kwargs + ) + else: + out = out_orig + # Usually our model outputs epsilon, but we re-derive it + # in case we used x_start or x_prev prediction. + eps = self._predict_eps_from_xstart(x, t, out["pred_xstart"]) + + alpha_bar = _extract_into_tensor(self.alphas_cumprod, t, x.shape) + alpha_bar_prev = _extract_into_tensor(self.alphas_cumprod_prev, t, x.shape) + sigma = ( + eta + * th.sqrt((1 - alpha_bar_prev) / (1 - alpha_bar)) + * th.sqrt(1 - alpha_bar / alpha_bar_prev) + ) + noise = th.randn_like(x) + + mean_pred = ( + out["pred_xstart"] * th.sqrt(alpha_bar_prev) + + th.sqrt(1 - alpha_bar_prev - sigma**2) * eps + ) + nonzero_mask = ( + (t != 0).float().view(-1, *([1] * (len(x.shape) - 1))) + ) # no noise when t == 0 + sample = mean_pred + nonzero_mask * sigma * noise + return {"sample": sample, "pred_xstart": out_orig["pred_xstart"]} + + def ddim_sample_with_grad( + self, + model, + x, + t, + clip_denoised=True, + denoised_fn=None, + cond_fn=None, + model_kwargs=None, + eta=0.0, + ): + """ + Sample x_{t-1} from the model using DDIM. + + Same usage as p_sample(). + """ + with th.enable_grad(): + x = x.detach().requires_grad_() + out_orig = self.p_mean_variance( + model, + x, + t, + clip_denoised=clip_denoised, + denoised_fn=denoised_fn, + model_kwargs=model_kwargs, + ) + if cond_fn is not None: + out = self.condition_score_with_grad( + cond_fn, out_orig, x, t, model_kwargs=model_kwargs + ) + else: + out = out_orig + + out["pred_xstart"] = out["pred_xstart"].detach() + # Usually our model outputs epsilon, but we re-derive it + # in case we used x_start or x_prev prediction. 
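+        # DDIM update (Eq. 12 of Song et al.): with x0 = pred_xstart,
+        #   x_{t-1} = sqrt(alpha_bar_prev) * x0
+        #           + sqrt(1 - alpha_bar_prev - sigma^2) * eps
+        #           + sigma * z,   z ~ N(0, I); no noise is added when t == 0.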
+ eps = self._predict_eps_from_xstart(x, t, out["pred_xstart"]) + + alpha_bar = _extract_into_tensor(self.alphas_cumprod, t, x.shape) + alpha_bar_prev = _extract_into_tensor(self.alphas_cumprod_prev, t, x.shape) + sigma = ( + eta + * th.sqrt((1 - alpha_bar_prev) / (1 - alpha_bar)) + * th.sqrt(1 - alpha_bar / alpha_bar_prev) + ) + # Equation 12. + noise = th.randn_like(x) + mean_pred = ( + out["pred_xstart"] * th.sqrt(alpha_bar_prev) + + th.sqrt(1 - alpha_bar_prev - sigma**2) * eps + ) + nonzero_mask = ( + (t != 0).float().view(-1, *([1] * (len(x.shape) - 1))) + ) # no noise when t == 0 + sample = mean_pred + nonzero_mask * sigma * noise + return {"sample": sample, "pred_xstart": out_orig["pred_xstart"].detach()} + + def ddim_reverse_sample( + self, + model, + x, + t, + clip_denoised=True, + denoised_fn=None, + model_kwargs=None, + eta=0.0, + ): + """ + Sample x_{t+1} from the model using DDIM reverse ODE. + """ + assert eta == 0.0, "Reverse ODE only for deterministic path" + out = self.p_mean_variance( + model, + x, + t, + clip_denoised=clip_denoised, + denoised_fn=denoised_fn, + model_kwargs=model_kwargs, + ) + # Usually our model outputs epsilon, but we re-derive it + # in case we used x_start or x_prev prediction. + eps = ( + _extract_into_tensor(self.sqrt_recip_alphas_cumprod, t, x.shape) * x + - out["pred_xstart"] + ) / _extract_into_tensor(self.sqrt_recipm1_alphas_cumprod, t, x.shape) + alpha_bar_next = _extract_into_tensor(self.alphas_cumprod_next, t, x.shape) + + # Equation 12. reversed + mean_pred = ( + out["pred_xstart"] * th.sqrt(alpha_bar_next) + + th.sqrt(1 - alpha_bar_next) * eps + ) + + return {"sample": mean_pred, "pred_xstart": out["pred_xstart"]} + + def ddim_sample_loop( + self, + model, + shape, + noise=None, + clip_denoised=True, + denoised_fn=None, + cond_fn=None, + model_kwargs=None, + device=None, + progress=False, + eta=0.0, + skip_timesteps=0, + init_image=None, + randomize_class=False, + cond_fn_with_grad=False, + dump_steps=None, + const_noise=False, + ): + """ + Generate samples from the model using DDIM. + + Same usage as p_sample_loop(). + """ + if dump_steps is not None: + raise NotImplementedError() + if const_noise == True: + raise NotImplementedError() + + final = None + for sample in self.ddim_sample_loop_progressive( + model, + shape, + noise=noise, + clip_denoised=clip_denoised, + denoised_fn=denoised_fn, + cond_fn=cond_fn, + model_kwargs=model_kwargs, + device=device, + progress=progress, + eta=eta, + skip_timesteps=skip_timesteps, + init_image=init_image, + randomize_class=randomize_class, + cond_fn_with_grad=cond_fn_with_grad, + ): + final = sample + return final["pred_xstart"] + + def ddim_sample_loop_progressive( + self, + model, + shape, + noise=None, + clip_denoised=True, + denoised_fn=None, + cond_fn=None, + model_kwargs=None, + device=None, + progress=False, + eta=0.0, + skip_timesteps=0, + init_image=None, + randomize_class=False, + cond_fn_with_grad=False, + ): + """ + Use DDIM to sample from the model and yield intermediate samples from + each timestep of DDIM. + + Same usage as p_sample_loop_progressive(). 
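+
+        A minimal usage sketch (illustrative only; argument values are
+        assumptions)::
+
+            for out in diffusion.ddim_sample_loop_progressive(
+                model, shape, eta=0.0, model_kwargs=model_kwargs
+            ):
+                x = out["sample"]  # eta=0.0 follows the deterministic DDIM path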
+ """ + if device is None: + device = next(model.parameters()).device + assert isinstance(shape, (tuple, list)) + if noise is not None: + img = noise + else: + img = th.randn(*shape, device=device) + + if skip_timesteps and init_image is None: + init_image = th.zeros_like(img) + + indices = list(range(self.num_timesteps - skip_timesteps))[::-1] + + if init_image is not None: + my_t = th.ones([shape[0]], device=device, dtype=th.long) * indices[0] + img = self.q_sample(init_image, my_t, img) + + if progress: + # Lazy import so that we don't depend on tqdm. + from tqdm.auto import tqdm + + indices = tqdm(indices) + + for i in indices: + t = th.tensor([i] * shape[0], device=device) + if randomize_class and "y" in model_kwargs: + model_kwargs["y"] = th.randint( + low=0, + high=model.num_classes, + size=model_kwargs["y"].shape, + device=model_kwargs["y"].device, + ) + with th.no_grad(): + sample_fn = ( + self.ddim_sample_with_grad + if cond_fn_with_grad + else self.ddim_sample + ) + out = sample_fn( + model, + img, + t, + clip_denoised=clip_denoised, + denoised_fn=denoised_fn, + cond_fn=cond_fn, + model_kwargs=model_kwargs, + eta=eta, + ) + yield out + img = out["sample"] + + def plms_sample( + self, + model, + x, + t, + clip_denoised=True, + denoised_fn=None, + cond_fn=None, + model_kwargs=None, + cond_fn_with_grad=False, + order=2, + old_out=None, + ): + """ + Sample x_{t-1} from the model using Pseudo Linear Multistep. + + Same usage as p_sample(). + """ + if not int(order) or not 1 <= order <= 4: + raise ValueError("order is invalid (should be int from 1-4).") + + def get_model_output(x, t): + with th.set_grad_enabled(cond_fn_with_grad and cond_fn is not None): + x = x.detach().requires_grad_() if cond_fn_with_grad else x + out_orig = self.p_mean_variance( + model, + x, + t, + clip_denoised=clip_denoised, + denoised_fn=denoised_fn, + model_kwargs=model_kwargs, + ) + if cond_fn is not None: + if cond_fn_with_grad: + out = self.condition_score_with_grad( + cond_fn, out_orig, x, t, model_kwargs=model_kwargs + ) + x = x.detach() + else: + out = self.condition_score( + cond_fn, out_orig, x, t, model_kwargs=model_kwargs + ) + else: + out = out_orig + + # Usually our model outputs epsilon, but we re-derive it + # in case we used x_start or x_prev prediction. 
+ eps = self._predict_eps_from_xstart(x, t, out["pred_xstart"]) + return eps, out, out_orig + + alpha_bar = _extract_into_tensor(self.alphas_cumprod, t, x.shape) + alpha_bar_prev = _extract_into_tensor(self.alphas_cumprod_prev, t, x.shape) + eps, out, out_orig = get_model_output(x, t) + + if order > 1 and old_out is None: + # Pseudo Improved Euler + old_eps = [eps] + mean_pred = ( + out["pred_xstart"] * th.sqrt(alpha_bar_prev) + + th.sqrt(1 - alpha_bar_prev) * eps + ) + eps_2, _, _ = get_model_output(mean_pred, t - 1) + eps_prime = (eps + eps_2) / 2 + pred_prime = self._predict_xstart_from_eps(x, t, eps_prime) + mean_pred = ( + pred_prime * th.sqrt(alpha_bar_prev) + + th.sqrt(1 - alpha_bar_prev) * eps_prime + ) + else: + # Pseudo Linear Multistep (Adams-Bashforth) + old_eps = old_out["old_eps"] + old_eps.append(eps) + cur_order = min(order, len(old_eps)) + if cur_order == 1: + eps_prime = old_eps[-1] + elif cur_order == 2: + eps_prime = (3 * old_eps[-1] - old_eps[-2]) / 2 + elif cur_order == 3: + eps_prime = (23 * old_eps[-1] - 16 * old_eps[-2] + 5 * old_eps[-3]) / 12 + elif cur_order == 4: + eps_prime = ( + 55 * old_eps[-1] + - 59 * old_eps[-2] + + 37 * old_eps[-3] + - 9 * old_eps[-4] + ) / 24 + else: + raise RuntimeError("cur_order is invalid.") + pred_prime = self._predict_xstart_from_eps(x, t, eps_prime) + mean_pred = ( + pred_prime * th.sqrt(alpha_bar_prev) + + th.sqrt(1 - alpha_bar_prev) * eps_prime + ) + + if len(old_eps) >= order: + old_eps.pop(0) + + nonzero_mask = (t != 0).float().view(-1, *([1] * (len(x.shape) - 1))) + sample = mean_pred * nonzero_mask + out["pred_xstart"] * (1 - nonzero_mask) + + return { + "sample": sample, + "pred_xstart": out_orig["pred_xstart"], + "old_eps": old_eps, + } + + def plms_sample_loop( + self, + model, + shape, + noise=None, + clip_denoised=True, + denoised_fn=None, + cond_fn=None, + model_kwargs=None, + device=None, + progress=False, + skip_timesteps=0, + init_image=None, + randomize_class=False, + cond_fn_with_grad=False, + order=2, + ): + """ + Generate samples from the model using Pseudo Linear Multistep. + + Same usage as p_sample_loop(). + """ + final = None + for sample in self.plms_sample_loop_progressive( + model, + shape, + noise=noise, + clip_denoised=clip_denoised, + denoised_fn=denoised_fn, + cond_fn=cond_fn, + model_kwargs=model_kwargs, + device=device, + progress=progress, + skip_timesteps=skip_timesteps, + init_image=init_image, + randomize_class=randomize_class, + cond_fn_with_grad=cond_fn_with_grad, + order=order, + ): + final = sample + return final["sample"] + + def plms_sample_loop_progressive( + self, + model, + shape, + noise=None, + clip_denoised=True, + denoised_fn=None, + cond_fn=None, + model_kwargs=None, + device=None, + progress=False, + skip_timesteps=0, + init_image=None, + randomize_class=False, + cond_fn_with_grad=False, + order=2, + ): + """ + Use PLMS to sample from the model and yield intermediate samples from each + timestep of PLMS. + + Same usage as p_sample_loop_progressive(). 
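+
+        A minimal usage sketch (illustrative only; order=2 is an assumption)::
+
+            for out in diffusion.plms_sample_loop_progressive(
+                model, shape, order=2, model_kwargs=model_kwargs
+            ):
+                x = out["sample"]  # out also carries "old_eps" between steps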
+ """ + if device is None: + device = next(model.parameters()).device + assert isinstance(shape, (tuple, list)) + if noise is not None: + img = noise + else: + img = th.randn(*shape, device=device) + + if skip_timesteps and init_image is None: + init_image = th.zeros_like(img) + + indices = list(range(self.num_timesteps - skip_timesteps))[::-1] + + if init_image is not None: + my_t = th.ones([shape[0]], device=device, dtype=th.long) * indices[0] + img = self.q_sample(init_image, my_t, img) + + if progress: + # Lazy import so that we don't depend on tqdm. + from tqdm.auto import tqdm + + indices = tqdm(indices) + + old_out = None + + for i in indices: + t = th.tensor([i] * shape[0], device=device) + if randomize_class and "y" in model_kwargs: + model_kwargs["y"] = th.randint( + low=0, + high=model.num_classes, + size=model_kwargs["y"].shape, + device=model_kwargs["y"].device, + ) + with th.no_grad(): + out = self.plms_sample( + model, + img, + t, + clip_denoised=clip_denoised, + denoised_fn=denoised_fn, + cond_fn=cond_fn, + model_kwargs=model_kwargs, + cond_fn_with_grad=cond_fn_with_grad, + order=order, + old_out=old_out, + ) + yield out + old_out = out + img = out["sample"] + + def _vb_terms_bpd( + self, model, x_start, x_t, t, clip_denoised=True, model_kwargs=None + ): + """ + Get a term for the variational lower-bound. + + The resulting units are bits (rather than nats, as one might expect). + This allows for comparison to other papers. + + :return: a dict with the following keys: + - 'output': a shape [N] tensor of NLLs or KLs. + - 'pred_xstart': the x_0 predictions. + """ + true_mean, _, true_log_variance_clipped = self.q_posterior_mean_variance( + x_start=x_start, x_t=x_t, t=t + ) + out = self.p_mean_variance( + model, x_t, t, clip_denoised=clip_denoised, model_kwargs=model_kwargs + ) + kl = normal_kl( + true_mean, true_log_variance_clipped, out["mean"], out["log_variance"] + ) + kl = mean_flat(kl) / np.log(2.0) + + decoder_nll = -discretized_gaussian_log_likelihood( + x_start, means=out["mean"], log_scales=0.5 * out["log_variance"] + ) + assert decoder_nll.shape == x_start.shape + decoder_nll = mean_flat(decoder_nll) / np.log(2.0) + + # At the first timestep return the decoder NLL, + # otherwise return KL(q(x_{t-1}|x_t,x_0) || p(x_{t-1}|x_t)) + output = th.where((t == 0), decoder_nll, kl) + return {"output": output, "pred_xstart": out["pred_xstart"]} + + def training_losses(self, model, x_start, t, model_kwargs=None, noise=None): + """ + Compute training losses for a single timestep. + + :param model: the model to evaluate loss on. + :param x_start: the [N x C x ...] tensor of inputs. + :param t: a batch of timestep indices. + :param model_kwargs: if not None, a dict of extra keyword arguments to + pass to the model. This can be used for conditioning. + :param noise: if specified, the specific Gaussian noise to try to remove. + :return: a dict with the key "loss" containing a tensor of shape [N]. + Some mean or variance settings may also have other keys. 
+ """ + mask = model_kwargs["y"]["mask"] + if model_kwargs is None: + model_kwargs = {} + if noise is None: + noise = th.randn_like(x_start) + x_t = self.q_sample( + x_start, t, noise=noise + ) # use the formula to diffuse the starting tensor by t steps + terms = {} + + # set random dropout for conditioning in training + model_kwargs["cond_drop_prob"] = 0.2 + model_output = model(x_t, self._scale_timesteps(t), **model_kwargs) + target = { + ModelMeanType.PREVIOUS_X: self.q_posterior_mean_variance( + x_start=x_start, x_t=x_t, t=t + )[0], + ModelMeanType.START_X: x_start, + ModelMeanType.EPSILON: noise, + }[self.model_mean_type] + + model_output = model_output.permute(0, 2, 1).unsqueeze(2) + assert model_output.shape == target.shape == x_start.shape + + missing_mask = model_kwargs["y"]["missing"][..., 0] + missing_mask = missing_mask.unsqueeze(1).unsqueeze(1) + missing_mask = mask * missing_mask + terms["rot_mse"] = self.masked_l2(target, model_output, missing_mask) + if self.lambda_vel > 0.0: + target_vel = target[..., 1:] - target[..., :-1] + model_output_vel = model_output[..., 1:] - model_output[..., :-1] + terms["vel_mse"] = self.masked_l2( + target_vel, + model_output_vel, + mask[:, :, :, 1:], + ) + + terms["loss"] = terms["rot_mse"] + (self.lambda_vel * terms.get("vel_mse", 0.0)) + + with torch.no_grad(): + terms["vb"] = self._vb_terms_bpd( + model, + x_start, + x_t, + t, + clip_denoised=False, + model_kwargs=model_kwargs, + )["output"] + + return terms + + +def _extract_into_tensor(arr, timesteps, broadcast_shape): + """ + Extract values from a 1-D numpy array for a batch of indices. + + :param arr: the 1-D numpy array. + :param timesteps: a tensor of indices into the array to extract. + :param broadcast_shape: a larger shape of K dimensions with the batch + dimension equal to the length of timesteps. + :return: a tensor of shape [batch_size, 1, ...] where the shape has K dims. + """ + res = th.from_numpy(arr).to(device=timesteps.device)[timesteps].float() + while len(res.shape) < len(broadcast_shape): + res = res[..., None] + return res.expand(broadcast_shape) diff --git a/diffusion/losses.py b/diffusion/losses.py new file mode 100644 index 0000000000000000000000000000000000000000..c3234f4cff570266a826670e80677bbd6ffd0a74 --- /dev/null +++ b/diffusion/losses.py @@ -0,0 +1,83 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +""" +Helpers for various likelihood-based losses. These are ported from the original +Ho et al. diffusion models codebase: +https://github.com/hojonathanho/diffusion/blob/1e0dceb3b3495bbe19116a5e1b3596cd0706c543/diffusion_tf/utils.py +""" + +import numpy as np +import torch as th + + +def normal_kl(mean1, logvar1, mean2, logvar2): + """ + Compute the KL divergence between two gaussians. + + Shapes are automatically broadcasted, so batches can be compared to + scalars, among other use cases. + """ + tensor = None + for obj in (mean1, logvar1, mean2, logvar2): + if isinstance(obj, th.Tensor): + tensor = obj + break + assert tensor is not None, "at least one argument must be a Tensor" + + # Force variances to be Tensors. Broadcasting helps convert scalars to + # Tensors, but it does not work for th.exp(). 
+ logvar1, logvar2 = [ + x if isinstance(x, th.Tensor) else th.tensor(x).to(tensor) + for x in (logvar1, logvar2) + ] + + return 0.5 * ( + -1.0 + + logvar2 + - logvar1 + + th.exp(logvar1 - logvar2) + + ((mean1 - mean2) ** 2) * th.exp(-logvar2) + ) + + +def approx_standard_normal_cdf(x): + """ + A fast approximation of the cumulative distribution function of the + standard normal. + """ + return 0.5 * (1.0 + th.tanh(np.sqrt(2.0 / np.pi) * (x + 0.044715 * th.pow(x, 3)))) + + +def discretized_gaussian_log_likelihood(x, *, means, log_scales): + """ + Compute the log-likelihood of a Gaussian distribution discretizing to a + given image. + + :param x: the target images. It is assumed that this was uint8 values, + rescaled to the range [-1, 1]. + :param means: the Gaussian mean Tensor. + :param log_scales: the Gaussian log stddev Tensor. + :return: a tensor like x of log probabilities (in nats). + """ + assert x.shape == means.shape == log_scales.shape + centered_x = x - means + inv_stdv = th.exp(-log_scales) + plus_in = inv_stdv * (centered_x + 1.0 / 255.0) + cdf_plus = approx_standard_normal_cdf(plus_in) + min_in = inv_stdv * (centered_x - 1.0 / 255.0) + cdf_min = approx_standard_normal_cdf(min_in) + log_cdf_plus = th.log(cdf_plus.clamp(min=1e-12)) + log_one_minus_cdf_min = th.log((1.0 - cdf_min).clamp(min=1e-12)) + cdf_delta = cdf_plus - cdf_min + log_probs = th.where( + x < -0.999, + log_cdf_plus, + th.where(x > 0.999, log_one_minus_cdf_min, th.log(cdf_delta.clamp(min=1e-12))), + ) + assert log_probs.shape == x.shape + return log_probs diff --git a/diffusion/nn.py b/diffusion/nn.py new file mode 100644 index 0000000000000000000000000000000000000000..f5a4169aafa14b1b2d7c45413615a613b3c9d1b9 --- /dev/null +++ b/diffusion/nn.py @@ -0,0 +1,213 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +""" +original code from +https://github.com/GuyTevet/motion-diffusion-model/blob/main/diffusion/gaussian_diffusion.py +under an MIT license +https://github.com/GuyTevet/motion-diffusion-model/blob/main/LICENSE +""" + +""" +Various utilities for neural networks. +""" + +import math + +import torch as th +import torch.nn as nn + + +# PyTorch 1.7 has SiLU, but we support PyTorch 1.5. +class SiLU(nn.Module): + def forward(self, x): + return x * th.sigmoid(x) + + +class GroupNorm32(nn.GroupNorm): + def forward(self, x): + return super().forward(x.float()).type(x.dtype) + + +def conv_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D convolution module. + """ + if dims == 1: + return nn.Conv1d(*args, **kwargs) + elif dims == 2: + return nn.Conv2d(*args, **kwargs) + elif dims == 3: + return nn.Conv3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +def linear(*args, **kwargs): + """ + Create a linear module. + """ + return nn.Linear(*args, **kwargs) + + +def avg_pool_nd(dims, *args, **kwargs): + """ + Create a 1D, 2D, or 3D average pooling module. + """ + if dims == 1: + return nn.AvgPool1d(*args, **kwargs) + elif dims == 2: + return nn.AvgPool2d(*args, **kwargs) + elif dims == 3: + return nn.AvgPool3d(*args, **kwargs) + raise ValueError(f"unsupported dimensions: {dims}") + + +def update_ema(target_params, source_params, rate=0.99): + """ + Update target parameters to be closer to those of source parameters using + an exponential moving average. + + :param target_params: the target parameter sequence. 
+ :param source_params: the source parameter sequence. + :param rate: the EMA rate (closer to 1 means slower). + """ + for targ, src in zip(target_params, source_params): + targ.detach().mul_(rate).add_(src, alpha=1 - rate) + + +def zero_module(module): + """ + Zero out the parameters of a module and return it. + """ + for p in module.parameters(): + p.detach().zero_() + return module + + +def scale_module(module, scale): + """ + Scale the parameters of a module and return it. + """ + for p in module.parameters(): + p.detach().mul_(scale) + return module + + +def mean_flat(tensor): + """ + Take the mean over all non-batch dimensions. + """ + return tensor.mean(dim=list(range(1, len(tensor.shape)))) + + +def sum_flat(tensor): + """ + Take the sum over all non-batch dimensions. + """ + return tensor.sum(dim=list(range(1, len(tensor.shape)))) + + +def normalization(channels): + """ + Make a standard normalization layer. + + :param channels: number of input channels. + :return: an nn.Module for normalization. + """ + return GroupNorm32(32, channels) + + +def timestep_embedding(timesteps, dim, max_period=10000): + """ + Create sinusoidal timestep embeddings. + + :param timesteps: a 1-D Tensor of N indices, one per batch element. + These may be fractional. + :param dim: the dimension of the output. + :param max_period: controls the minimum frequency of the embeddings. + :return: an [N x dim] Tensor of positional embeddings. + """ + half = dim // 2 + freqs = th.exp( + -math.log(max_period) * th.arange(start=0, end=half, dtype=th.float32) / half + ).to(device=timesteps.device) + args = timesteps[:, None].float() * freqs[None] + embedding = th.cat([th.cos(args), th.sin(args)], dim=-1) + if dim % 2: + embedding = th.cat([embedding, th.zeros_like(embedding[:, :1])], dim=-1) + return embedding + + +def checkpoint(func, inputs, params, flag): + """ + Evaluate a function without caching intermediate activations, allowing for + reduced memory at the expense of extra compute in the backward pass. + :param func: the function to evaluate. + :param inputs: the argument sequence to pass to `func`. + :param params: a sequence of parameters `func` depends on but does not + explicitly take as arguments. + :param flag: if False, disable gradient checkpointing. + """ + if flag: + args = tuple(inputs) + tuple(params) + return CheckpointFunction.apply(func, len(inputs), *args) + else: + return func(*inputs) + + +class CheckpointFunction(th.autograd.Function): + @staticmethod + @th.cuda.amp.custom_fwd + def forward(ctx, run_function, length, *args): + ctx.run_function = run_function + ctx.input_length = length + ctx.save_for_backward(*args) + with th.no_grad(): + output_tensors = ctx.run_function(*args[:length]) + return output_tensors + + @staticmethod + @th.cuda.amp.custom_bwd + def backward(ctx, *output_grads): + args = list(ctx.saved_tensors) + + # Filter for inputs that require grad. If none, exit early. + input_indices = [i for (i, x) in enumerate(args) if x.requires_grad] + if not input_indices: + return (None, None) + tuple(None for _ in args) + + with th.enable_grad(): + for i in input_indices: + if i < ctx.input_length: + # Not sure why the OAI code does this little + # dance. It might not be necessary. + args[i] = args[i].detach().requires_grad_() + args[i] = args[i].view_as(args[i]) + output_tensors = ctx.run_function(*args[: ctx.input_length]) + + if isinstance(output_tensors, th.Tensor): + output_tensors = [output_tensors] + + # Filter for outputs that require grad. If none, exit early. 
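+        # th.autograd.grad raises an error when asked for gradients of
+        # outputs that do not require grad, so pair each output with its
+        # incoming gradient and keep only the differentiable ones.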
+ out_and_grads = [ + (o, g) for (o, g) in zip(output_tensors, output_grads) if o.requires_grad + ] + if not out_and_grads: + return (None, None) + tuple(None for _ in args) + + # Compute gradients on the filtered tensors. + computed_grads = th.autograd.grad( + [o for (o, g) in out_and_grads], + [args[i] for i in input_indices], + [g for (o, g) in out_and_grads], + ) + + # Reassemble the complete gradient tuple. + input_grads = [None for _ in args] + for i, g in zip(input_indices, computed_grads): + input_grads[i] = g + return (None, None) + tuple(input_grads) diff --git a/diffusion/resample.py b/diffusion/resample.py new file mode 100644 index 0000000000000000000000000000000000000000..18f2633ed2d4d764de53bff483fc854a73b2145f --- /dev/null +++ b/diffusion/resample.py @@ -0,0 +1,168 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +""" +original code from +https://github.com/GuyTevet/motion-diffusion-model/blob/main/diffusion/gaussian_diffusion.py +under an MIT license +https://github.com/GuyTevet/motion-diffusion-model/blob/main/LICENSE +""" + +from abc import ABC, abstractmethod + +import numpy as np +import torch as th +import torch.distributed as dist + + +def create_named_schedule_sampler(name, diffusion): + """ + Create a ScheduleSampler from a library of pre-defined samplers. + + :param name: the name of the sampler. + :param diffusion: the diffusion object to sample for. + """ + if name == "uniform": + return UniformSampler(diffusion) + elif name == "loss-second-moment": + return LossSecondMomentResampler(diffusion) + else: + raise NotImplementedError(f"unknown schedule sampler: {name}") + + +class ScheduleSampler(ABC): + """ + A distribution over timesteps in the diffusion process, intended to reduce + variance of the objective. + + By default, samplers perform unbiased importance sampling, in which the + objective's mean is unchanged. + However, subclasses may override sample() to change how the resampled + terms are reweighted, allowing for actual changes in the objective. + """ + + @abstractmethod + def weights(self): + """ + Get a numpy array of weights, one per diffusion step. + + The weights needn't be normalized, but must be positive. + """ + + def sample(self, batch_size, device): + """ + Importance-sample timesteps for a batch. + + :param batch_size: the number of timesteps. + :param device: the torch device to save to. + :return: a tuple (timesteps, weights): + - timesteps: a tensor of timestep indices. + - weights: a tensor of weights to scale the resulting losses. + """ + w = self.weights() + p = w / np.sum(w) + indices_np = np.random.choice(len(p), size=(batch_size,), p=p) + indices = th.from_numpy(indices_np).long().to(device) + weights_np = 1 / (len(p) * p[indices_np]) + weights = th.from_numpy(weights_np).float().to(device) + return indices, weights + + +class UniformSampler(ScheduleSampler): + def __init__(self, diffusion): + self.diffusion = diffusion + self._weights = np.ones([diffusion.num_timesteps]) + + def weights(self): + return self._weights + + +class LossAwareSampler(ScheduleSampler): + def update_with_local_losses(self, local_ts, local_losses): + """ + Update the reweighting using losses from a model. + + Call this method from each rank with a batch of timesteps and the + corresponding losses for each of those timesteps. 
+        This method will perform synchronization to make sure all of the ranks
+        maintain the exact same reweighting.
+
+        :param local_ts: an integer Tensor of timesteps.
+        :param local_losses: a 1D Tensor of losses.
+        """
+        batch_sizes = [
+            th.tensor([0], dtype=th.int32, device=local_ts.device)
+            for _ in range(dist.get_world_size())
+        ]
+        dist.all_gather(
+            batch_sizes,
+            th.tensor([len(local_ts)], dtype=th.int32, device=local_ts.device),
+        )
+
+        # Pad all_gather batches to be the maximum batch size.
+        batch_sizes = [x.item() for x in batch_sizes]
+        max_bs = max(batch_sizes)
+
+        timestep_batches = [th.zeros(max_bs).to(local_ts) for bs in batch_sizes]
+        loss_batches = [th.zeros(max_bs).to(local_losses) for bs in batch_sizes]
+        dist.all_gather(timestep_batches, local_ts)
+        dist.all_gather(loss_batches, local_losses)
+        timesteps = [
+            x.item() for y, bs in zip(timestep_batches, batch_sizes) for x in y[:bs]
+        ]
+        losses = [x.item() for y, bs in zip(loss_batches, batch_sizes) for x in y[:bs]]
+        self.update_with_all_losses(timesteps, losses)
+
+    @abstractmethod
+    def update_with_all_losses(self, ts, losses):
+        """
+        Update the reweighting using losses from a model.
+
+        Sub-classes should override this method to update the reweighting
+        using losses from the model.
+
+        This method directly updates the reweighting without synchronizing
+        between workers. It is called by update_with_local_losses from all
+        ranks with identical arguments. Thus, it should have deterministic
+        behavior to maintain state across workers.
+
+        :param ts: a list of int timesteps.
+        :param losses: a list of float losses, one per timestep.
+        """
+
+
+class LossSecondMomentResampler(LossAwareSampler):
+    def __init__(self, diffusion, history_per_term=10, uniform_prob=0.001):
+        self.diffusion = diffusion
+        self.history_per_term = history_per_term
+        self.uniform_prob = uniform_prob
+        self._loss_history = np.zeros(
+            [diffusion.num_timesteps, history_per_term], dtype=np.float64
+        )
+        self._loss_counts = np.zeros([diffusion.num_timesteps], dtype=int)
+
+    def weights(self):
+        if not self._warmed_up():
+            return np.ones([self.diffusion.num_timesteps], dtype=np.float64)
+        weights = np.sqrt(np.mean(self._loss_history ** 2, axis=-1))
+        weights /= np.sum(weights)
+        weights *= 1 - self.uniform_prob
+        weights += self.uniform_prob / len(weights)
+        return weights
+
+    def update_with_all_losses(self, ts, losses):
+        for t, loss in zip(ts, losses):
+            if self._loss_counts[t] == self.history_per_term:
+                # Shift out the oldest loss term.
+                self._loss_history[t, :-1] = self._loss_history[t, 1:]
+                self._loss_history[t, -1] = loss
+            else:
+                self._loss_history[t, self._loss_counts[t]] = loss
+                self._loss_counts[t] += 1
+
+    def _warmed_up(self):
+        return (self._loss_counts == self.history_per_term).all()
diff --git a/diffusion/respace.py b/diffusion/respace.py
new file mode 100644
index 0000000000000000000000000000000000000000..6f7a7f7a28fa20ff6c61d3db39cd5757ac6ea063
--- /dev/null
+++ b/diffusion/respace.py
@@ -0,0 +1,145 @@
+"""
+Copyright (c) Meta Platforms, Inc. and affiliates.
+All rights reserved.
+This source code is licensed under the license found in the
+LICENSE file in the root directory of this source tree.
+""" + +""" +original code from +https://github.com/GuyTevet/motion-diffusion-model/blob/main/diffusion/gaussian_diffusion.py +under an MIT license +https://github.com/GuyTevet/motion-diffusion-model/blob/main/LICENSE +""" + +import numpy as np +import torch as th + +from .gaussian_diffusion import GaussianDiffusion + + +def space_timesteps(num_timesteps, section_counts): + """ + Create a list of timesteps to use from an original diffusion process, + given the number of timesteps we want to take from equally-sized portions + of the original process. + + For example, if there's 300 timesteps and the section counts are [10,15,20] + then the first 100 timesteps are strided to be 10 timesteps, the second 100 + are strided to be 15 timesteps, and the final 100 are strided to be 20. + + If the stride is a string starting with "ddim", then the fixed striding + from the DDIM paper is used, and only one section is allowed. + + :param num_timesteps: the number of diffusion steps in the original + process to divide up. + :param section_counts: either a list of numbers, or a string containing + comma-separated numbers, indicating the step count + per section. As a special case, use "ddimN" where N + is a number of steps to use the striding from the + DDIM paper. + :return: a set of diffusion steps from the original process to use. + """ + if isinstance(section_counts, str): + if section_counts.startswith("ddim"): + desired_count = int(section_counts[len("ddim") :]) + for i in range(1, num_timesteps): + if len(range(0, num_timesteps, i)) == desired_count: + return set(range(0, num_timesteps, i)) + raise ValueError( + f"cannot create exactly {num_timesteps} steps with an integer stride" + ) + section_counts = [int(x) for x in section_counts.split(",")] + size_per = num_timesteps // len(section_counts) + extra = num_timesteps % len(section_counts) + start_idx = 0 + all_steps = [] + for i, section_count in enumerate(section_counts): + size = size_per + (1 if i < extra else 0) + if size < section_count: + raise ValueError( + f"cannot divide section of {size} steps into {section_count}" + ) + if section_count <= 1: + frac_stride = 1 + else: + frac_stride = (size - 1) / (section_count - 1) + cur_idx = 0.0 + taken_steps = [] + for _ in range(section_count): + taken_steps.append(start_idx + round(cur_idx)) + cur_idx += frac_stride + all_steps += taken_steps + start_idx += size + return set(all_steps) + + +class SpacedDiffusion(GaussianDiffusion): + """ + A diffusion process which can skip steps in a base diffusion process. + + :param use_timesteps: a collection (sequence or set) of timesteps from the + original diffusion process to retain. + :param kwargs: the kwargs to create the base diffusion process. 
+ """ + + def __init__(self, use_timesteps, **kwargs): + self.use_timesteps = set(use_timesteps) + self.timestep_map = [] + self.original_num_steps = len(kwargs["betas"]) + + base_diffusion = GaussianDiffusion(**kwargs) # pylint: disable=missing-kwoa + last_alpha_cumprod = 1.0 + new_betas = [] + for i, alpha_cumprod in enumerate(base_diffusion.alphas_cumprod): + if i in self.use_timesteps: + new_betas.append(1 - alpha_cumprod / last_alpha_cumprod) + last_alpha_cumprod = alpha_cumprod + self.timestep_map.append(i) + kwargs["betas"] = np.array(new_betas) + super().__init__(**kwargs) + + def p_mean_variance( + self, model, *args, **kwargs + ): # pylint: disable=signature-differs + return super().p_mean_variance(self._wrap_model(model), *args, **kwargs) + + def training_losses( + self, model, *args, **kwargs + ): # pylint: disable=signature-differs + return super().training_losses(self._wrap_model(model), *args, **kwargs) + + def condition_mean(self, cond_fn, *args, **kwargs): + return super().condition_mean(self._wrap_model(cond_fn), *args, **kwargs) + + def condition_score(self, cond_fn, *args, **kwargs): + return super().condition_score(self._wrap_model(cond_fn), *args, **kwargs) + + def _wrap_model(self, model): + if isinstance(model, _WrappedModel): + return model + return _WrappedModel( + model, self.timestep_map, self.rescale_timesteps, self.original_num_steps + ) + + def _scale_timesteps(self, t): + # Scaling is done by the wrapped model. + return t + + +class _WrappedModel: + def __init__(self, model, timestep_map, rescale_timesteps, original_num_steps): + self.model = model + if hasattr(model, "step"): + self.step = model.step + self.add_frame_cond = model.add_frame_cond + self.timestep_map = timestep_map + self.rescale_timesteps = rescale_timesteps + self.original_num_steps = original_num_steps + + def __call__(self, x, ts, **kwargs): + map_tensor = th.tensor(self.timestep_map, device=ts.device, dtype=ts.dtype) + new_ts = map_tensor[ts] + if self.rescale_timesteps: + new_ts = new_ts.float() * (1000.0 / self.original_num_steps) + return self.model(x, new_ts, **kwargs) diff --git a/flagged/audio/b90d90dbca93f47e8d01/audio.wav b/flagged/audio/b90d90dbca93f47e8d01/audio.wav new file mode 100644 index 0000000000000000000000000000000000000000..7dcca3910551f767644e57d44d1f31679f09eb09 Binary files /dev/null and b/flagged/audio/b90d90dbca93f47e8d01/audio.wav differ diff --git a/flagged/audio/d8e03e2e6deae2f981b1/audio.wav b/flagged/audio/d8e03e2e6deae2f981b1/audio.wav new file mode 100644 index 0000000000000000000000000000000000000000..7dcca3910551f767644e57d44d1f31679f09eb09 Binary files /dev/null and b/flagged/audio/d8e03e2e6deae2f981b1/audio.wav differ diff --git a/flagged/log.csv b/flagged/log.csv new file mode 100644 index 0000000000000000000000000000000000000000..5a39f5607523b0e36a9741d7df544a935108fc27 --- /dev/null +++ b/flagged/log.csv @@ -0,0 +1,4 @@ +audio,Number of Samples (default = 3),Sample Diversity (default = 0.97),output 0,output 1,output 2,output 3,output 4,output 5,output 6,output 7,output 8,output 9,flag,username,timestamp +,1,0.69,,,,,,,,,,,,,2024-07-15 05:46:49.672259 +flagged/audio/d8e03e2e6deae2f981b1/audio.wav,1,0.69,,,,,,,,,,,,,2024-07-15 06:28:21.003877 +flagged/audio/b90d90dbca93f47e8d01/audio.wav,1,0.69,,,,,,,,,,,,,2024-07-15 06:28:24.442449 diff --git a/model/cfg_sampler.py b/model/cfg_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..a017b98486f6ae0788f8b161652a5e7ef5d7d0c4 --- /dev/null +++ b/model/cfg_sampler.py @@ -0,0 +1,33 
@@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +from copy import deepcopy + +import numpy as np +import torch +import torch.nn as nn + + +# A wrapper model for Classifier-free guidance **SAMPLING** only +# https://arxiv.org/abs/2207.12598 +class ClassifierFreeSampleModel(nn.Module): + def __init__(self, model): + super().__init__() + self.model = model # model is the actual model to run + self.nfeats = self.model.nfeats + self.cond_mode = self.model.cond_mode + self.add_frame_cond = self.model.add_frame_cond + if self.add_frame_cond is not None: + if self.model.resume_trans is not None: + self.transformer = self.model.transformer + self.tokenizer = self.model.tokenizer + self.step = self.model.step + + def forward(self, x, timesteps, y=None): + out = self.model(x, timesteps, y, cond_drop_prob=0.0) + out_uncond = self.model(x, timesteps, y, cond_drop_prob=1.0) + return out_uncond + (y["scale"].view(-1, 1, 1) * (out - out_uncond)) diff --git a/model/diffusion.py b/model/diffusion.py new file mode 100644 index 0000000000000000000000000000000000000000..10e271c0edd2d4feaa04305a9b0f576e9263c9e7 --- /dev/null +++ b/model/diffusion.py @@ -0,0 +1,403 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +import json +from typing import Callable, Optional + +import torch +import torch.nn as nn +from einops import rearrange +from einops.layers.torch import Rearrange + +from model.guide import GuideTransformer +from model.modules.audio_encoder import Wav2VecEncoder +from model.modules.rotary_embedding_torch import RotaryEmbedding +from model.modules.transformer_modules import ( + DecoderLayerStack, + FiLMTransformerDecoderLayer, + RegressionTransformer, + TransformerEncoderLayerRotary, +) +from model.utils import ( + init_weight, + PositionalEncoding, + prob_mask_like, + setup_lip_regressor, + SinusoidalPosEmb, +) +from model.vqvae import setup_tokenizer +from torch.nn import functional as F +from utils.misc import prGreen, prRed + + +class Audio2LipRegressionTransformer(torch.nn.Module): + def __init__( + self, + n_vertices: int = 338, + causal: bool = False, + train_wav2vec: bool = False, + transformer_encoder_layers: int = 2, + transformer_decoder_layers: int = 4, + ): + super().__init__() + self.n_vertices = n_vertices + + self.audio_encoder = Wav2VecEncoder() + if not train_wav2vec: + self.audio_encoder.eval() + for param in self.audio_encoder.parameters(): + param.requires_grad = False + + self.regression_model = RegressionTransformer( + transformer_encoder_layers=transformer_encoder_layers, + transformer_decoder_layers=transformer_decoder_layers, + d_model=512, + d_cond=512, + num_heads=4, + causal=causal, + ) + self.project_output = torch.nn.Linear(512, self.n_vertices * 3) + + def forward(self, audio): + """ + :param audio: tensor of shape B x T x 1600 + :return: tensor of shape B x T x n_vertices x 3 containing reconstructed lip geometry + """ + B, T = audio.shape[0], audio.shape[1] + + cond = self.audio_encoder(audio) + + x = torch.zeros(B, T, 512, device=audio.device) + x = self.regression_model(x, cond) + x = self.project_output(x) + + verts = x.view(B, T, self.n_vertices, 3) + return verts + + +class FiLMTransformer(nn.Module): + def __init__( + self, + args, + nfeats: int, + 
latent_dim: int = 512, + ff_size: int = 1024, + num_layers: int = 4, + num_heads: int = 4, + dropout: float = 0.1, + cond_feature_dim: int = 4800, + activation: Callable[[torch.Tensor], torch.Tensor] = F.gelu, + use_rotary: bool = True, + cond_mode: str = "audio", + split_type: str = "train", + device: str = "cuda", + **kwargs, + ) -> None: + super().__init__() + self.nfeats = nfeats + self.cond_mode = cond_mode + self.cond_feature_dim = cond_feature_dim + self.add_frame_cond = args.add_frame_cond + self.data_format = args.data_format + self.split_type = split_type + self.device = device + + # positional embeddings + self.rotary = None + self.abs_pos_encoding = nn.Identity() + # if rotary, replace absolute embedding with a rotary embedding instance (absolute becomes an identity) + if use_rotary: + self.rotary = RotaryEmbedding(dim=latent_dim) + else: + self.abs_pos_encoding = PositionalEncoding( + latent_dim, dropout, batch_first=True + ) + + # time embedding processing + self.time_mlp = nn.Sequential( + SinusoidalPosEmb(latent_dim), + nn.Linear(latent_dim, latent_dim * 4), + nn.Mish(), + ) + self.to_time_cond = nn.Sequential( + nn.Linear(latent_dim * 4, latent_dim), + ) + self.to_time_tokens = nn.Sequential( + nn.Linear(latent_dim * 4, latent_dim * 2), + Rearrange("b (r d) -> b r d", r=2), + ) + + # null embeddings for guidance dropout + self.seq_len = args.max_seq_length + emb_len = 1998 # hardcoded for now + self.null_cond_embed = nn.Parameter(torch.randn(1, emb_len, latent_dim)) + self.null_cond_hidden = nn.Parameter(torch.randn(1, latent_dim)) + self.norm_cond = nn.LayerNorm(latent_dim) + self.setup_audio_models() + + # set up pose/face specific parts of the model + self.input_projection = nn.Linear(self.nfeats, latent_dim) + if self.data_format == "pose": + cond_feature_dim = 1024 + key_feature_dim = 104 + self.step = 30 + self.use_cm = True + self.setup_guide_models(args, latent_dim, key_feature_dim) + self.post_pose_layers = self._build_single_pose_conv(self.nfeats) + self.post_pose_layers.apply(init_weight) + self.final_conv = torch.nn.Conv1d(self.nfeats, self.nfeats, kernel_size=1) + self.receptive_field = 25 + elif self.data_format == "face": + self.use_cm = False + cond_feature_dim = 1024 + 1014 + self.setup_lip_models() + self.cond_encoder = nn.Sequential() + for _ in range(2): + self.cond_encoder.append( + TransformerEncoderLayerRotary( + d_model=latent_dim, + nhead=num_heads, + dim_feedforward=ff_size, + dropout=dropout, + activation=activation, + batch_first=True, + rotary=self.rotary, + ) + ) + self.cond_encoder.apply(init_weight) + + self.cond_projection = nn.Linear(cond_feature_dim, latent_dim) + self.non_attn_cond_projection = nn.Sequential( + nn.LayerNorm(latent_dim), + nn.Linear(latent_dim, latent_dim), + nn.SiLU(), + nn.Linear(latent_dim, latent_dim), + ) + + # decoder + decoderstack = nn.ModuleList([]) + for _ in range(num_layers): + decoderstack.append( + FiLMTransformerDecoderLayer( + latent_dim, + num_heads, + dim_feedforward=ff_size, + dropout=dropout, + activation=activation, + batch_first=True, + rotary=self.rotary, + use_cm=self.use_cm, + ) + ) + self.seqTransDecoder = DecoderLayerStack(decoderstack) + self.seqTransDecoder.apply(init_weight) + self.final_layer = nn.Linear(latent_dim, self.nfeats) + self.final_layer.apply(init_weight) + + def _build_single_pose_conv(self, nfeats: int) -> nn.ModuleList: + post_pose_layers = torch.nn.ModuleList( + [ + torch.nn.Conv1d(nfeats, max(256, nfeats), kernel_size=3, dilation=1), + torch.nn.Conv1d(max(256, nfeats), 
nfeats, kernel_size=3, dilation=2), + torch.nn.Conv1d(nfeats, nfeats, kernel_size=3, dilation=3), + torch.nn.Conv1d(nfeats, nfeats, kernel_size=3, dilation=1), + torch.nn.Conv1d(nfeats, nfeats, kernel_size=3, dilation=2), + torch.nn.Conv1d(nfeats, nfeats, kernel_size=3, dilation=3), + ] + ) + return post_pose_layers + + def _run_single_pose_conv(self, output: torch.Tensor) -> torch.Tensor: + output = torch.nn.functional.pad(output, pad=[self.receptive_field - 1, 0]) + for _, layer in enumerate(self.post_pose_layers): + y = torch.nn.functional.leaky_relu(layer(output), negative_slope=0.2) + if self.split_type == "train": + y = torch.nn.functional.dropout(y, 0.2) + if output.shape[1] == y.shape[1]: + output = (output[:, :, -y.shape[-1] :] + y) / 2.0 # skip connection + else: + output = y + return output + + def setup_guide_models(self, args, latent_dim: int, key_feature_dim: int) -> None: + # set up conditioning info + max_keyframe_len = len(list(range(self.seq_len))[:: self.step]) + self.null_pose_embed = nn.Parameter( + torch.randn(1, max_keyframe_len, latent_dim) + ) + prGreen(f"using keyframes: {self.null_pose_embed.shape}") + self.frame_cond_projection = nn.Linear(key_feature_dim, latent_dim) + self.frame_norm_cond = nn.LayerNorm(latent_dim) + # for test time set up keyframe transformer + self.resume_trans = None + if self.split_type == "test": + if hasattr(args, "resume_trans") and args.resume_trans is not None: + self.resume_trans = args.resume_trans + self.setup_guide_predictor(args.resume_trans) + else: + prRed("not using transformer, just using ground truth") + + def setup_guide_predictor(self, cp_path: str) -> None: + cp_dir = cp_path.split("checkpoints/iter-")[0] + with open(f"{cp_dir}/args.json") as f: + trans_args = json.load(f) + + # set up tokenizer based on trans_arg load point + self.tokenizer = setup_tokenizer(trans_args["resume_pth"]) + + # set up transformer + self.transformer = GuideTransformer( + tokens=self.tokenizer.n_clusters, + num_layers=trans_args["layers"], + dim=trans_args["dim"], + emb_len=1998, + num_audio_layers=trans_args["num_audio_layers"], + ) + for param in self.transformer.parameters(): + param.requires_grad = False + prGreen("loading TRANSFORMER checkpoint from {}".format(cp_path)) + cp = torch.load(cp_path) + missing_keys, unexpected_keys = self.transformer.load_state_dict( + cp["model_state_dict"], strict=False + ) + assert len(missing_keys) == 0, missing_keys + assert len(unexpected_keys) == 0, unexpected_keys + + def setup_audio_models(self) -> None: + self.audio_model, self.audio_resampler = setup_lip_regressor() + + def setup_lip_models(self) -> None: + self.lip_model = Audio2LipRegressionTransformer() + cp_path = "./assets/iter-0200000.pt" + cp = torch.load(cp_path, map_location=torch.device(self.device)) + self.lip_model.load_state_dict(cp["model_state_dict"]) + for param in self.lip_model.parameters(): + param.requires_grad = False + prGreen(f"adding lip conditioning {cp_path}") + + def parameters_w_grad(self): + return [p for p in self.parameters() if p.requires_grad] + + def encode_audio(self, raw_audio: torch.Tensor) -> torch.Tensor: + device = next(self.parameters()).device + a0 = self.audio_resampler(raw_audio[:, :, 0].to(device)) + a1 = self.audio_resampler(raw_audio[:, :, 1].to(device)) + with torch.no_grad(): + z0 = self.audio_model.feature_extractor(a0) + z1 = self.audio_model.feature_extractor(a1) + emb = torch.cat((z0, z1), axis=1).permute(0, 2, 1) + return emb + + def encode_lip(self, audio: torch.Tensor, cond_embed: torch.Tensor) 
-> torch.Tensor: + reshaped_audio = audio.reshape((audio.shape[0], -1, 1600, 2))[..., 0] + # processes 4 seconds at a time + B, T, _ = reshaped_audio.shape + lip_cond = torch.zeros( + (audio.shape[0], T, 338, 3), + device=audio.device, + dtype=audio.dtype, + ) + for i in range(0, T, 120): + lip_cond[:, i : i + 120, ...] = self.lip_model( + reshaped_audio[:, i : i + 120, ...] + ) + lip_cond = lip_cond.permute(0, 2, 3, 1).reshape((B, 338 * 3, -1)) + lip_cond = torch.nn.functional.interpolate( + lip_cond, size=cond_embed.shape[1], mode="nearest-exact" + ).permute(0, 2, 1) + cond_embed = torch.cat((cond_embed, lip_cond), dim=-1) + return cond_embed + + def encode_keyframes( + self, y: torch.Tensor, cond_drop_prob: float, batch_size: int + ) -> torch.Tensor: + pred = y["keyframes"] + new_mask = y["mask"][..., :: self.step].squeeze((1, 2)) + pred[~new_mask] = 0.0 # pad the unknown + pose_hidden = self.frame_cond_projection(pred.detach().clone().cuda()) + pose_embed = self.abs_pos_encoding(pose_hidden) + pose_tokens = self.frame_norm_cond(pose_embed) + # do conditional dropout for guide poses + key_cond_drop_prob = cond_drop_prob + keep_mask_pose = prob_mask_like( + (batch_size,), 1 - key_cond_drop_prob, device=pose_tokens.device + ) + keep_mask_pose_embed = rearrange(keep_mask_pose, "b -> b 1 1") + null_pose_embed = self.null_pose_embed.to(pose_tokens.dtype) + pose_tokens = torch.where( + keep_mask_pose_embed, + pose_tokens, + null_pose_embed[:, : pose_tokens.shape[1], :], + ) + return pose_tokens + + def forward( + self, + x: torch.Tensor, + times: torch.Tensor, + y: Optional[torch.Tensor] = None, + cond_drop_prob: float = 0.0, + ) -> torch.Tensor: + if x.dim() == 4: + x = x.permute(0, 3, 1, 2).squeeze(-1) + batch_size, device = x.shape[0], x.device + if self.cond_mode == "uncond": + cond_embed = torch.zeros( + (x.shape[0], x.shape[1], self.cond_feature_dim), + dtype=x.dtype, + device=x.device, + ) + else: + cond_embed = y["audio"] + cond_embed = self.encode_audio(cond_embed) + if self.data_format == "face": + cond_embed = self.encode_lip(y["audio"], cond_embed) + pose_tokens = None + if self.data_format == "pose": + pose_tokens = self.encode_keyframes(y, cond_drop_prob, batch_size) + assert cond_embed is not None, "cond emb should not be none" + # process conditioning information + x = self.input_projection(x) + x = self.abs_pos_encoding(x) + audio_cond_drop_prob = cond_drop_prob + keep_mask = prob_mask_like( + (batch_size,), 1 - audio_cond_drop_prob, device=device + ) + keep_mask_embed = rearrange(keep_mask, "b -> b 1 1") + keep_mask_hidden = rearrange(keep_mask, "b -> b 1") + cond_tokens = self.cond_projection(cond_embed) + cond_tokens = self.abs_pos_encoding(cond_tokens) + if self.data_format == "face": + cond_tokens = self.cond_encoder(cond_tokens) + null_cond_embed = self.null_cond_embed.to(cond_tokens.dtype) + cond_tokens = torch.where( + keep_mask_embed, cond_tokens, null_cond_embed[:, : cond_tokens.shape[1], :] + ) + mean_pooled_cond_tokens = cond_tokens.mean(dim=-2) + cond_hidden = self.non_attn_cond_projection(mean_pooled_cond_tokens) + + # create t conditioning + t_hidden = self.time_mlp(times) + t = self.to_time_cond(t_hidden) + t_tokens = self.to_time_tokens(t_hidden) + null_cond_hidden = self.null_cond_hidden.to(t.dtype) + cond_hidden = torch.where(keep_mask_hidden, cond_hidden, null_cond_hidden) + t += cond_hidden + + # cross-attention conditioning + c = torch.cat((cond_tokens, t_tokens), dim=-2) + cond_tokens = self.norm_cond(c) + + # Pass through the transformer decoder + 
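+        # x: projected noisy input tokens; cond_tokens: audio (+ time) tokens
+        # attended to via cross-attention; t: per-sample FiLM conditioning
+        # vector; memory2: optional keyframe guide tokens (pose branch only).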
output = self.seqTransDecoder(x, cond_tokens, t, memory2=pose_tokens) + output = self.final_layer(output) + if self.data_format == "pose": + output = output.permute(0, 2, 1) + output = self._run_single_pose_conv(output) + output = self.final_conv(output) + output = output.permute(0, 2, 1) + return output diff --git a/model/guide.py b/model/guide.py new file mode 100644 index 0000000000000000000000000000000000000000..2b362612e238a8ba9f3a4ed78fd2170246eaf3f3 --- /dev/null +++ b/model/guide.py @@ -0,0 +1,222 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +from typing import Callable, List + +import torch +import torch as th +import torch.nn as nn +from einops import rearrange +from model.modules.rotary_embedding_torch import RotaryEmbedding + +from model.modules.transformer_modules import ( + DecoderLayerStack, + FiLMTransformerDecoderLayer, + PositionalEncoding, +) +from model.utils import prob_mask_like, setup_lip_regressor +from torch.distributions import Categorical +from torch.nn import functional as F + + +class GuideTransformer(nn.Module): + def __init__( + self, + tokens: int, + num_heads: int = 4, + num_layers: int = 4, + dim: int = 512, + ff_size: int = 1024, + dropout: float = 0.1, + activation: Callable = F.gelu, + use_rotary: bool = True, + cond_feature_dim: int = 1024, + emb_len: int = 798, + num_audio_layers: int = 2, + ): + super().__init__() + self.tokens = tokens + self.token_embedding = th.nn.Embedding( + num_embeddings=tokens + 1, # account for sequence start and end tokens + embedding_dim=dim, + ) + self.abs_pos_encoding = nn.Identity() + # if rotary, replace absolute embedding with a rotary embedding instance (absolute becomes an identity) + if use_rotary: + self.rotary = RotaryEmbedding(dim=dim) + else: + self.abs_pos_encoding = PositionalEncoding(dim, dropout, batch_first=True) + self.setup_audio_models(cond_feature_dim, num_audio_layers) + + self.null_cond_embed = nn.Parameter(torch.randn(1, emb_len, dim)) + self.null_cond_hidden = nn.Parameter(torch.randn(1, dim)) + self.norm_cond = nn.LayerNorm(dim) + + self.cond_projection = nn.Linear(cond_feature_dim, dim) + self.non_attn_cond_projection = nn.Sequential( + nn.LayerNorm(dim), + nn.Linear(dim, dim), + nn.SiLU(), + nn.Linear(dim, dim), + ) + # decoder + decoderstack = nn.ModuleList([]) + for _ in range(num_layers): + decoderstack.append( + FiLMTransformerDecoderLayer( + dim, + num_heads, + dim_feedforward=ff_size, + dropout=dropout, + activation=activation, + batch_first=True, + rotary=self.rotary, + ) + ) + self.seqTransDecoder = DecoderLayerStack(decoderstack) + self.final_layer = nn.Linear(dim, tokens) + + def _build_single_audio_conv(self, c: int) -> List[nn.Module]: + return [ + torch.nn.Conv1d(c, max(256, c), kernel_size=3, dilation=1), + torch.nn.LeakyReLU(negative_slope=0.2), + torch.nn.Dropout(0.2), + # + torch.nn.Conv1d(max(256, c), max(256, c), kernel_size=3, dilation=2), + torch.nn.LeakyReLU(negative_slope=0.2), + torch.nn.Dropout(0.2), + # + torch.nn.Conv1d(max(128, c), max(128, c), kernel_size=3, dilation=3), + torch.nn.LeakyReLU(negative_slope=0.2), + torch.nn.Dropout(0.2), + # + torch.nn.Conv1d(max(128, c), c, kernel_size=3, dilation=1), + torch.nn.LeakyReLU(negative_slope=0.2), + torch.nn.Dropout(0.2), + # + torch.nn.Conv1d(c, c, kernel_size=3, dilation=2), + torch.nn.LeakyReLU(negative_slope=0.2), + torch.nn.Dropout(0.2), + # + 
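+            # (six convs with kernel 3 and dilations 1, 2, 3, 1, 2, 3 give
+            # each group a receptive field of 2 * (1+2+3+1+2+3) + 1 = 25)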
torch.nn.Conv1d(c, c, kernel_size=3, dilation=3),
+            torch.nn.LeakyReLU(negative_slope=0.2),
+            torch.nn.Dropout(0.2),
+        ]
+
+    def setup_audio_models(self, cond_feature_dim: int, num_audio_layers: int) -> None:
+        pre_layers = []
+        for _ in range(num_audio_layers):
+            pre_layers += self._build_single_audio_conv(cond_feature_dim)
+        pre_layers += [
+            torch.nn.Conv1d(cond_feature_dim, cond_feature_dim, kernel_size=1)
+        ]
+        pre_layers = torch.nn.ModuleList(pre_layers)
+        self.pre_audio = nn.Sequential(*pre_layers)
+        self.audio_model, self.audio_resampler = setup_lip_regressor()
+
+    def encode_audio(self, raw_audio: torch.Tensor) -> torch.Tensor:
+        device = next(self.parameters()).device
+        a0 = self.audio_resampler(raw_audio[:, :, 0].to(device))  # B x T
+        a1 = self.audio_resampler(raw_audio[:, :, 1].to(device))  # B x T
+        with torch.no_grad():
+            z0 = self.audio_model.feature_extractor(a0)
+            z1 = self.audio_model.feature_extractor(a1)
+        emb = torch.cat((z0, z1), axis=1).permute(0, 2, 1)
+        return emb
+
+    def get_tgt_mask(self, size: int, device: str) -> torch.Tensor:
+        mask = torch.tril(
+            torch.ones((size, size), device=device) == 1
+        )  # Lower triangular matrix
+        mask = mask.float()
+        mask = mask.masked_fill(mask == 0, float("-inf"))  # Convert zeros to -inf
+        mask = mask.masked_fill(mask == 1, float(0.0))  # Convert ones to 0
+        return mask
+
+    def forward(
+        self, tokens: th.Tensor, condition: th.Tensor, cond_drop_prob: float = 0.0
+    ) -> torch.Tensor:
+        batch_size, device = tokens.shape[0], tokens.device
+
+        x = self.token_embedding(tokens)
+        x = self.abs_pos_encoding(x)
+        tgt_mask = self.get_tgt_mask(x.shape[1], x.device)
+
+        cond_embed = self.encode_audio(condition)
+        keep_mask = prob_mask_like((batch_size,), 1 - cond_drop_prob, device=device)
+        keep_mask_embed = rearrange(keep_mask, "b -> b 1 1")
+        keep_mask_hidden = rearrange(keep_mask, "b -> b 1")
+        cond_tokens = self.pre_audio(cond_embed.permute(0, 2, 1)).permute(0, 2, 1)
+        #
+        cond_tokens = self.cond_projection(cond_tokens)
+        cond_tokens = self.abs_pos_encoding(cond_tokens)
+
+        null_cond_embed = self.null_cond_embed.to(cond_tokens.dtype)
+        cond_tokens = torch.where(
+            keep_mask_embed, cond_tokens, null_cond_embed[:, : cond_tokens.shape[1], :]
+        )
+        mean_pooled_cond_tokens = cond_tokens.mean(dim=-2)
+        cond_hidden = self.non_attn_cond_projection(mean_pooled_cond_tokens)
+
+        # FiLM conditioning
+        null_cond_hidden = self.null_cond_hidden.to(cond_tokens.dtype)
+        cond_hidden = torch.where(keep_mask_hidden, cond_hidden, null_cond_hidden)
+        cond_tokens = self.norm_cond(cond_tokens)
+
+        output = self.seqTransDecoder(x, cond_tokens, cond_hidden, tgt_mask=tgt_mask)
+        output = self.final_layer(output)
+        return output
+
+    def generate(
+        self,
+        condition: th.Tensor,
+        sequence_length: int,
+        layers: int,
+        n_sequences: int = 1,
+        max_key_len: int = 8,
+        max_seq_len: int = 240,
+        top_p: float = 0.94,
+    ) -> torch.Tensor:
+        """
+        :param sequence_length: number of tokens to generate in autoregressive fashion
+        :param n_sequences: number of sequences to generate simultaneously
+        :param top_p: nucleus sampling threshold used when sampling from the output logits
+        :return: n_sequences x sequence_length LongTensor containing generated tokens
+        """
+        assert max_key_len == int(max_seq_len / 30), "currently only running for 1fps"
+        max_key_len *= layers
+        with th.no_grad():
+            input_tokens = (
+                th.zeros(n_sequences, 1, dtype=th.int64).to(condition.device)
+                + self.tokens
+            )
+            for _ in range(sequence_length * layers):
+                curr_input_tokens = input_tokens
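+                # Top-p (nucleus) sampling below: keep the smallest prefix of
+                # the sorted distribution whose cumulative mass reaches top_p,
+                # renormalize it, and sample the next token from it.
+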
curr_condition = condition + logits = self.forward(curr_input_tokens, curr_condition) + logits = logits[:, -1, :] # only most recent time step is relevant + one_hot = th.nn.functional.softmax(logits, dim=-1) + sorted_probs, indices = torch.sort(one_hot, dim=-1, descending=True) + cumulative_probs = torch.cumsum(sorted_probs, dim=-1) + nucleus = cumulative_probs < top_p + nucleus = torch.cat( + [ + nucleus.new_ones(nucleus.shape[:-1] + (1,)), + nucleus[..., :-1], + ], + dim=-1, + ) + sorted_probs[~nucleus] = 0 + sorted_probs /= sorted_probs.sum(-1, keepdim=True) + dist = Categorical(sorted_probs) + idx = dist.sample() + tokens = indices.gather(-1, idx.unsqueeze(-1)) + input_tokens = th.cat([input_tokens, tokens], dim=-1) + + # return generated tokens except for sequence start token + tokens = input_tokens[:, 1:].contiguous() + return tokens diff --git a/model/modules/audio_encoder.py b/model/modules/audio_encoder.py new file mode 100644 index 0000000000000000000000000000000000000000..e46c7a99d22169f3b9843bc17aceaffb6ab09bcf --- /dev/null +++ b/model/modules/audio_encoder.py @@ -0,0 +1,194 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +import fairseq +import torch as th +import torchaudio as ta + +wav2vec_model_path = "./assets/wav2vec_large.pt" + + +def weights_init(m): + if isinstance(m, th.nn.Conv1d): + th.nn.init.xavier_uniform_(m.weight) + try: + th.nn.init.constant_(m.bias, 0.01) + except: + pass + + +class Wav2VecEncoder(th.nn.Module): + def __init__(self): + super().__init__() + self.resampler = ta.transforms.Resample(orig_freq=48000, new_freq=16000) + model, cfg, task = fairseq.checkpoint_utils.load_model_ensemble_and_task( + [wav2vec_model_path] + ) + self.wav2vec_model = model[0] + + def forward(self, audio: th.Tensor): + """ + :param audio: B x T x 1600 + :return: B x T_wav2vec x 512 + """ + audio = audio.view(audio.shape[0], audio.shape[1] * 1600) + audio = self.resampler(audio) + audio = th.cat( + [th.zeros(audio.shape[0], 320, device=audio.device), audio], dim=-1 + ) # zero padding on the left + x = self.wav2vec_model.feature_extractor(audio) + x = self.wav2vec_model.feature_aggregator(x) + x = x.permute(0, 2, 1).contiguous() + return x + + +class Wav2VecDownsampler(th.nn.Module): + def __init__(self): + super().__init__() + self.conv1 = th.nn.Conv1d(512, 512, kernel_size=3) + self.conv2 = th.nn.Conv1d(512, 512, kernel_size=3) + self.norm = th.nn.LayerNorm(512) + + def forward(self, x: th.Tensor, target_length: int): + """ + :param x: B x T x 512 tensor containing wav2vec features at 100Hz + :return: B x target_length x 512 tensor containing downsampled wav2vec features at 30Hz + """ + x = x.permute(0, 2, 1).contiguous() + # first conv + x = th.nn.functional.pad(x, pad=(2, 0)) + x = th.nn.functional.relu(self.conv1(x)) + # first downsampling + x = th.nn.functional.interpolate(x, size=(x.shape[-1] + target_length) // 2) + # second conv + x = th.nn.functional.pad(x, pad=(2, 0)) + x = self.conv2(x) + # second downsampling + x = th.nn.functional.interpolate(x, size=target_length) + # layer norm + x = x.permute(0, 2, 1).contiguous() + x = self.norm(x) + return x + + +class AudioTcn(th.nn.Module): + def __init__( + self, + encoding_dim: int = 128, + use_melspec: bool = True, + use_wav2vec: bool = True, + ): + """ + :param encoding_dim: size of encoding + :param use_melspec: extract mel spectrogram features as input + :param 
use_wav2vec: extract wav2vec features as input + """ + super().__init__() + self.encoding_dim = encoding_dim + self.use_melspec = use_melspec + self.use_wav2vec = use_wav2vec + + if use_melspec: + # hop_length=400 -> two feature vectors per visual frame (downsampling to 24kHz -> 800 samples per frame) + self.melspec = th.nn.Sequential( + ta.transforms.Resample(orig_freq=48000, new_freq=24000), + ta.transforms.MelSpectrogram( + sample_rate=24000, + n_fft=1024, + win_length=800, + hop_length=400, + n_mels=80, + ), + ) + + if use_wav2vec: + model, cfg, task = fairseq.checkpoint_utils.load_model_ensemble_and_task( + [wav2vec_model_path] + ) + self.wav2vec_model = model[0] + self.wav2vec_model.eval() + self.wav2vec_postprocess = th.nn.Conv1d(512, 256, kernel_size=3) + self.wav2vec_postprocess.apply(lambda x: weights_init(x)) + + # temporal model + input_dim = 0 + (160 if use_melspec else 0) + (256 if use_wav2vec else 0) + self.layers = th.nn.ModuleList( + [ + th.nn.Conv1d( + input_dim, max(256, encoding_dim), kernel_size=3, dilation=1 + ), # 2 (+1) + th.nn.Conv1d( + max(256, encoding_dim), encoding_dim, kernel_size=3, dilation=2 + ), # 4 (+1) + th.nn.Conv1d( + encoding_dim, encoding_dim, kernel_size=3, dilation=3 + ), # 6 (+1) + th.nn.Conv1d( + encoding_dim, encoding_dim, kernel_size=3, dilation=1 + ), # 2 (+1) + th.nn.Conv1d( + encoding_dim, encoding_dim, kernel_size=3, dilation=2 + ), # 4 (+1) + th.nn.Conv1d( + encoding_dim, encoding_dim, kernel_size=3, dilation=3 + ), # 6 (+1) + ] + ) + self.layers.apply(lambda x: weights_init(x)) + self.receptive_field = 25 + + self.final = th.nn.Conv1d(encoding_dim, encoding_dim, kernel_size=1) + self.final.apply(lambda x: weights_init(x)) + + def forward(self, audio): + """ + :param audio: B x T x 1600 tensor containing audio samples for each frame + :return: B x T x encoding_dim tensor containing audio encodings for each frame + """ + B, T = audio.shape[0], audio.shape[1] + + # preprocess raw audio signal to extract feature vectors + audio = audio.view(B, T * 1600) + x_mel, x_w2v = th.zeros(B, 0, T).to(audio.device), th.zeros(B, 0, T).to( + audio.device + ) + if self.use_melspec: + x_mel = self.melspec(audio)[:, :, 1:].contiguous() + x_mel = th.log(x_mel.clamp(min=1e-10, max=None)) + x_mel = ( + x_mel.permute(0, 2, 1) + .contiguous() + .view(x_mel.shape[0], T, 160) + .permute(0, 2, 1) + .contiguous() + ) + if self.use_wav2vec: + with th.no_grad(): + x_w2v = ta.functional.resample(audio, 48000, 16000) + x_w2v = self.wav2vec_model.feature_extractor(x_w2v) + x_w2v = self.wav2vec_model.feature_aggregator(x_w2v) + x_w2v = self.wav2vec_postprocess(th.nn.functional.pad(x_w2v, pad=[2, 0])) + x_w2v = th.nn.functional.interpolate( + x_w2v, size=T, align_corners=True, mode="linear" + ) + x = th.cat([x_mel, x_w2v], dim=1) + + # process signal with TCN + x = th.nn.functional.pad(x, pad=[self.receptive_field - 1, 0]) + for layer_idx, layer in enumerate(self.layers): + y = th.nn.functional.leaky_relu(layer(x), negative_slope=0.2) + if self.training: + y = th.nn.functional.dropout(y, 0.2) + if x.shape[1] == y.shape[1]: + x = (x[:, :, -y.shape[-1] :] + y) / 2.0 # skip connection + else: + x = y + + x = self.final(x) + x = x.permute(0, 2, 1).contiguous() + + return x diff --git a/model/modules/rotary_embedding_torch.py b/model/modules/rotary_embedding_torch.py new file mode 100644 index 0000000000000000000000000000000000000000..517b6460641c1e20fac24ee67d53bb98200b87a3 --- /dev/null +++ b/model/modules/rotary_embedding_torch.py @@ -0,0 +1,139 @@ +""" +Copyright (c) Meta 
Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +from inspect import isfunction +from math import log, pi + +import torch +from einops import rearrange, repeat +from torch import einsum, nn + +# helper functions + + +def exists(val): + return val is not None + + +def broadcat(tensors, dim=-1): + num_tensors = len(tensors) + shape_lens = set(list(map(lambda t: len(t.shape), tensors))) + assert len(shape_lens) == 1, "tensors must all have the same number of dimensions" + shape_len = list(shape_lens)[0] + + dim = (dim + shape_len) if dim < 0 else dim + dims = list(zip(*map(lambda t: list(t.shape), tensors))) + + expandable_dims = [(i, val) for i, val in enumerate(dims) if i != dim] + assert all( + [*map(lambda t: len(set(t[1])) <= 2, expandable_dims)] + ), "invalid dimensions for broadcastable concatenation" + max_dims = list(map(lambda t: (t[0], max(t[1])), expandable_dims)) + expanded_dims = list(map(lambda t: (t[0], (t[1],) * num_tensors), max_dims)) + expanded_dims.insert(dim, (dim, dims[dim])) + expandable_shapes = list(zip(*map(lambda t: t[1], expanded_dims))) + tensors = list(map(lambda t: t[0].expand(*t[1]), zip(tensors, expandable_shapes))) + return torch.cat(tensors, dim=dim) + + +# rotary embedding helper functions + + +def rotate_half(x): + x = rearrange(x, "... (d r) -> ... d r", r=2) + x1, x2 = x.unbind(dim=-1) + x = torch.stack((-x2, x1), dim=-1) + return rearrange(x, "... d r -> ... (d r)") + + +def apply_rotary_emb(freqs, t, start_index=0): + freqs = freqs.to(t) + rot_dim = freqs.shape[-1] + end_index = start_index + rot_dim + assert ( + rot_dim <= t.shape[-1] + ), f"feature dimension {t.shape[-1]} is not of sufficient size to rotate in all the positions {rot_dim}" + t_left, t, t_right = ( + t[..., :start_index], + t[..., start_index:end_index], + t[..., end_index:], + ) + t = (t * freqs.cos()) + (rotate_half(t) * freqs.sin()) + return torch.cat((t_left, t, t_right), dim=-1)
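For illustration, a minimal sketch of how the two helpers above combine, assuming apply_rotary_emb from this module is in scope; the shapes are arbitrary, and the frequency construction mirrors the "lang" branch of the RotaryEmbedding class defined further below:

import torch
from einops import repeat

q = torch.randn(2, 8, 64)  # (batch, seq_len, dim) queries
# inverse frequencies, as in RotaryEmbedding(freqs_for="lang") with theta=10000
inv_freq = 1.0 / (10000 ** (torch.arange(0, 64, 2).float() / 64))
# outer product of positions and frequencies, then duplicate each frequency
# into the interleaved (n r) layout that rotate_half expects
positions = torch.arange(8).float()
freqs = torch.einsum("n,f->nf", positions, inv_freq)
freqs = repeat(freqs, "... n -> ... (n r)", r=2)
q_rot = apply_rotary_emb(freqs, q)  # same shape as q, positions now encoded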
+ + +# learned rotation helpers + + +def apply_learned_rotations(rotations, t, start_index=0, freq_ranges=None): + if exists(freq_ranges): + rotations = einsum("..., f -> ... f", rotations, freq_ranges) + rotations = rearrange(rotations, "... r f -> ... (r f)") + + rotations = repeat(rotations, "... n -> ... (n r)", r=2) + return apply_rotary_emb(rotations, t, start_index=start_index) + + +# classes + + +class RotaryEmbedding(nn.Module): + def __init__( + self, + dim, + custom_freqs=None, + freqs_for="lang", + theta=10000, + max_freq=10, + num_freqs=1, + learned_freq=False, + ): + super().__init__() + if exists(custom_freqs): + freqs = custom_freqs + elif freqs_for == "lang": + freqs = 1.0 / ( + theta ** (torch.arange(0, dim, 2)[: (dim // 2)].float() / dim) + ) + elif freqs_for == "pixel": + freqs = torch.linspace(1.0, max_freq / 2, dim // 2) * pi + elif freqs_for == "constant": + freqs = torch.ones(num_freqs).float() + else: + raise ValueError(f"unknown modality {freqs_for}") + + self.cache = dict() + + if learned_freq: + self.freqs = nn.Parameter(freqs) + else: + self.register_buffer("freqs", freqs) + + def rotate_queries_or_keys(self, t, seq_dim=-2): + device = t.device + seq_len = t.shape[seq_dim] + freqs = self.forward( + lambda: torch.arange(seq_len, device=device), cache_key=seq_len + ) + return apply_rotary_emb(freqs, t) + + def forward(self, t, cache_key=None): + if exists(cache_key) and cache_key in self.cache: + return self.cache[cache_key] + + if isfunction(t): + t = t() + + freqs = self.freqs + + freqs = torch.einsum("..., f -> ... f", t.type(freqs.dtype), freqs) + freqs = repeat(freqs, "... n -> ... (n r)", r=2) + + if exists(cache_key): + self.cache[cache_key] = freqs + + return freqs \ No newline at end of file diff --git a/model/modules/transformer_modules.py b/model/modules/transformer_modules.py new file mode 100644 index 0000000000000000000000000000000000000000..3f2375c5f9e3f6e4d2ac47cc4fbaa549f35bd584 --- /dev/null +++ b/model/modules/transformer_modules.py @@ -0,0 +1,702 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree.
+""" + +import math +from typing import Any, Callable, List, Optional, Union + +import torch +import torch.nn as nn +from einops import rearrange +from torch import Tensor +from torch.nn import functional as F + + +def generate_causal_mask(source_length, target_length, device="cpu"): + if source_length == target_length: + mask = ( + torch.triu(torch.ones(target_length, source_length, device=device)) == 1 + ).transpose(0, 1) + else: + mask = torch.zeros(target_length, source_length, device=device) + idx = torch.linspace(0, source_length, target_length + 1)[1:].round().long() + for i in range(target_length): + mask[i, 0 : idx[i]] = 1 + + return ( + mask.float() + .masked_fill(mask == 0, float("-inf")) + .masked_fill(mask == 1, float(0.0)) + ) + + +class TransformerEncoderLayerRotary(nn.Module): + def __init__( + self, + d_model: int, + nhead: int, + dim_feedforward: int = 2048, + dropout: float = 0.1, + activation: Union[str, Callable[[Tensor], Tensor]] = F.relu, + layer_norm_eps: float = 1e-5, + batch_first: bool = False, + norm_first: bool = True, + rotary=None, + ) -> None: + super().__init__() + self.self_attn = nn.MultiheadAttention( + d_model, nhead, dropout=dropout, batch_first=batch_first + ) + # Implementation of Feedforward model + self.linear1 = nn.Linear(d_model, dim_feedforward) + self.dropout = nn.Dropout(dropout) + self.linear2 = nn.Linear(dim_feedforward, d_model) + + self.norm_first = norm_first + self.norm1 = nn.LayerNorm(d_model, eps=layer_norm_eps) + self.norm2 = nn.LayerNorm(d_model, eps=layer_norm_eps) + self.dropout1 = nn.Dropout(dropout) + self.dropout2 = nn.Dropout(dropout) + self.activation = activation + + self.rotary = rotary + self.use_rotary = rotary is not None + + def forward( + self, + src: Tensor, + src_mask: Optional[Tensor] = None, + src_key_padding_mask: Optional[Tensor] = None, + ) -> Tensor: + x = src + if self.norm_first: + x = x + self._sa_block(self.norm1(x), src_mask, src_key_padding_mask) + x = x + self._ff_block(self.norm2(x)) + else: + x = self.norm1(x + self._sa_block(x, src_mask, src_key_padding_mask)) + x = self.norm2(x + self._ff_block(x)) + + return x + + # self-attention block + def _sa_block( + self, x: Tensor, attn_mask: Optional[Tensor], key_padding_mask: Optional[Tensor] + ) -> Tensor: + qk = self.rotary.rotate_queries_or_keys(x) if self.use_rotary else x + x = self.self_attn( + qk, + qk, + x, + attn_mask=attn_mask, + key_padding_mask=key_padding_mask, + need_weights=False, + )[0] + return self.dropout1(x) + + # feed forward block + def _ff_block(self, x: Tensor) -> Tensor: + x = self.linear2(self.dropout(self.activation(self.linear1(x)))) + return self.dropout2(x) + + +class DenseFiLM(nn.Module): + """Feature-wise linear modulation (FiLM) generator.""" + + def __init__(self, embed_channels): + super().__init__() + self.embed_channels = embed_channels + self.block = nn.Sequential( + nn.Mish(), nn.Linear(embed_channels, embed_channels * 2) + ) + + def forward(self, position): + pos_encoding = self.block(position) + pos_encoding = rearrange(pos_encoding, "b c -> b 1 c") + scale_shift = pos_encoding.chunk(2, dim=-1) + return scale_shift + + +def featurewise_affine(x, scale_shift): + scale, shift = scale_shift + return (scale + 1) * x + shift + + +class FiLMTransformerDecoderLayer(nn.Module): + def __init__( + self, + d_model: int, + nhead: int, + dim_feedforward=2048, + dropout=0.1, + activation=F.relu, + layer_norm_eps=1e-5, + batch_first=False, + norm_first=True, + rotary=None, + use_cm=False, + ): + super().__init__() + self.self_attn = 
nn.MultiheadAttention( + d_model, nhead, dropout=dropout, batch_first=batch_first + ) + self.multihead_attn = nn.MultiheadAttention( + d_model, nhead, dropout=dropout, batch_first=batch_first + ) + # Feedforward + self.linear1 = nn.Linear(d_model, dim_feedforward) + self.dropout = nn.Dropout(dropout) + self.linear2 = nn.Linear(dim_feedforward, d_model) + + self.norm_first = norm_first + self.norm1 = nn.LayerNorm(d_model, eps=layer_norm_eps) + self.norm2 = nn.LayerNorm(d_model, eps=layer_norm_eps) + self.norm3 = nn.LayerNorm(d_model, eps=layer_norm_eps) + self.dropout1 = nn.Dropout(dropout) + self.dropout2 = nn.Dropout(dropout) + self.dropout3 = nn.Dropout(dropout) + self.activation = activation + + self.film1 = DenseFiLM(d_model) + self.film2 = DenseFiLM(d_model) + self.film3 = DenseFiLM(d_model) + + if use_cm: + self.multihead_attn2 = nn.MultiheadAttention( # 2 + d_model, nhead, dropout=dropout, batch_first=batch_first + ) + self.norm2a = nn.LayerNorm(d_model, eps=layer_norm_eps) # 2 + self.dropout2a = nn.Dropout(dropout) # 2 + self.film2a = DenseFiLM(d_model) # 2 + + self.rotary = rotary + self.use_rotary = rotary is not None + + # x, cond, t + def forward( + self, + tgt, + memory, + t, + tgt_mask=None, + memory_mask=None, + tgt_key_padding_mask=None, + memory_key_padding_mask=None, + memory2=None, + ): + x = tgt + if self.norm_first: + # self-attention -> film -> residual + x_1 = self._sa_block(self.norm1(x), tgt_mask, tgt_key_padding_mask) + x = x + featurewise_affine(x_1, self.film1(t)) + # cross-attention -> film -> residual + x_2 = self._mha_block( + self.norm2(x), + memory, + memory_mask, + memory_key_padding_mask, + self.multihead_attn, + self.dropout2, + ) + x = x + featurewise_affine(x_2, self.film2(t)) + if memory2 is not None: + # cross-attention x2 -> film -> residual + x_2a = self._mha_block( + self.norm2a(x), + memory2, + memory_mask, + memory_key_padding_mask, + self.multihead_attn2, + self.dropout2a, + ) + x = x + featurewise_affine(x_2a, self.film2a(t)) + # feedforward -> film -> residual + x_3 = self._ff_block(self.norm3(x)) + x = x + featurewise_affine(x_3, self.film3(t)) + else: + x = self.norm1( + x + + featurewise_affine( + self._sa_block(x, tgt_mask, tgt_key_padding_mask), self.film1(t) + ) + ) + x = self.norm2( + x + + featurewise_affine( + self._mha_block(x, memory, memory_mask, memory_key_padding_mask), + self.film2(t), + ) + ) + x = self.norm3(x + featurewise_affine(self._ff_block(x), self.film3(t))) + return x + + # self-attention block + # qkv + def _sa_block(self, x, attn_mask, key_padding_mask): + qk = self.rotary.rotate_queries_or_keys(x) if self.use_rotary else x + x = self.self_attn( + qk, + qk, + x, + attn_mask=attn_mask, + key_padding_mask=key_padding_mask, + need_weights=False, + )[0] + return self.dropout1(x) + + # multihead attention block + # qkv + def _mha_block(self, x, mem, attn_mask, key_padding_mask, mha, dropout): + q = self.rotary.rotate_queries_or_keys(x) if self.use_rotary else x + k = self.rotary.rotate_queries_or_keys(mem) if self.use_rotary else mem + x = mha( + q, + k, + mem, + attn_mask=attn_mask, + key_padding_mask=key_padding_mask, + need_weights=False, + )[0] + return dropout(x) + + # feed forward block + def _ff_block(self, x): + x = self.linear2(self.dropout(self.activation(self.linear1(x)))) + return self.dropout3(x) + + +class DecoderLayerStack(nn.Module): + def __init__(self, stack): + super().__init__() + self.stack = stack + + def forward(self, x, cond, t, tgt_mask=None, memory2=None): + for layer in self.stack: + x = 
layer(x, cond, t, tgt_mask=tgt_mask, memory2=memory2) + return x + + +class PositionalEncoding(nn.Module): + def __init__(self, d_model: int, dropout: float = 0.1, max_len: int = 1024): + super().__init__() + pe = torch.zeros(max_len, d_model) + position = torch.arange(0, max_len).unsqueeze(1) + div_term = torch.exp( + torch.arange(0, d_model, 2) * (-math.log(10000.0) / d_model) + ) + pe[:, 0::2] = torch.sin(position * div_term) + pe[:, 1::2] = torch.cos(position * div_term) + + self.register_buffer("pe", pe) + self.dropout = nn.Dropout(p=dropout) + + def forward(self, x: torch.Tensor): + """ + :param x: B x T x d_model tensor + :return: B x T x d_model tensor + """ + x = x + self.pe[None, : x.shape[1], :] + x = self.dropout(x) + return x + + +class TimestepEncoding(nn.Module): + def __init__(self, embedding_dim: int): + super().__init__() + + # Fourier embedding + half_dim = embedding_dim // 2 + emb = math.log(10000) / (half_dim - 1) + emb = torch.exp(torch.arange(half_dim) * -emb) + self.register_buffer("emb", emb) + + # encoding + self.encoding = nn.Sequential( + nn.Linear(embedding_dim, 4 * embedding_dim), + nn.Mish(), + nn.Linear(4 * embedding_dim, embedding_dim), + ) + + def forward(self, t: torch.Tensor): + """ + :param t: B-dimensional tensor containing timesteps in range [0, 1] + :return: B x embedding_dim tensor containing timestep encodings + """ + x = t[:, None] * self.emb[None, :] + x = torch.cat([torch.sin(x), torch.cos(x)], dim=-1) + x = self.encoding(x) + return x + + +class FiLM(nn.Module): + def __init__(self, dim: int): + super().__init__() + self.dim = dim + self.film = nn.Sequential(nn.Mish(), nn.Linear(dim, dim * 2)) + + def forward(self, x: torch.Tensor, cond: torch.Tensor): + """ + :param x: ... x dim tensor + :param cond: ... x dim tensor + :return: ... x dim tensor as scale(cond) * x + bias(cond) + """ + cond = self.film(cond) + scale, bias = torch.chunk(cond, chunks=2, dim=-1) + x = (scale + 1) * x + bias + return x + + +class FeedforwardBlock(nn.Module): + def __init__(self, d_model: int, d_feedforward: int = 1024, dropout: float = 0.1): + super().__init__() + self.ff = nn.Sequential( + nn.Linear(d_model, d_feedforward), + nn.ReLU(), + nn.Dropout(p=dropout), + nn.Linear(d_feedforward, d_model), + nn.Dropout(p=dropout), + ) + + def forward(self, x: torch.Tensor): + """ + :param x: ... x d_model tensor + :return: ... 
x d_model tensor + """ + return self.ff(x) + + +class SelfAttention(nn.Module): + def __init__(self, d_model: int, num_heads: int, dropout: float = 0.1): + super().__init__() + self.self_attn = nn.MultiheadAttention( + d_model, num_heads, dropout=dropout, batch_first=True + ) + self.dropout = nn.Dropout(p=dropout) + + def forward( + self, + x: torch.Tensor, + attn_mask: torch.Tensor = None, + key_padding_mask: torch.Tensor = None, + ): + """ + :param x: B x T x d_model input tensor + :param attn_mask: B * num_heads x L x S mask with L=target sequence length, S=source sequence length + for a float mask: values will be added to attention weight + for a binary mask: True indicates that the element is not allowed to attend + :param key_padding_mask: B x S mask + for a float mask: values will be added directly to the corresponding key values + for a binary mask: True indicates that the corresponding key value will be ignored + :return: B x T x d_model output tensor + """ + x = self.self_attn( + x, + x, + x, + attn_mask=attn_mask, + key_padding_mask=key_padding_mask, + need_weights=False, + )[0] + x = self.dropout(x) + return x + + +class CrossAttention(nn.Module): + def __init__(self, d_model: int, d_cond: int, num_heads: int, dropout: float = 0.1): + super().__init__() + self.cross_attn = nn.MultiheadAttention( + d_model, + num_heads, + dropout=dropout, + batch_first=True, + kdim=d_cond, + vdim=d_cond, + ) + self.dropout = nn.Dropout(p=dropout) + + def forward( + self, + x: torch.Tensor, + cond: torch.Tensor, + attn_mask: torch.Tensor = None, + key_padding_mask: torch.Tensor = None, + ): + """ + :param x: B x T_target x d_model input tensor + :param cond: B x T_cond x d_cond condition tensor + :param attn_mask: B * num_heads x L x S mask with L=target sequence length, S=source sequence length + for a float mask: values will be added to attention weight + for a binary mask: True indicates that the element is not allowed to attend + :param key_padding_mask: B x S mask + for a float mask: values will be added directly to the corresponding key values + for a binary mask: True indicates that the corresponding key value will be ignored + :return: B x T x d_model output tensor + """ + x = self.cross_attn( + x, + cond, + cond, + attn_mask=attn_mask, + key_padding_mask=key_padding_mask, + need_weights=False, + )[0] + x = self.dropout(x) + return x + + +class TransformerEncoderLayer(nn.Module): + def __init__( + self, + d_model: int, + num_heads: int, + d_feedforward: int = 1024, + dropout: float = 0.1, + ): + super().__init__() + self.norm1 = nn.LayerNorm(d_model) + self.self_attn = SelfAttention(d_model, num_heads, dropout) + self.norm2 = nn.LayerNorm(d_model) + self.feedforward = FeedforwardBlock(d_model, d_feedforward, dropout) + + def forward( + self, + x: torch.Tensor, + mask: torch.Tensor = None, + key_padding_mask: torch.Tensor = None, + ): + x = x + self.self_attn(self.norm1(x), mask, key_padding_mask) + x = x + self.feedforward(self.norm2(x)) + return x + + +class TransformerDecoderLayer(nn.Module): + def __init__( + self, + d_model: int, + d_cond: int, + num_heads: int, + d_feedforward: int = 1024, + dropout: float = 0.1, + ): + super().__init__() + self.norm1 = nn.LayerNorm(d_model) + self.self_attn = SelfAttention(d_model, num_heads, dropout) + self.norm2 = nn.LayerNorm(d_model) + self.cross_attn = CrossAttention(d_model, d_cond, num_heads, dropout) + self.norm3 = nn.LayerNorm(d_model) + self.feedforward = FeedforwardBlock(d_model, d_feedforward, dropout) + + def forward( + self, + x: 
torch.Tensor, + cross_cond: torch.Tensor, + target_mask: torch.Tensor = None, + target_key_padding_mask: torch.Tensor = None, + cross_cond_mask: torch.Tensor = None, + cross_cond_key_padding_mask: torch.Tensor = None, + ): + """ + :param x: B x T x d_model tensor + :param cross_cond: B x T x d_cond tensor containing the conditioning input to cross attention layers + :return: B x T x d_model tensor + """ + x = x + self.self_attn(self.norm1(x), target_mask, target_key_padding_mask) + x = x + self.cross_attn( + self.norm2(x), cross_cond, cross_cond_mask, cross_cond_key_padding_mask + ) + x = x + self.feedforward(self.norm3(x)) + return x + + +class FilmTransformerDecoderLayer(nn.Module): + def __init__( + self, + d_model: int, + d_cond: int, + num_heads: int, + d_feedforward: int = 1024, + dropout: float = 0.1, + ): + super().__init__() + self.norm1 = nn.LayerNorm(d_model) + self.self_attn = SelfAttention(d_model, num_heads, dropout) + self.film1 = FiLM(d_model) + self.norm2 = nn.LayerNorm(d_model) + self.cross_attn = CrossAttention(d_model, d_cond, num_heads, dropout) + self.film2 = FiLM(d_model) + self.norm3 = nn.LayerNorm(d_model) + self.feedforward = FeedforwardBlock(d_model, d_feedforward, dropout) + self.film3 = FiLM(d_model) + + def forward( + self, + x: torch.Tensor, + cross_cond: torch.Tensor, + film_cond: torch.Tensor, + target_mask: torch.Tensor = None, + target_key_padding_mask: torch.Tensor = None, + cross_cond_mask: torch.Tensor = None, + cross_cond_key_padding_mask: torch.Tensor = None, + ): + """ + :param x: B x T x d_model tensor + :param cross_cond: B x T x d_cond tensor containing the conditioning input to cross attention layers + :param film_cond: B x [1 or T] x film_cond tensor containing the conditioning input to FiLM layers + :return: B x T x d_model tensor + """ + x1 = self.self_attn(self.norm1(x), target_mask, target_key_padding_mask) + x = x + self.film1(x1, film_cond) + x2 = self.cross_attn( + self.norm2(x), cross_cond, cross_cond_mask, cross_cond_key_padding_mask + ) + x = x + self.film2(x2, film_cond) + x3 = self.feedforward(self.norm3(x)) + x = x + self.film3(x3, film_cond) + return x + + +class RegressionTransformer(nn.Module): + def __init__( + self, + transformer_encoder_layers: int = 2, + transformer_decoder_layers: int = 4, + d_model: int = 512, + d_cond: int = 512, + num_heads: int = 4, + d_feedforward: int = 1024, + dropout: float = 0.1, + causal: bool = False, + ): + super().__init__() + self.causal = causal + + self.cond_positional_encoding = PositionalEncoding(d_cond, dropout) + self.target_positional_encoding = PositionalEncoding(d_model, dropout) + + self.transformer_encoder = nn.ModuleList( + [ + TransformerEncoderLayer(d_cond, num_heads, d_feedforward, dropout) + for _ in range(transformer_encoder_layers) + ] + ) + + self.transformer_decoder = nn.ModuleList( + [ + TransformerDecoderLayer( + d_model, d_cond, num_heads, d_feedforward, dropout + ) + for _ in range(transformer_decoder_layers) + ] + ) + + def forward(self, x: torch.Tensor, cond: torch.Tensor): + """ + :param x: B x T x d_model input tensor + :param cond: B x T x d_cond conditional tensor + :return: B x T x d_model output tensor + """ + x = self.target_positional_encoding(x) + cond = self.cond_positional_encoding(cond) + + if self.causal: + encoder_mask = generate_causal_mask( + cond.shape[1], cond.shape[1], device=cond.device + ) + decoder_self_attn_mask = generate_causal_mask( + x.shape[1], x.shape[1], device=x.device + ) + decoder_cross_attn_mask = generate_causal_mask( + 
cond.shape[1], x.shape[1], device=x.device + ) + else: + encoder_mask = None + decoder_self_attn_mask = None + decoder_cross_attn_mask = None + + for encoder_layer in self.transformer_encoder: + cond = encoder_layer(cond, mask=encoder_mask) + for decoder_layer in self.transformer_decoder: + x = decoder_layer( + x, + cond, + target_mask=decoder_self_attn_mask, + cross_cond_mask=decoder_cross_attn_mask, + ) + return x + + +class DiffusionTransformer(nn.Module): + def __init__( + self, + transformer_encoder_layers: int = 2, + transformer_decoder_layers: int = 4, + d_model: int = 512, + d_cond: int = 512, + num_heads: int = 4, + d_feedforward: int = 1024, + dropout: float = 0.1, + causal: bool = False, + ): + super().__init__() + self.causal = causal + + self.timestep_encoder = TimestepEncoding(d_model) + self.cond_positional_encoding = PositionalEncoding(d_cond, dropout) + self.target_positional_encoding = PositionalEncoding(d_model, dropout) + + self.transformer_encoder = nn.ModuleList( + [ + TransformerEncoderLayer(d_cond, num_heads, d_feedforward, dropout) + for _ in range(transformer_encoder_layers) + ] + ) + + self.transformer_decoder = nn.ModuleList( + [ + FilmTransformerDecoderLayer( + d_model, d_cond, num_heads, d_feedforward, dropout + ) + for _ in range(transformer_decoder_layers) + ] + ) + + def forward(self, x: torch.Tensor, cond: torch.Tensor, t: torch.Tensor): + """ + :param x: B x T x d_model input tensor + :param cond: B x T x d_cond conditional tensor + :param t: B-dimensional tensor containing diffusion timesteps in range [0, 1] + :return: B x T x d_model output tensor + """ + t = self.timestep_encoder(t).unsqueeze(1) # B x 1 x d_model + x = self.target_positional_encoding(x) + cond = self.cond_positional_encoding(cond) + + if self.causal: + encoder_mask = generate_causal_mask( + cond.shape[1], cond.shape[1], device=cond.device + ) + decoder_self_attn_mask = generate_causal_mask( + x.shape[1], x.shape[1], device=x.device + ) + decoder_cross_attn_mask = generate_causal_mask( + cond.shape[1], x.shape[1], device=x.device + ) + else: + encoder_mask = None + decoder_self_attn_mask = None + decoder_cross_attn_mask = None + + for encoder_layer in self.transformer_encoder: + cond = encoder_layer(cond, mask=encoder_mask) + for decoder_layer in self.transformer_decoder: + x = decoder_layer( + x, + cond, + t, + target_mask=decoder_self_attn_mask, + cross_cond_mask=decoder_cross_attn_mask, + ) + + return x diff --git a/model/utils.py b/model/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..2d23d49d4d3ae61559702f347cc8ca60939d0327 --- /dev/null +++ b/model/utils.py @@ -0,0 +1,130 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. 
+""" + +import math + +import fairseq + +import numpy as np +import torch +import torchaudio.transforms as T +from torch import nn + + +def setup_lip_regressor() -> ("Audio2LipRegressionTransformer", T.Resample): + cp_path = "./assets/vq-wav2vec.pt" + audio_model, _, _ = fairseq.checkpoint_utils.load_model_ensemble_and_task([cp_path]) + audio_model = audio_model[0] + for param in audio_model.parameters(): + param.requires_grad = False + audio_model.eval() + audio_resampler = T.Resample(48000, 16000) + return audio_model, audio_resampler + + +def init_weight(m): + if ( + isinstance(m, nn.Conv1d) + or isinstance(m, nn.Linear) + or isinstance(m, nn.ConvTranspose1d) + ): + nn.init.xavier_normal_(m.weight) + # m.bias.data.fill_(0.01) + if m.bias is not None: + nn.init.constant_(m.bias, 0) + + +# absolute positional embedding used for vanilla transformer sequential data +class PositionalEncoding(nn.Module): + def __init__(self, d_model, dropout=0.1, max_len=800, batch_first=False): + super().__init__() + self.batch_first = batch_first + + self.dropout = nn.Dropout(p=dropout) + + pe = torch.zeros(max_len, d_model) + position = torch.arange(0, max_len).unsqueeze(1) + div_term = torch.exp(torch.arange(0, d_model, 2) * (-np.log(10000.0) / d_model)) + pe[:, 0::2] = torch.sin(position * div_term) + pe[:, 1::2] = torch.cos(position * div_term) + pe = pe.unsqueeze(0).transpose(0, 1) + + self.register_buffer("pe", pe) + + def forward(self, x): + if self.batch_first: + x = x + self.pe.permute(1, 0, 2)[:, : x.shape[1], :] + else: + x = x + self.pe[: x.shape[0], :] + return self.dropout(x) + + +# very similar positional embedding used for diffusion timesteps +class SinusoidalPosEmb(nn.Module): + def __init__(self, dim): + super().__init__() + self.dim = dim + + def forward(self, x): + device = x.device + half_dim = self.dim // 2 + emb = math.log(10000) / (half_dim - 1) + emb = torch.exp(torch.arange(half_dim, device=device) * -emb) + emb = x[:, None] * emb[None, :] + emb = torch.cat((emb.sin(), emb.cos()), dim=-1) + return emb + + +# dropout mask +def prob_mask_like(shape, prob, device): + if prob == 1: + return torch.ones(shape, device=device, dtype=torch.bool) + elif prob == 0: + return torch.zeros(shape, device=device, dtype=torch.bool) + else: + return torch.zeros(shape, device=device).float().uniform_(0, 1) < prob + + +def extract(a, t, x_shape): + b, *_ = t.shape + out = a.gather(-1, t) + return out.reshape(b, *((1,) * (len(x_shape) - 1))) + + +def make_beta_schedule( + schedule, n_timestep, linear_start=1e-4, linear_end=2e-2, cosine_s=8e-3 +): + if schedule == "linear": + betas = ( + torch.linspace( + linear_start**0.5, linear_end**0.5, n_timestep, dtype=torch.float64 + ) + ** 2 + ) + + elif schedule == "cosine": + timesteps = ( + torch.arange(n_timestep + 1, dtype=torch.float64) / n_timestep + cosine_s + ) + alphas = timesteps / (1 + cosine_s) * np.pi / 2 + alphas = torch.cos(alphas).pow(2) + alphas = alphas / alphas[0] + betas = 1 - alphas[1:] / alphas[:-1] + betas = np.clip(betas, a_min=0, a_max=0.999) + + elif schedule == "sqrt_linear": + betas = torch.linspace( + linear_start, linear_end, n_timestep, dtype=torch.float64 + ) + elif schedule == "sqrt": + betas = ( + torch.linspace(linear_start, linear_end, n_timestep, dtype=torch.float64) + ** 0.5 + ) + else: + raise ValueError(f"schedule '{schedule}' unknown.") + return betas.numpy() diff --git a/model/vqvae.py b/model/vqvae.py new file mode 100644 index 0000000000000000000000000000000000000000..e3363c0789ca19a7c8b6263f066a66c7726e9769 --- 
/dev/null +++ b/model/vqvae.py @@ -0,0 +1,550 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +import json +import os + +import torch +import torch.nn as nn +import torch.nn.functional as F +from einops import rearrange, repeat +from utils.misc import broadcast_tensors + + +def setup_tokenizer(resume_pth: str) -> "TemporalVertexCodec": + args_path = os.path.dirname(resume_pth) + with open(os.path.join(args_path, "args.json")) as f: + trans_args = json.load(f) + tokenizer = TemporalVertexCodec( + n_vertices=trans_args["nb_joints"], + latent_dim=trans_args["output_emb_width"], + categories=trans_args["code_dim"], + residual_depth=trans_args["depth"], + ) + print("loading checkpoint from {}".format(resume_pth)) + ckpt = torch.load(resume_pth, map_location="cpu") + tokenizer.load_state_dict(ckpt["net"], strict=True) + for p in tokenizer.parameters(): + p.requires_grad = False + tokenizer.cuda() + return tokenizer + + +def default(val, d): + return val if val is not None else d + + +def ema_inplace(moving_avg, new, decay: float): + moving_avg.data.mul_(decay).add_(new, alpha=(1 - decay)) + + +def laplace_smoothing(x, n_categories: int, epsilon: float = 1e-5): + return (x + epsilon) / (x.sum() + n_categories * epsilon) + + +def uniform_init(*shape: int): + t = torch.empty(shape) + nn.init.kaiming_uniform_(t) + return t + + +def sum_flat(tensor): + """ + Take the sum over all non-batch dimensions. + """ + return tensor.sum(dim=list(range(1, len(tensor.shape)))) + + +def sample_vectors(samples, num: int): + num_samples, device = samples.shape[0], samples.device + + if num_samples >= num: + indices = torch.randperm(num_samples, device=device)[:num] + else: + indices = torch.randint(0, num_samples, (num,), device=device) + + return samples[indices] + + +def kmeans(samples, num_clusters: int, num_iters: int = 10): + dim, dtype = samples.shape[-1], samples.dtype + + means = sample_vectors(samples, num_clusters) + + for _ in range(num_iters): + diffs = rearrange(samples, "n d -> n () d") - rearrange(means, "c d -> () c d") + dists = -(diffs**2).sum(dim=-1) + + buckets = dists.max(dim=-1).indices + bins = torch.bincount(buckets, minlength=num_clusters) + zero_mask = bins == 0 + bins_min_clamped = bins.masked_fill(zero_mask, 1) + + new_means = buckets.new_zeros(num_clusters, dim, dtype=dtype) + new_means.scatter_add_(0, repeat(buckets, "n -> n d", d=dim), samples) + new_means = new_means / bins_min_clamped[..., None] + + means = torch.where(zero_mask[..., None], means, new_means) + + return means, bins + + +class EuclideanCodebook(nn.Module): + """Codebook with Euclidean distance. + Args: + dim (int): Dimension. + codebook_size (int): Codebook size. + kmeans_init (bool): Whether to use k-means to initialize the codebooks. + If set to true, run the k-means algorithm on the first training batch and use + the learned centroids as initialization. + kmeans_iters (int): Number of iterations used for k-means algorithm at initialization. + decay (float): Decay for exponential moving average over the codebooks. + epsilon (float): Epsilon value for numerical stability. + threshold_ema_dead_code (int): Threshold for dead code expiration. Replace any codes + that have an exponential moving average cluster size less than the specified threshold with + randomly selected vector from the current batch. 
+ """ + + def __init__( + self, + dim: int, + codebook_size: int, + kmeans_init: bool = False, + kmeans_iters: int = 10, + decay: float = 0.99, + epsilon: float = 1e-5, + threshold_ema_dead_code: int = 2, + ): + super().__init__() + self.decay = decay + init_fn = uniform_init if not kmeans_init else torch.zeros + embed = init_fn(codebook_size, dim) + + self.codebook_size = codebook_size + + self.kmeans_iters = kmeans_iters + self.epsilon = epsilon + self.threshold_ema_dead_code = threshold_ema_dead_code + + self.register_buffer("inited", torch.Tensor([not kmeans_init])) + self.register_buffer("cluster_size", torch.zeros(codebook_size)) + self.register_buffer("embed", embed) + self.register_buffer("embed_avg", embed.clone()) + + @torch.jit.ignore + def init_embed_(self, data): + if self.inited: + return + + embed, cluster_size = kmeans(data, self.codebook_size, self.kmeans_iters) + self.embed.data.copy_(embed) + self.embed_avg.data.copy_(embed.clone()) + self.cluster_size.data.copy_(cluster_size) + self.inited.data.copy_(torch.Tensor([True])) + # Make sure all buffers across workers are in sync after initialization + broadcast_tensors(self.buffers()) + + def replace_(self, samples, mask): + modified_codebook = torch.where( + mask[..., None], sample_vectors(samples, self.codebook_size), self.embed + ) + self.embed.data.copy_(modified_codebook) + + def expire_codes_(self, batch_samples): + if self.threshold_ema_dead_code == 0: + return + + expired_codes = self.cluster_size < self.threshold_ema_dead_code + if not torch.any(expired_codes): + return + + batch_samples = rearrange(batch_samples, "... d -> (...) d") + self.replace_(batch_samples, mask=expired_codes) + broadcast_tensors(self.buffers()) + + def preprocess(self, x): + x = rearrange(x, "... d -> (...) d") + return x + + def quantize(self, x): + embed = self.embed.t() + dist = -( + x.pow(2).sum(1, keepdim=True) + - 2 * x @ embed + + embed.pow(2).sum(0, keepdim=True) + ) + embed_ind = dist.max(dim=-1).indices + return embed_ind + + def postprocess_emb(self, embed_ind, shape): + return embed_ind.view(*shape[:-1]) + + def dequantize(self, embed_ind): + quantize = F.embedding(embed_ind, self.embed) + return quantize + + def encode(self, x): + shape = x.shape + x = self.preprocess(x) + embed_ind = self.quantize(x) + embed_ind = self.postprocess_emb(embed_ind, shape) + return embed_ind + + def decode(self, embed_ind): + quantize = self.dequantize(embed_ind) + return quantize + + def forward(self, x): + shape, dtype = x.shape, x.dtype + x = self.preprocess(x) + + self.init_embed_(x) + + embed_ind = self.quantize(x) + embed_onehot = F.one_hot(embed_ind, self.codebook_size).type(dtype) + embed_ind = self.postprocess_emb(embed_ind, shape) + quantize = self.dequantize(embed_ind) + + if self.training: + # We do the expiry of codes at this point, as buffers are in sync + # and all the workers will take the same decision. + self.expire_codes_(x) + ema_inplace(self.cluster_size, embed_onehot.sum(0), self.decay) + embed_sum = x.t() @ embed_onehot + ema_inplace(self.embed_avg, embed_sum.t(), self.decay) + cluster_size = ( + laplace_smoothing(self.cluster_size, self.codebook_size, self.epsilon) + * self.cluster_size.sum() + ) + embed_normalized = self.embed_avg / cluster_size.unsqueeze(1) + self.embed.data.copy_(embed_normalized) + + return quantize, embed_ind + + +class VectorQuantization(nn.Module): + """Vector quantization implementation. + Currently supports only Euclidean distance.
+ Args: + dim (int): Dimension + codebook_size (int): Codebook size + codebook_dim (int): Codebook dimension. If not defined, uses the specified dimension in dim. + decay (float): Decay for exponential moving average over the codebooks. + epsilon (float): Epsilon value for numerical stability. + kmeans_init (bool): Whether to use kmeans to initialize the codebooks. + kmeans_iters (int): Number of iterations used for kmeans initialization. + threshold_ema_dead_code (int): Threshold for dead code expiration. Replace any codes + that have an exponential moving average cluster size less than the specified threshold with + a randomly selected vector from the current batch. + commitment_weight (float): Weight for commitment loss. + """ + + def __init__( + self, + dim: int, + codebook_size: int, + codebook_dim=None, + decay: float = 0.99, + epsilon: float = 1e-5, + kmeans_init: bool = True, + kmeans_iters: int = 50, + threshold_ema_dead_code: int = 2, + commitment_weight: float = 1.0, + ): + super().__init__() + _codebook_dim: int = default(codebook_dim, dim) + + requires_projection = _codebook_dim != dim + self.project_in = ( + nn.Linear(dim, _codebook_dim) if requires_projection else nn.Identity() + ) + self.project_out = ( + nn.Linear(_codebook_dim, dim) if requires_projection else nn.Identity() + ) + + self.epsilon = epsilon + self.commitment_weight = commitment_weight + + self._codebook = EuclideanCodebook( + dim=_codebook_dim, + codebook_size=codebook_size, + kmeans_init=kmeans_init, + kmeans_iters=kmeans_iters, + decay=decay, + epsilon=epsilon, + threshold_ema_dead_code=threshold_ema_dead_code, + ) + self.codebook_size = codebook_size + self.l2_loss = lambda a, b: (a - b) ** 2 + + @property + def codebook(self): + return self._codebook.embed + + def encode(self, x: torch.Tensor) -> torch.Tensor: + x = self.project_in(x) + embed_in = self._codebook.encode(x) + return embed_in + + def decode(self, embed_ind: torch.Tensor) -> torch.Tensor: + quantize = self._codebook.decode(embed_ind) + quantize = self.project_out(quantize) + return quantize + + def forward(self, x: torch.Tensor): + """ + :param x: B x dim input tensor + :return: quantize: B x dim tensor containing reconstruction after quantization + embed_ind: B-dimensional tensor containing embedding indices + loss: scalar tensor containing commitment loss + """ + device = x.device + x = self.project_in(x) + + quantize, embed_ind = self._codebook(x) + + if self.training: + quantize = x + (quantize - x).detach() + + loss = torch.tensor([0.0], device=device, requires_grad=self.training) + + if self.training: + if self.commitment_weight > 0: + commit_loss = F.mse_loss(quantize.detach(), x) + loss = loss + commit_loss * self.commitment_weight + + quantize = self.project_out(quantize) + return quantize, embed_ind, loss
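For illustration, a minimal sketch of running a batch of latents through the VectorQuantization class above; the dimensions are arbitrary:

import torch

vq = VectorQuantization(dim=128, codebook_size=256, kmeans_init=False)
x = torch.randn(16, 128)  # B x dim latents
quantized, indices, commit_loss = vq(x)
# quantized: (16, 128) straight-through reconstruction of x
# indices: (16,) LongTensor of codebook entries
# commit_loss: commitment loss (non-zero only in training mode)

ResidualVectorQuantization below stacks several of these layers, each quantizing the residual left over by the previous one.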
+ + +class ResidualVectorQuantization(nn.Module): + """Residual vector quantization implementation. + Follows Algorithm 1 in https://arxiv.org/pdf/2107.03312.pdf + """ + + def __init__(self, *, num_quantizers: int, **kwargs): + super().__init__() + self.layers = nn.ModuleList( + [VectorQuantization(**kwargs) for _ in range(num_quantizers)] + ) + + def forward(self, x, B, T, mask, n_q=None): + """ + :param x: B x dim tensor + :param B, T, mask: unused in this implementation + :return: quantized_out: B x dim tensor + out_indices: B x n_q LongTensor containing indices for each quantizer + out_losses: scalar tensor containing commitment loss + """ + quantized_out = 0.0 + residual = x + + all_losses = [] + all_indices = [] + + n_q = n_q or len(self.layers) + + for layer in self.layers[:n_q]: + quantized, indices, loss = layer(residual) + residual = ( + residual - quantized + ) # would need quantized.detach() to have commitment gradients beyond the first quantizer, but this seems to harm performance + quantized_out = quantized_out + quantized + + all_indices.append(indices) + all_losses.append(loss) + + out_indices = torch.stack(all_indices, dim=-1) + out_losses = torch.mean(torch.stack(all_losses)) + return quantized_out, out_indices, out_losses + + def encode(self, x: torch.Tensor, n_q=None) -> torch.Tensor: + """ + :param x: B x dim input tensor + :return: B x n_q LongTensor containing indices for each quantizer + """ + residual = x + all_indices = [] + n_q = n_q or len(self.layers) + for layer in self.layers[:n_q]: + indices = layer.encode(residual) + quantized = layer.decode(indices) + residual = residual - quantized + all_indices.append(indices) + out_indices = torch.stack(all_indices, dim=-1) + return out_indices + + def decode(self, q_indices: torch.Tensor) -> torch.Tensor: + """ + :param q_indices: B x n_q LongTensor containing indices for each quantizer + :return: B x dim tensor containing reconstruction after quantization + """ + quantized_out = torch.tensor(0.0, device=q_indices.device) + q_indices = q_indices.permute(1, 0).contiguous() + for i, indices in enumerate(q_indices): + layer = self.layers[i] + quantized = layer.decode(indices) + quantized_out = quantized_out + quantized + return quantized_out + + +class TemporalVertexEncoder(nn.Module): + def __init__( + self, + n_vertices: int = 338, + latent_dim: int = 128, + ): + super().__init__() + self.input_dim = n_vertices + self.enc = nn.Sequential( + nn.Conv1d(self.input_dim, latent_dim, kernel_size=1), + nn.LeakyReLU(negative_slope=0.2, inplace=True), + nn.Conv1d(latent_dim, latent_dim, kernel_size=2, dilation=1), + nn.LeakyReLU(negative_slope=0.2, inplace=True), + nn.Conv1d(latent_dim, latent_dim, kernel_size=2, dilation=2), + nn.LeakyReLU(negative_slope=0.2, inplace=True), + nn.Conv1d(latent_dim, latent_dim, kernel_size=2, dilation=3), + nn.LeakyReLU(negative_slope=0.2, inplace=True), + nn.Conv1d(latent_dim, latent_dim, kernel_size=2, dilation=1), + ) + self.receptive_field = 8 + + def forward(self, verts): + """ + :param verts: B x T x n_vertices x 3 tensor containing batched sequences of vertices + :return: B x T x latent_dim tensor containing the latent representation + """ + if verts.dim() == 4: + verts = verts.permute(0, 2, 3, 1).contiguous() + verts = verts.view(verts.shape[0], self.input_dim, verts.shape[3]) + else: + verts = verts.permute(0, 2, 1) + verts = nn.functional.pad(verts, pad=[self.receptive_field - 1, 0]) + x = self.enc(verts) + x = x.permute(0, 2, 1).contiguous() + return x + + +class TemporalVertexDecoder(nn.Module): + def __init__( + self, + n_vertices: int = 338, + latent_dim: int = 128, + ): +
super().__init__() + self.output_dim = n_vertices + self.project_mean_shape = nn.Linear(self.output_dim, latent_dim) + self.dec = nn.Sequential( + nn.Conv1d(latent_dim, latent_dim, kernel_size=2, dilation=1), + nn.LeakyReLU(negative_slope=0.2, inplace=True), + nn.Conv1d(latent_dim, latent_dim, kernel_size=2, dilation=2), + nn.LeakyReLU(negative_slope=0.2, inplace=True), + nn.Conv1d(latent_dim, latent_dim, kernel_size=2, dilation=3), + nn.LeakyReLU(negative_slope=0.2, inplace=True), + nn.Conv1d(latent_dim, latent_dim, kernel_size=2, dilation=1), + nn.LeakyReLU(negative_slope=0.2, inplace=True), + nn.Conv1d(latent_dim, self.output_dim, kernel_size=1), + ) + self.receptive_field = 8 + + def forward(self, x): + """ + :param x: B x T x latent_dim tensor containing batched sequences of vertex encodings + :return: B x T x n_vertices x 3 tensor containing batched sequences of vertices + """ + x = x.permute(0, 2, 1).contiguous() + x = nn.functional.pad(x, pad=[self.receptive_field - 1, 0]) + verts = self.dec(x) + verts = verts.permute(0, 2, 1) + return verts + + +class TemporalVertexCodec(nn.Module): + def __init__( + self, + n_vertices: int = 338, + latent_dim: int = 128, + categories: int = 128, + residual_depth: int = 4, + ): + super().__init__() + self.latent_dim = latent_dim + self.categories = categories + self.residual_depth = residual_depth + self.n_clusters = categories + self.encoder = TemporalVertexEncoder( + n_vertices=n_vertices, latent_dim=latent_dim + ) + self.decoder = TemporalVertexDecoder( + n_vertices=n_vertices, latent_dim=latent_dim + ) + self.quantizer = ResidualVectorQuantization( + dim=latent_dim, + codebook_size=categories, + num_quantizers=residual_depth, + decay=0.99, + kmeans_init=True, + kmeans_iters=10, + threshold_ema_dead_code=2, + ) + + def predict(self, verts): + """wrapper to provide compatibility with kmeans""" + return self.encode(verts) + + def encode(self, verts): + """ + :param verts: B x T x n_vertices x 3 tensor containing batched sequences of vertices + :return: B x T x categories x residual_depth LongTensor containing quantized encodings + """ + enc = self.encoder(verts) + q = self.quantizer.encode(enc) + return q + + def decode(self, q): + """ + :param q: B x T x categories x residual_depth LongTensor containing quantized encodings + :return: B x T x n_vertices x 3 tensor containing decoded vertices + """ + reformat = q.dim() > 2 + if reformat: + B, T, _ = q.shape + q = q.reshape((-1, self.residual_depth)) + enc = self.quantizer.decode(q) + if reformat: + enc = enc.reshape((B, T, -1)) + verts = self.decoder(enc) + return verts + + @torch.no_grad() + def compute_perplexity(self, code_idx): + # Calculate new centres + code_onehot = torch.zeros( + self.categories, code_idx.shape[0], device=code_idx.device + ) # categories, N * L + code_onehot.scatter_(0, code_idx.view(1, code_idx.shape[0]), 1) + + code_count = code_onehot.sum(dim=-1) # categories + prob = code_count / torch.sum(code_count) + perplexity = torch.exp(-torch.sum(prob * torch.log(prob + 1e-7))) + return perplexity + + def forward(self, verts, mask=None): + """ + :param verts: B x T x n_vertices x 3 tensor containing mesh sequences + :return: verts: B x T x n_vertices x 3 tensor containing reconstructed mesh sequences + vq_loss: scalar tensor for vq commitment loss + """ + B, T = verts.shape[0], verts.shape[1] + x = self.encoder(verts) + x, code_idx, vq_loss = self.quantizer( + x.view(B * T, self.latent_dim), B, T, mask + ) + perplexity = self.compute_perplexity(code_idx[:, -1].view((-1))) + 
verts = self.decoder(x.view(B, T, self.latent_dim)) + verts = verts.reshape((verts.shape[0], verts.shape[1], -1)) + return verts, vq_loss, perplexity diff --git a/sample/generate.py b/sample/generate.py new file mode 100644 index 0000000000000000000000000000000000000000..c8996ffaf14913448db28e178713934db39a8a6e --- /dev/null +++ b/sample/generate.py @@ -0,0 +1,316 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +import os + +from typing import Callable, Dict, Union + +import numpy as np +import torch +from data_loaders.get_data import get_dataset_loader, load_local_data +from diffusion.respace import SpacedDiffusion +from model.cfg_sampler import ClassifierFreeSampleModel +from model.diffusion import FiLMTransformer + +from torch.utils.data import DataLoader +from utils.diff_parser_utils import generate_args +from utils.misc import fixseed, prGreen +from utils.model_util import create_model_and_diffusion, get_person_num, load_model + + +def _construct_template_variables(unconstrained: bool) -> (str,): + row_file_template = "sample{:02d}.mp4" + all_file_template = "samples_{:02d}_to_{:02d}.mp4" + if unconstrained: + sample_file_template = "row{:02d}_col{:02d}.mp4" + sample_print_template = "[{} row #{:02d} column #{:02d} | -> {}]" + row_file_template = row_file_template.replace("sample", "row") + row_print_template = "[{} row #{:02d} | all columns | -> {}]" + all_file_template = all_file_template.replace("samples", "rows") + all_print_template = "[rows {:02d} to {:02d} | -> {}]" + else: + sample_file_template = "sample{:02d}_rep{:02d}.mp4" + sample_print_template = '["{}" ({:02d}) | Rep #{:02d} | -> {}]' + row_print_template = '[ "{}" ({:02d}) | all repetitions | -> {}]' + all_print_template = "[samples {:02d} to {:02d} | all repetitions | -> {}]" + + return ( + sample_print_template, + row_print_template, + all_print_template, + sample_file_template, + row_file_template, + all_file_template, + ) + + +def _replace_keyframes( + model_kwargs: Dict[str, Dict[str, torch.Tensor]], + model: Union[FiLMTransformer, ClassifierFreeSampleModel], +) -> torch.Tensor: + B, T = ( + model_kwargs["y"]["keyframes"].shape[0], + model_kwargs["y"]["keyframes"].shape[1], + ) + with torch.no_grad(): + tokens = model.transformer.generate( + model_kwargs["y"]["audio"], + T, + layers=model.tokenizer.residual_depth, + n_sequences=B, + ) + tokens = tokens.reshape((B, -1, model.tokenizer.residual_depth)) + pred = model.tokenizer.decode(tokens).detach().cpu() + assert ( + model_kwargs["y"]["keyframes"].shape == pred.shape + ), f"{model_kwargs['y']['keyframes'].shape} vs {pred.shape}" + return pred + + +def _run_single_diffusion( + args, + model_kwargs: Dict[str, Dict[str, torch.Tensor]], + diffusion: SpacedDiffusion, + model: Union[FiLMTransformer, ClassifierFreeSampleModel], + inv_transform: Callable, + gt: torch.Tensor, +) -> (torch.Tensor,): + if args.data_format == "pose" and args.resume_trans is not None: + model_kwargs["y"]["keyframes"] = _replace_keyframes(model_kwargs, model) + + sample_fn = diffusion.ddim_sample_loop + with torch.no_grad(): + sample = sample_fn( + model, + (args.batch_size, model.nfeats, 1, args.curr_seq_length), + clip_denoised=False, + model_kwargs=model_kwargs, + init_image=None, + progress=True, + dump_steps=None, + noise=None, + const_noise=False, + ) + sample = inv_transform(sample.cpu().permute(0, 2, 3, 1), 
args.data_format).permute( + 0, 3, 1, 2 + ) + curr_audio = inv_transform(model_kwargs["y"]["audio"].cpu().numpy(), "audio") + keyframes = inv_transform(model_kwargs["y"]["keyframes"], args.data_format) + gt_seq = inv_transform(gt.cpu().permute(0, 2, 3, 1), args.data_format).permute( + 0, 3, 1, 2 + ) + + return sample, curr_audio, keyframes, gt_seq + + +def _generate_sequences( + args, + model_kwargs: Dict[str, Dict[str, torch.Tensor]], + diffusion: SpacedDiffusion, + model: Union[FiLMTransformer, ClassifierFreeSampleModel], + test_data: torch.Tensor, + gt: torch.Tensor, +) -> Dict[str, np.ndarray]: + all_motions = [] + all_lengths = [] + all_audio = [] + all_gt = [] + all_keyframes = [] + + for rep_i in range(args.num_repetitions): + print(f"### Sampling [repetitions #{rep_i}]") + # add CFG scale to batch + if args.guidance_param != 1: + model_kwargs["y"]["scale"] = ( + torch.ones(args.batch_size, device=args.device) * args.guidance_param + ) + model_kwargs["y"] = { + key: val.to(args.device) if torch.is_tensor(val) else val + for key, val in model_kwargs["y"].items() + } + sample, curr_audio, keyframes, gt_seq = _run_single_diffusion( + args, model_kwargs, diffusion, model, test_data.dataset.inv_transform, gt + ) + all_motions.append(sample.cpu().numpy()) + all_audio.append(curr_audio) + all_keyframes.append(keyframes.cpu().numpy()) + all_gt.append(gt_seq.cpu().numpy()) + all_lengths.append(model_kwargs["y"]["lengths"].cpu().numpy()) + + print(f"created {len(all_motions) * args.batch_size} samples") + + return { + "motions": np.concatenate(all_motions, axis=0), + "audio": np.concatenate(all_audio, axis=0), + "gt": np.concatenate(all_gt, axis=0), + "lengths": np.concatenate(all_lengths, axis=0), + "keyframes": np.concatenate(all_keyframes, axis=0), + } + + +def _render_pred( + args, + data_block: Dict[str, torch.Tensor], + sample_file_template: str, + audio_per_frame: int, +) -> None: + from visualize.render_codes import BodyRenderer + + face_codes = None + if args.face_codes is not None: + face_codes = np.load(args.face_codes, allow_pickle=True).item() + face_motions = face_codes["motions"] + face_gts = face_codes["gt"] + face_audio = face_codes["audio"] + + config_base = f"./checkpoints/ca_body/data/{get_person_num(args.data_root)}" + body_renderer = BodyRenderer( + config_base=config_base, + render_rgb=True, + ) + + for sample_i in range(args.num_samples): + for rep_i in range(args.num_repetitions): + idx = rep_i * args.batch_size + sample_i + save_file = sample_file_template.format(sample_i, rep_i) + animation_save_path = os.path.join(args.output_dir, save_file) + # format data + length = data_block["lengths"][idx] + body_motion = ( + data_block["motions"][idx].transpose(2, 0, 1)[:length].squeeze(-1) + ) + face_motion = face_motions[idx].transpose(2, 0, 1)[:length].squeeze(-1) + assert np.array_equal( + data_block["audio"][idx], face_audio[idx] + ), "face audio is not the same" + audio = data_block["audio"][idx, : length * audio_per_frame, :].T + # set up render data block to pass into renderer + render_data_block = { + "audio": audio, + "body_motion": body_motion, + "face_motion": face_motion, + } + if args.render_gt: + gt_body = data_block["gt"][idx].transpose(2, 0, 1)[:length].squeeze(-1) + gt_face = face_gts[idx].transpose(2, 0, 1)[:length].squeeze(-1) + render_data_block["gt_body"] = gt_body + render_data_block["gt_face"] = gt_face + body_renderer.render_full_video( + render_data_block, + animation_save_path, + audio_sr=audio_per_frame * 30, + render_gt=args.render_gt, + ) + + 
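For reference, a minimal sketch of inspecting the results.npy file that main() below writes via np.save; the path and batch size are illustrative, and the indexing mirrors _render_pred above:

import numpy as np

data_block = np.load("results.npy", allow_pickle=True).item()
# keys: "motions", "audio", "gt", "lengths", "keyframes";
# repetitions are concatenated along axis 0
batch_size = 4  # equals args.num_samples after _reset_sample_args
sample_i, rep_i = 0, 0
idx = rep_i * batch_size + sample_i
length = data_block["lengths"][idx]
body_motion = data_block["motions"][idx].transpose(2, 0, 1)[:length].squeeze(-1)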
+def _reset_sample_args(args) -> None: + # set the sequence length to match the one specified by the user + name = os.path.basename(os.path.dirname(args.model_path)) + niter = os.path.basename(args.model_path).replace("model", "").replace(".pt", "") + args.curr_seq_length = ( + args.curr_seq_length + if args.curr_seq_length is not None + else args.max_seq_length + ) + # add the resume predictor model path + resume_trans_name = "" + if args.data_format == "pose" and args.resume_trans is not None: + resume_trans_parts = args.resume_trans.split("/") + resume_trans_name = f"{resume_trans_parts[1]}_{resume_trans_parts[-1]}" + # reformat the output directory + args.output_dir = os.path.join( + os.path.dirname(args.model_path), + "samples_{}_{}_seed{}_{}".format(name, niter, args.seed, resume_trans_name), + ) + assert ( + args.num_samples <= args.batch_size + ), f"Please either increase batch_size({args.batch_size}) or reduce num_samples({args.num_samples})" + # set the batch size to match the number of samples to generate + args.batch_size = args.num_samples + + +def _setup_dataset(args) -> DataLoader: + data_root = args.data_root + data_dict = load_local_data( + data_root, + audio_per_frame=1600, + flip_person=args.flip_person, + ) + test_data = get_dataset_loader( + args=args, + data_dict=data_dict, + split="test", + chunk=True, + ) + return test_data + + +def _setup_model( + args, +) -> (Union[FiLMTransformer, ClassifierFreeSampleModel], SpacedDiffusion): + model, diffusion = create_model_and_diffusion(args, split_type="test") + print(f"Loading checkpoints from [{args.model_path}]...") + state_dict = torch.load(args.model_path, map_location="cpu") + load_model(model, state_dict) + + if not args.unconstrained: + assert args.guidance_param != 1 + + if args.guidance_param != 1: + prGreen("[CFS] wrapping model in classifier free sample") + model = ClassifierFreeSampleModel(model) + model.to(args.device) + model.eval() + return model, diffusion + + +def main(): + args = generate_args() + fixseed(args.seed) + _reset_sample_args(args) + + print("Loading dataset...") + test_data = _setup_dataset(args) + iterator = iter(test_data) + + print("Creating model and diffusion...") + model, diffusion = _setup_model(args) + + if args.pose_codes is None: + # generate sequences + gt, model_kwargs = next(iterator) + data_block = _generate_sequences( + args, model_kwargs, diffusion, model, test_data, gt + ) + os.makedirs(args.output_dir, exist_ok=True) + npy_path = os.path.join(args.output_dir, "results.npy") + print(f"saving results file to [{npy_path}]") + np.save(npy_path, data_block) + else: + # load the pre-generated results + data_block = np.load(args.pose_codes, allow_pickle=True).item() + + # plot only if face_codes exists and we are predicting pose + if args.plot: + assert args.face_codes is not None, "need both body and face codes" + assert ( + args.data_format == "pose" + ), "plotting is currently only supported for pose data" + print(f"saving visualizations to [{args.output_dir}]...") + _, _, _, sample_file_template, _, _ = _construct_template_variables( + args.unconstrained + ) + _render_pred( + args, + data_block, + sample_file_template, + test_data.dataset.audio_per_frame, + ) + + +if __name__ == "__main__": + main() diff --git a/scripts/download_alldatasets.sh b/scripts/download_alldatasets.sh new file mode 100644 index 0000000000000000000000000000000000000000..2efc8ce71aa1b6cbaee7f1a22499ddc609286b4e --- /dev/null +++ b/scripts/download_alldatasets.sh @@ -0,0 +1,6 @@ +for i in "PXB184" "RLW104" "TXB805"
"GQS883" +do + curl -L https://github.com/facebookresearch/audio2photoreal/releases/download/v1.0/${i}.zip -o ${i}.zip || { echo 'downloading dataset failed' ; exit 1; } + unzip ${i}.zip -d dataset/ + rm ${i}.zip +done diff --git a/scripts/download_allmodels.sh b/scripts/download_allmodels.sh new file mode 100644 index 0000000000000000000000000000000000000000..4e69a2a76a9c9a5366478acb41187100bcc3ef29 --- /dev/null +++ b/scripts/download_allmodels.sh @@ -0,0 +1,13 @@ +for i in "PXB184" "RLW104" "TXB805" "GQS883" +do + # download motion models + wget http://audio2photoreal_models.berkeleyvision.org/${i}_models.tar || { echo 'downloading model failed' ; exit 1; } + tar xvf ${i}_models.tar + rm ${i}_models.tar + + # download ca body rendering checkpoints and assets + mkdir -p checkpoints/ca_body/data/ + wget https://github.com/facebookresearch/ca_body/releases/download/v0.0.1-alpha/${i}.tar.gz || { echo 'downloading ca body model failed' ; exit 1; } + tar xvf ${i}.tar.gz --directory checkpoints/ca_body/data/ + rm ${i}.tar.gz +done \ No newline at end of file diff --git a/scripts/download_prereq.sh b/scripts/download_prereq.sh new file mode 100644 index 0000000000000000000000000000000000000000..65896a8b42c2eceed8f6218a04085b7d13ddc6a8 --- /dev/null +++ b/scripts/download_prereq.sh @@ -0,0 +1,9 @@ + +# install the prerequisite asset models (lip regressor and wav2vec) +wget http://audio2photoreal_models.berkeleyvision.org/asset_models.tar +tar xvf asset_models.tar +rm asset_models.tar + +# we obtained the wav2vec models via these links: +# wget https://dl.fbaipublicfiles.com/fairseq/wav2vec/wav2vec_large.pt -P ./assets/ +# wget https://dl.fbaipublicfiles.com/fairseq/wav2vec/vq-wav2vec.pt -P ./assets/ diff --git a/scripts/installation.sh b/scripts/installation.sh new file mode 100644 index 0000000000000000000000000000000000000000..453ffb0f258e6f13247f77910a2d19b2f6084403 --- /dev/null +++ b/scripts/installation.sh @@ -0,0 +1,4 @@ +# download the prerequisite asset models (lip regressor and wav2vec) +wget http://audio2photoreal_models.berkeleyvision.org/asset_models.tar +tar xvf asset_models.tar +rm asset_models.tar diff --git a/scripts/requirements.txt b/scripts/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..741b25894df97dabe013febe39b62ded2e6cb9f1 --- /dev/null +++ b/scripts/requirements.txt @@ -0,0 +1,17 @@ +attrdict +blobfile +einops +fairseq +gradio +matplotlib +mediapy +numpy==1.23.0 +opencv-python +packaging +scikit-learn +tensorboard +tensorboardX +torch==2.0.1 +torchaudio==2.0.2 +torchvision==0.15.2 +tqdm diff --git a/train/train_diffusion.py b/train/train_diffusion.py new file mode 100644 index 0000000000000000000000000000000000000000..e2992e2caaf8f4603de4c5297595d5f7f1c46276 --- /dev/null +++ b/train/train_diffusion.py @@ -0,0 +1,83 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. 
+""" + +import json +import os + +import torch +import torch.multiprocessing as mp + +from data_loaders.get_data import get_dataset_loader, load_local_data +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.utils.tensorboard import SummaryWriter +from train.train_platforms import ClearmlPlatform, NoPlatform, TensorboardPlatform +from train.training_loop import TrainLoop +from utils.diff_parser_utils import train_args +from utils.misc import cleanup, fixseed, setup_dist +from utils.model_util import create_model_and_diffusion + + +def main(rank: int, world_size: int): + args = train_args() + fixseed(args.seed) + train_platform_type = eval(args.train_platform_type) + train_platform = train_platform_type(args.save_dir) + train_platform.report_args(args, name="Args") + setup_dist(args.device) + + if rank == 0: + if args.save_dir is None: + raise FileNotFoundError("save_dir was not specified.") + elif os.path.exists(args.save_dir) and not args.overwrite: + raise FileExistsError("save_dir [{}] already exists.".format(args.save_dir)) + elif not os.path.exists(args.save_dir): + os.makedirs(args.save_dir) + args_path = os.path.join(args.save_dir, "args.json") + with open(args_path, "w") as fw: + json.dump(vars(args), fw, indent=4, sort_keys=True) + + if not os.path.exists(args.data_root): + args.data_root = args.data_root.replace("/home/", "/derived/") + + data_dict = load_local_data(args.data_root, audio_per_frame=1600) + print("creating data loader...") + data = get_dataset_loader(args=args, data_dict=data_dict) + + print("creating logger...") + writer = SummaryWriter(args.save_dir) + + print("creating model and diffusion...") + model, diffusion = create_model_and_diffusion(args, split_type="train") + model.to(rank) + + if world_size > 1: + model = DDP( + model, device_ids=[rank], output_device=rank, find_unused_parameters=True + ) + + params = ( + model.module.parameters_w_grad() + if world_size > 1 + else model.parameters_w_grad() + ) + print("Total params: %.2fM" % (sum(p.numel() for p in params) / 1000000.0)) + print("Training...") + + TrainLoop( + args, train_platform, model, diffusion, data, writer, rank, world_size + ).run_loop() + train_platform.close() + cleanup() + + +if __name__ == "__main__": + world_size = torch.cuda.device_count() + print(f"using {world_size} gpus") + if world_size > 1: + mp.spawn(main, args=(world_size,), nprocs=world_size, join=True) + else: + main(rank=0, world_size=1) diff --git a/train/train_guide.py b/train/train_guide.py new file mode 100644 index 0000000000000000000000000000000000000000..e5e7a4a38e209bd6fdf5ac7fdfa045f9a28d886c --- /dev/null +++ b/train/train_guide.py @@ -0,0 +1,362 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. 
+""" + +import json +import os +from typing import Any, Dict + +import numpy as np +import torch +import torch.optim as optim + +from data_loaders.get_data import get_dataset_loader, load_local_data +from diffusion.nn import sum_flat +from model.guide import GuideTransformer +from model.vqvae import setup_tokenizer, TemporalVertexCodec +from torch.utils.data import DataLoader +from torch.utils.tensorboard import SummaryWriter +from tqdm import tqdm +from utils.guide_parser_utils import train_args +from utils.misc import fixseed + + +class ModelTrainer: + def __init__( + self, args, model: GuideTransformer, tokenizer: TemporalVertexCodec + ) -> None: + self.add_frame_cond = args.add_frame_cond + self.data_format = args.data_format + self.tokenizer = tokenizer + self.model = model.cuda() + self.gn = args.gn + self.max_seq_length = args.max_seq_length + self.optimizer = optim.AdamW( + model.parameters(), + lr=args.lr, + betas=(0.9, 0.99), + weight_decay=args.weight_decay, + ) + self.scheduler = optim.lr_scheduler.MultiStepLR( + self.optimizer, milestones=args.lr_scheduler, gamma=args.gamma + ) + self.l2_loss = lambda a, b: (a - b) ** 2 + self.start_step = 0 + self.warm_up_iter = args.warm_up_iter + self.lr = args.lr + self.ce_loss = torch.nn.CrossEntropyLoss( + ignore_index=self.tokenizer.n_clusters + 1, label_smoothing=0.1 + ) + + if args.resume_trans is not None: + self._load_from_checkpoint() + + def _load_from_checkpoint(self) -> None: + print("loading", args.resume_trans) + ckpt = torch.load(args.resume_trans, map_location="cpu") + self.model.load_state_dict(ckpt["model_state_dict"], strict=True) + self.optimizer.load_state_dict(ckpt["optimizer_state_dict"]) + self.start_step = ckpt["iteration"] + + def _abbreviate( + self, meshes: torch.Tensor, mask: torch.Tensor, step: int + ) -> (torch.Tensor,): + keyframes = meshes[..., ::step] + new_mask = mask[..., ::step] + return keyframes, new_mask + + def _prepare_tokens( + self, meshes: torch.Tensor, mask: torch.Tensor + ) -> (torch.Tensor,): + if self.add_frame_cond == 1: + keyframes, new_mask = self._abbreviate(meshes, mask, 30) + elif self.add_frame_cond is None: + keyframes, new_mask = self._abbreviate(meshes, mask, 1) + + meshes = keyframes.squeeze(2).permute((0, 2, 1)) + B, T, _ = meshes.shape + target_tokens = self.tokenizer.predict(meshes) + target_tokens = target_tokens.reshape(B, -1) + input_tokens = torch.cat( + [ + torch.zeros( + (B, 1), dtype=target_tokens.dtype, device=target_tokens.device + ) + + self.model.tokens, + target_tokens[:, :-1], + ], + axis=-1, + ) + return input_tokens, target_tokens, new_mask, meshes.reshape((B, T, -1)) + + def _run_single_train_step(self, input_tokens, audio, target_tokens): + B, T = input_tokens.shape[0], input_tokens.shape[1] + self.optimizer.zero_grad() + logits = self.model(input_tokens, audio, cond_drop_prob=0.20) + loss = self.ce_loss( + logits.reshape((B * T, -1)), target_tokens.reshape((B * T)).long() + ) + loss.backward() + if self.gn: + torch.nn.utils.clip_grad_norm_(self.model.parameters(), 1.0) + self.optimizer.step() + self.scheduler.step() + return logits, loss + + def _run_single_val_step( + self, motion: torch.Tensor, cond: torch.Tensor + ) -> Dict[str, Any]: + self.model.eval() + with torch.no_grad(): + motion = torch.as_tensor(motion).cuda() + ( + input_tokens, + target_tokens, + new_mask, + downsampled_gt, + ) = self._prepare_tokens(motion, cond["mask"]) + audio = cond["audio"].cuda() + + new_mask = torch.as_tensor(new_mask) + B, T = target_tokens.shape[0], target_tokens.shape[1] 
+ logits = self.model(input_tokens, audio) + tokens = torch.argmax(logits, dim=-1).view( + B, -1, self.tokenizer.residual_depth + ) + pred = self.tokenizer.decode(tokens).detach().cpu() + ce_loss = self.ce_loss( + logits.reshape((B * T, -1)), target_tokens.reshape((B * T)).long() + ) + l2_loss = self._masked_l2( + downsampled_gt.permute(0, 2, 1).unsqueeze(2).detach().cpu(), + pred.permute(0, 2, 1).unsqueeze(2), + new_mask, + ) + acc = self.compute_accuracy(logits, target_tokens, new_mask) + + return { + "pred": pred, + "gt": downsampled_gt, + "metrics": { + "ce_loss": ce_loss.item(), + "l2_loss": l2_loss.item(), + "perplexity": np.exp(ce_loss.item()), + "acc": acc.item(), + }, + } + + def _masked_l2(self, a: torch.Tensor, b: torch.Tensor, mask: torch.Tensor) -> float: + loss = self.l2_loss(a, b) + loss = sum_flat(loss * mask.float()) + n_entries = a.shape[1] * a.shape[2] + non_zero_elements = sum_flat(mask) * n_entries + mse_loss_val = loss / non_zero_elements + return mse_loss_val.mean() + + def compute_ce_loss( + self, logits: torch.Tensor, target_tokens: torch.Tensor, mask: torch.Tensor + ) -> float: + target_tokens[~mask.squeeze().detach().cpu()] = 0 + B = logits.shape[0] + logprobs = torch.log_softmax(logits, dim=-1).view( + B, -1, 1, self.tokenizer.n_clusters + ) + logprobs = logprobs[:, self.mask_left :, :, :].contiguous() + labels = target_tokens.view(B, -1, 1) + labels = labels[:, self.mask_left :, :].contiguous() + loss = torch.nn.functional.nll_loss( + logprobs.view(-1, self.tokenizer.n_clusters), + labels.view(-1).long(), + reduction="none", + ).reshape((B, 1, 1, -1)) + mask = mask.float().to(loss.device) + loss = sum_flat(loss * mask) + non_zero_elements = sum_flat(mask) + ce_loss_val = loss / non_zero_elements + return ce_loss_val.mean() + + def compute_accuracy( + self, logits: torch.Tensor, target: torch.Tensor, mask: torch.Tensor + ) -> float: + mask = mask.squeeze() + probs = torch.softmax(logits, dim=-1) + _, cls_pred_index = torch.max(probs, dim=-1) + acc = (cls_pred_index.flatten(0) == target.flatten(0)).reshape( + cls_pred_index.shape + ) + acc = sum_flat(acc).detach().cpu() + non_zero_elements = sum_flat(mask) + acc_val = acc / non_zero_elements * 100 + return acc_val.mean() + + def update_lr_warm_up(self, nb_iter: int) -> float: + current_lr = self.lr * (nb_iter + 1) / (self.warm_up_iter + 1) + for param_group in self.optimizer.param_groups: + param_group["lr"] = current_lr + return current_lr + + def train_step(self, motion: torch.Tensor, cond: torch.Tensor) -> Dict[str, Any]: + self.model.train() + motion = torch.as_tensor(motion).cuda() + input_tokens, target_tokens, new_mask, downsampled_gt = self._prepare_tokens( + motion, cond["mask"] + ) + audio = cond["audio"].cuda() + new_mask = torch.as_tensor(new_mask) + + logits, loss = self._run_single_train_step(input_tokens, audio, target_tokens) + with torch.no_grad(): + tokens = torch.argmax(logits, dim=-1).view( + input_tokens.shape[0], -1, self.tokenizer.residual_depth + ) + pred = self.tokenizer.decode(tokens).detach().cpu() + l2_loss = self._masked_l2( + downsampled_gt.permute(0, 2, 1).unsqueeze(2).detach().cpu(), + pred.permute(0, 2, 1).unsqueeze(2), + new_mask, + ) + acc = self.compute_accuracy(logits, target_tokens, new_mask) + + return { + "pred": pred, + "gt": downsampled_gt, + "loss": loss, + "metrics": { + "ce_loss": loss.item(), + "l2_loss": l2_loss.item(), + "perplexity": np.exp(loss.item()), + "acc": acc.item(), + }, + } + + def validate( + self, + val_data: DataLoader, + writer: SummaryWriter, + step: 
int,
+        save_dir: str,
+        log_step: int = 100,
+        max_samples: int = 30,
+    ) -> None:
+        val_metrics = {}
+        pred_values = []
+        gt_values = []
+        for i, (val_motion, val_cond) in enumerate(val_data):
+            val_out = self._run_single_val_step(val_motion, val_cond["y"])
+            if "metrics" in val_out.keys():
+                for k, v in val_out["metrics"].items():
+                    val_metrics[k] = val_metrics.get(k, 0.0) + v
+            if "pred" in val_out.keys() and i % log_step == 0:
+                pred_values.append(
+                    val_data.dataset.inv_transform(val_out["pred"], self.data_format)
+                )
+                gt_values.append(
+                    val_data.dataset.inv_transform(val_out["gt"], self.data_format)
+                )
+            if i % log_step == 0:
+                print(
+                    f'val_l2_loss at {step} [{i}]: {val_metrics["l2_loss"] / len(val_data):.4f}'
+                )
+        pred_values = torch.concatenate(pred_values, dim=0)
+        gt_values = torch.concatenate(gt_values, dim=0)
+        idx = np.random.permutation(len(pred_values))[:max_samples]
+        pred_values = pred_values[idx]
+        gt_values = gt_values[idx]
+        for i, (pred, gt) in enumerate(zip(pred_values, gt_values)):
+            pred = pred.unsqueeze(0).detach().cpu().numpy()
+            pose = gt.unsqueeze(0).detach().cpu().numpy()
+            np.save(os.path.join(save_dir, f"b{i:04d}_pred.npy"), pred)
+            np.save(os.path.join(save_dir, f"b{i:04d}_gt.npy"), pose)
+
+        msg = ""
+        for k, v in val_metrics.items():
+            writer.add_scalar(f"val_{k}", v / len(val_data), step)
+            msg += f"val_{k} at {step}: {v / len(val_data):.4f} | "
+        print(msg)
+
+
+def _save_checkpoint(
+    args, iteration: int, model: GuideTransformer, optimizer: optim.Optimizer
+) -> None:
+    os.makedirs(f"{args.out_dir}/checkpoints/", exist_ok=True)
+    filename = f"iter-{iteration:07d}.pt"
+    torch.save(
+        {
+            "iteration": iteration,
+            "model_state_dict": model.state_dict(),
+            "optimizer_state_dict": optimizer.state_dict(),
+        },
+        f"{args.out_dir}/checkpoints/{filename}",
+    )
+
+
+def _load_data_info(args) -> Tuple[DataLoader, DataLoader]:
+    data_dict = load_local_data(args.data_root, audio_per_frame=1600)
+    train_data = get_dataset_loader(
+        args=args, data_dict=data_dict, split="train", add_padding=False
+    )
+    val_data = get_dataset_loader(args=args, data_dict=data_dict, split="val")
+    return train_data, val_data
+
+
+def main(args):
+    fixseed(args.seed)
+    os.makedirs(args.out_dir, exist_ok=True)
+    writer = SummaryWriter(f"{args.out_dir}/logs/")
+    args_path = os.path.join(args.out_dir, "args.json")
+    with open(args_path, "w") as fw:
+        json.dump(vars(args), fw, indent=4, sort_keys=True)
+    tokenizer = setup_tokenizer(args.resume_pth)
+
+    model = GuideTransformer(
+        tokens=tokenizer.n_clusters,
+        emb_len=798 if args.max_seq_length == 240 else 1998,
+        num_layers=args.layers,
+        dim=args.dim,
+    )
+    train_data, val_data = _load_data_info(args)
+    trainer = ModelTrainer(args, model, tokenizer)
+    step = trainer.start_step
+
+    for _ in range(1, args.total_iter + 1):
+        train_metrics = {}
+        count = 0
+        for motion, cond in tqdm(train_data):
+            if step < args.warm_up_iter:
+                current_lr = trainer.update_lr_warm_up(step)
+
+            # run single train step
+            train_out = trainer.train_step(motion, cond["y"])
+            if "metrics" in train_out.keys():
+                for k, v in train_out["metrics"].items():
+                    train_metrics[k] = train_metrics.get(k, 0.0) + v
+            count += 1
+
+            # log all of the metrics
+            if step % args.log_interval == 0:
+                msg = ""
+                for k, v in train_metrics.items():
+                    writer.add_scalar(f"train_{k}", v / count, step)
+                    msg += f"train_{k} at {step}: {v / count:.4f} | "
+                train_metrics = {}
+                count = 0
+                writer.add_scalar("train_lr", trainer.scheduler.get_lr()[0], step)
+                if step < args.warm_up_iter:
+                    msg += f"lr: {current_lr} | "
+                print(msg)
+                writer.flush()
+
+            # run single evaluation step and save
+            if step % args.eval_interval == 0:
+                trainer.validate(val_data, writer, step, args.out_dir)
+            if step % args.save_interval == 0:
+                _save_checkpoint(args, step, trainer.model, trainer.optimizer)
+            step += 1
+
+
+if __name__ == "__main__":
+    args = train_args()
+    main(args)
diff --git a/train/train_platforms.py b/train/train_platforms.py
new file mode 100644
index 0000000000000000000000000000000000000000..83200ee6956a9680f557200f38fc559481c4527a
--- /dev/null
+++ b/train/train_platforms.py
@@ -0,0 +1,59 @@
+"""
+Copyright (c) Meta Platforms, Inc. and affiliates.
+All rights reserved.
+This source code is licensed under the license found in the
+LICENSE file in the root directory of this source tree.
+"""
+
+import os
+
+class TrainPlatform:
+    def __init__(self, save_dir):
+        pass
+
+    def report_scalar(self, name, value, iteration, group_name=None):
+        pass
+
+    def report_args(self, args, name):
+        pass
+
+    def close(self):
+        pass
+
+
+class ClearmlPlatform(TrainPlatform):
+    def __init__(self, save_dir):
+        from clearml import Task
+        path, name = os.path.split(save_dir)
+        self.task = Task.init(project_name='motion_diffusion',
+                              task_name=name,
+                              output_uri=path)
+        self.logger = self.task.get_logger()
+
+    def report_scalar(self, name, value, iteration, group_name):
+        self.logger.report_scalar(title=group_name, series=name, iteration=iteration, value=value)
+
+    def report_args(self, args, name):
+        self.task.connect(args, name=name)
+
+    def close(self):
+        self.task.close()
+
+
+class TensorboardPlatform(TrainPlatform):
+    def __init__(self, save_dir):
+        from torch.utils.tensorboard import SummaryWriter
+        self.writer = SummaryWriter(log_dir=save_dir)
+
+    def report_scalar(self, name, value, iteration, group_name=None):
+        self.writer.add_scalar(f'{group_name}/{name}', value, iteration)
+
+    def close(self):
+        self.writer.close()
+
+
+class NoPlatform(TrainPlatform):
+    def __init__(self, save_dir):
+        pass
+
+
diff --git a/train/train_vq.py b/train/train_vq.py
new file mode 100644
index 0000000000000000000000000000000000000000..41829ff5785ca667a6ff4b90038c5438df0a8e24
--- /dev/null
+++ b/train/train_vq.py
@@ -0,0 +1,374 @@
+"""
+Copyright (c) Meta Platforms, Inc. and affiliates.
+All rights reserved.
+This source code is licensed under the license found in the
+LICENSE file in the root directory of this source tree.
+""" + +import argparse +import copy +import json +import logging +import os +import sys +import warnings +from typing import Any, Dict + +import model.vqvae as vqvae + +import numpy as np +import torch +import torch.optim as optim +from data_loaders.get_data import get_dataset_loader, load_local_data +from diffusion.nn import sum_flat +from torch.utils.tensorboard import SummaryWriter +from tqdm import tqdm +from utils.vq_parser_utils import train_args + +warnings.filterwarnings("ignore") + + +def cycle(iterable): + while True: + for x in iterable: + yield x + + +def get_logger(out_dir: str): + logger = logging.getLogger("Exp") + logger.setLevel(logging.INFO) + formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s") + + file_path = os.path.join(out_dir, "run.log") + file_hdlr = logging.FileHandler(file_path) + file_hdlr.setFormatter(formatter) + + strm_hdlr = logging.StreamHandler(sys.stdout) + strm_hdlr.setFormatter(formatter) + + logger.addHandler(file_hdlr) + logger.addHandler(strm_hdlr) + return logger + + +class ModelTrainer: + def __init__(self, args, net: vqvae.TemporalVertexCodec, logger, writer): + self.net = net + self.warm_up_iter = args.warm_up_iter + self.lr = args.lr + self.optimizer = optim.AdamW( + self.net.parameters(), + lr=args.lr, + betas=(0.9, 0.99), + weight_decay=args.weight_decay, + ) + self.scheduler = torch.optim.lr_scheduler.MultiStepLR( + self.optimizer, milestones=args.lr_scheduler, gamma=args.gamma + ) + self.data_format = args.data_format + self.loss = torch.nn.SmoothL1Loss() + self.loss_vel = args.loss_vel + self.commit = args.commit + self.logger = logger + self.writer = writer + self.best_commit = float("inf") + self.best_recons = float("inf") + self.best_perplexity = float("inf") + self.best_iter = 0 + self.out_dir = args.out_dir + + def _masked_l2(self, a, b, mask): + loss = self._l2_loss(a, b) + loss = sum_flat(loss * mask.float()) + n_entries = a.shape[1] * a.shape[2] + non_zero_elements = sum_flat(mask) * n_entries + mse_loss_val = loss / non_zero_elements + return mse_loss_val + + def _l2_loss(self, motion_pred, motion_gt, mask=None): + if mask is not None: + return self._masked_l2(motion_pred, motion_gt, mask) + else: + return self.loss(motion_pred, motion_gt) + + def _vel_loss(self, motion_pred, motion_gt): + model_results_vel = motion_pred[..., :-1] - motion_pred[..., 1:] + model_targets_vel = motion_gt[..., :-1] - motion_gt[..., 1:] + return self.loss(model_results_vel, model_targets_vel) + + def _update_lr_warm_up(self, nb_iter): + current_lr = self.lr * (nb_iter + 1) / (self.warm_up_iter + 1) + for param_group in self.optimizer.param_groups: + param_group["lr"] = current_lr + return current_lr + + def run_warmup_steps(self, train_loader_iter, skip_step, logger): + avg_recons, avg_perplexity, avg_commit = 0.0, 0.0, 0.0 + for nb_iter in tqdm(range(1, args.warm_up_iter)): + current_lr = self._update_lr_warm_up(nb_iter) + gt_motion, cond = next(train_loader_iter) + loss_dict = self.run_train_step(gt_motion, cond, skip_step) + + avg_recons += loss_dict["loss_motion"] + avg_perplexity += loss_dict["perplexity"] + avg_commit += loss_dict["loss_commit"] + + if nb_iter % args.print_iter == 0: + avg_recons /= args.print_iter + avg_perplexity /= args.print_iter + avg_commit /= args.print_iter + + logger.info( + f"Warmup. Iter {nb_iter} : lr {current_lr:.5f} \t Commit. {avg_commit:.5f} \t PPL. {avg_perplexity:.2f} \t Recons. 
{avg_recons:.5f}" + ) + + avg_recons, avg_perplexity, avg_commit = 0.0, 0.0, 0.0 + + def run_train_step( + self, gt_motion: torch.Tensor, cond: torch.Tensor, skip_step: int + ) -> Dict[str, Any]: + self.net.train() + loss_dict = {} + # run model + gt_motion = gt_motion.permute(0, 3, 1, 2).squeeze(-1).cuda().float() + cond["y"] = { + key: val.to(gt_motion.device) if torch.is_tensor(val) else val + for key, val in cond["y"].items() + } + gt_motion = gt_motion[:, ::skip_step, :] + pred_motion, loss_commit, perplexity = self.net(gt_motion, mask=None) + loss_motion = self._l2_loss(pred_motion, gt_motion).mean() + loss_vel = 0.0 + if self.loss_vel > 0: + loss_vel = self._vel_loss(pred_motion, gt_motion) + loss = loss_motion + self.commit * loss_commit + self.loss_vel * loss_vel + self.optimizer.zero_grad() + loss.backward() + self.optimizer.step() + # record losses + if self.loss_vel > 0: + loss_dict["vel"] = loss_vel.item() + loss_dict["loss"] = loss.item() + loss_dict["loss_motion"] = loss_motion.item() + loss_dict["loss_commit"] = loss_commit.item() + loss_dict["perplexity"] = perplexity.item() + return loss_dict + + def save_model(self, save_path): + torch.save( + { + "net": self.net.state_dict(), + "optimizer": self.optimizer.state_dict(), + "scheduler": self.scheduler, + }, + save_path, + ) + + def _save_predictions(self, name, unstd_pose, unstd_pred): + curr_name = os.path.basename(name) + path = os.path.join(self.out_dir, curr_name) + for j in range(len(path.split("/")) - 1): + if not os.path.exists("/".join(path.split("/")[: j + 1])): + os.system("mkdir " + "/".join(path.split("/")[: j + 1])) + np.save(os.path.join(self.out_dir, curr_name + "_gt.npy"), unstd_pose) + np.save(os.path.join(self.out_dir, curr_name + "_pred.npy"), unstd_pred) + + def _log_losses( + self, + commit_loss: float, + recons_loss: float, + total_perplexity: float, + nb_iter: int, + nb_sample: int, + draw: bool, + save: bool, + ) -> None: + avg_commit = commit_loss / nb_sample + avg_recons = recons_loss / nb_sample + avg_perplexity = total_perplexity / nb_sample + self.logger.info( + f"Eval. Iter {nb_iter} : \t Commit. {avg_commit:.5f} \t PPL. {avg_perplexity:.2f} \t Recons. {avg_recons:.5f}" + ) + + if draw: + self.writer.add_scalar("./Val/Perplexity", avg_perplexity, nb_iter) + self.writer.add_scalar("./Val/Commit", avg_commit, nb_iter) + self.writer.add_scalar("./Val/Recons", avg_recons, nb_iter) + + if avg_perplexity < self.best_perplexity: + msg = f"--> --> \t Perplexity Improved from {self.best_perplexity:.5f} to {avg_perplexity:.5f} !!!" + self.logger.info(msg) + self.best_perplexity = avg_perplexity + if save: + print(f"saving checkpoint net_best.pth") + self.save_model(os.path.join(self.out_dir, "net_best.pth")) + + if avg_commit < self.best_commit: + msg = f"--> --> \t Commit Improved from {self.best_commit:.5f} to {avg_commit:.5f} !!!" + self.logger.info(msg) + self.best_commit = avg_commit + + if avg_recons < self.best_recons: + msg = f"--> --> \t Recons Improved from {self.best_recons:.5f} to {avg_recons:.5f} !!!" 
+            self.logger.info(msg)
+            self.best_recons = avg_recons
+
+    @torch.no_grad()
+    def evaluation_vqvae(
+        self,
+        val_loader,
+        nb_iter: int,
+        draw: bool = True,
+        save: bool = True,
+        savenpy: bool = False,
+    ) -> None:
+        self.net.eval()
+        nb_sample = 0
+        commit_loss = 0
+        recons_loss = 0
+        total_perplexity = 0
+        for _, batch in enumerate(val_loader):
+            motion, cond = batch
+            m_length = cond["y"]["lengths"]
+            motion = motion.permute(0, 3, 1, 2).squeeze(-1).cuda().float()
+            cond["y"] = {
+                key: val.to(motion.device) if torch.is_tensor(val) else val
+                for key, val in cond["y"].items()
+            }
+            motion = motion[:, :: val_loader.dataset.step, :].cuda().float()
+            bs, seq = motion.shape[0], motion.shape[1]
+            pred_pose_eval = torch.zeros((bs, seq, motion.shape[-1])).cuda()
+            for i in range(bs):
+                curr_gt = motion[i : i + 1, : m_length[i]]
+                pred, loss_commit, perplexity = self.net(curr_gt)
+                l2_loss = self._l2_loss(pred, curr_gt)
+                recons_loss += l2_loss.mean().item()
+                commit_loss += loss_commit
+                total_perplexity += perplexity
+                unstd_pred = val_loader.dataset.inv_transform(
+                    pred.detach().cpu().numpy(), self.data_format
+                )
+                unstd_pose = val_loader.dataset.inv_transform(
+                    curr_gt.detach().cpu().numpy(), self.data_format
+                )
+                if savenpy:
+                    self._save_predictions(
+                        f"b{i:04d}", unstd_pose[:, : m_length[i]], unstd_pred
+                    )
+                pred_pose_eval[i : i + 1, : m_length[i], :] = pred
+            nb_sample += bs
+
+        self._log_losses(
+            commit_loss, recons_loss, total_perplexity, nb_iter, nb_sample, draw, save
+        )
+        if save:
+            print("saving checkpoint net_last.pth")
+            self.save_model(os.path.join(self.out_dir, "net_last.pth"))
+            if nb_iter % 100000 == 0:
+                print(f"saving checkpoint net_iter{nb_iter}.pth")
+                self.save_model(
+                    os.path.join(self.out_dir, "net_iter" + str(nb_iter) + ".pth")
+                )
+
+
+def _load_data_info(args, logger):
+    data_dict = load_local_data(args.data_root, audio_per_frame=1600)
+    train_loader = get_dataset_loader(
+        args=args, data_dict=data_dict, split="train", add_padding=False
+    )
+    val_loader = get_dataset_loader(
+        args=args, data_dict=data_dict, split="val", add_padding=False
+    )
+
+    logger.info(
+        f"Training on {args.dataname}, motions are with {args.nb_joints} joints"
+    )
+    train_loader_iter = cycle(train_loader)
+    skip_step = train_loader.dataset.step
+    return train_loader_iter, val_loader, skip_step
+
+
+def _load_checkpoint(args, net, logger):
+    cp_dir = os.path.dirname(args.resume_pth)
+    with open(f"{cp_dir}/args.json") as f:
+        trans_args = json.load(f)
+    assert trans_args["data_root"] == args.data_root, "data_root doesn't match"
+    logger.info("loading checkpoint from {}".format(args.resume_pth))
+    ckpt = torch.load(args.resume_pth, map_location="cpu")
+    net.load_state_dict(ckpt["net"], strict=True)
+    return net
+
+
+def main(args):
+    torch.manual_seed(args.seed)
+    os.makedirs(args.out_dir, exist_ok=True)
+    logger = get_logger(args.out_dir)
+    writer = SummaryWriter(args.out_dir)
+    logger.info(json.dumps(vars(args), indent=4, sort_keys=True))
+
+    if args.data_format == "pose":
+        args.nb_joints = 104
+    elif args.data_format == "face":
+        args.nb_joints = 256
+
+    args_path = os.path.join(args.out_dir, "args.json")
+    with open(args_path, "w") as fw:
+        json.dump(vars(args), fw, indent=4, sort_keys=True)
+
+    if not os.path.exists(args.data_root):
+        args.data_root = args.data_root.replace("/home/", "/derived/")
+
+    train_loader_iter, val_loader, skip_step = _load_data_info(args, logger)
+    net = vqvae.TemporalVertexCodec(
+        n_vertices=args.nb_joints,
+        latent_dim=args.output_emb_width,
+        
categories=args.code_dim, + residual_depth=args.depth, + ) + if args.resume_pth: + net = _load_checkpoint(args, net, logger) + net.train() + net.cuda() + + trainer = ModelTrainer(args, net, logger, writer) + + trainer.run_warmup_steps(train_loader_iter, skip_step, logger) + avg_recons, avg_perplexity, avg_commit = 0.0, 0.0, 0.0 + with torch.no_grad(): + trainer.evaluation_vqvae( + val_loader, 0, save=(args.total_iter > 0), savenpy=True + ) + + for nb_iter in range(1, args.total_iter + 1): + gt_motion, cond = next(train_loader_iter) + loss_dict = trainer.run_train_step(gt_motion, cond, skip_step) + trainer.scheduler.step() + + avg_recons += loss_dict["loss_motion"] + avg_perplexity += loss_dict["perplexity"] + avg_commit += loss_dict["loss_commit"] + + if nb_iter % args.print_iter == 0: + avg_recons /= args.print_iter + avg_perplexity /= args.print_iter + avg_commit /= args.print_iter + + writer.add_scalar("./Train/L1", avg_recons, nb_iter) + writer.add_scalar("./Train/PPL", avg_perplexity, nb_iter) + writer.add_scalar("./Train/Commit", avg_commit, nb_iter) + + logger.info( + f"Train. Iter {nb_iter} : \t Commit. {avg_commit:.5f} \t PPL. {avg_perplexity:.2f} \t Recons. {avg_recons:.5f}" + ) + + avg_recons, avg_perplexity, avg_commit = (0.0, 0.0, 0.0) + + if nb_iter % args.eval_iter == 0: + trainer.evaluation_vqvae( + val_loader, nb_iter, save=(args.total_iter > 0), savenpy=True + ) + + +if __name__ == "__main__": + args = train_args() + main(args) diff --git a/train/training_loop.py b/train/training_loop.py new file mode 100644 index 0000000000000000000000000000000000000000..29d9c69db9e0c7e26345f28c48b6b2573f8c7ebf --- /dev/null +++ b/train/training_loop.py @@ -0,0 +1,288 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. 
+""" + +import cProfile as profile +import functools +import pstats + +import blobfile as bf +import numpy as np +import torch +from torch.optim import AdamW +from tqdm import tqdm + +import utils.logger as logger +from diffusion.fp16_util import MixedPrecisionTrainer +from diffusion.resample import LossAwareSampler, create_named_schedule_sampler +from utils.misc import dev, load_state_dict + +INITIAL_LOG_LOSS_SCALE = 20.0 + + +class TrainLoop: + def __init__( + self, args, train_platform, model, diffusion, data, writer, rank=0, world_size=1 + ): + self.args = args + self.dataset = args.dataset + self.train_platform = train_platform + self.model = model + self.diffusion = diffusion + self.cond_mode = model.module.cond_mode if world_size > 1 else model.cond_mode + self.data = data + self.batch_size = args.batch_size + self.microbatch = args.batch_size # deprecating this option + self.lr = args.lr + self.log_interval = args.log_interval + self.save_interval = args.save_interval + self.resume_checkpoint = args.resume_checkpoint + self.use_fp16 = False # deprecating this option + self.fp16_scale_growth = 1e-3 # deprecating this option + self.weight_decay = args.weight_decay + self.lr_anneal_steps = args.lr_anneal_steps + self.rank = rank + self.world_size = world_size + + self.step = 0 + self.resume_step = 0 + self.global_batch = self.batch_size + self.num_steps = args.num_steps + self.num_epochs = self.num_steps // len(self.data) + 1 + chunks = list(range(self.num_steps)) + num_chunks = int(self.num_steps / 10) + chunks = np.array_split(chunks, num_chunks) + self.chunks = np.reshape(chunks[10_000::10], (-1)) + self.sync_cuda = torch.cuda.is_available() + self.writer = writer + + self._load_and_sync_parameters() + self.mp_trainer = MixedPrecisionTrainer( + model=self.model, + use_fp16=self.use_fp16, + fp16_scale_growth=self.fp16_scale_growth, + ) + + self.save_dir = args.save_dir + self.overwrite = args.overwrite + + self.opt = AdamW( + self.mp_trainer.master_params, lr=self.lr, weight_decay=self.weight_decay + ) + if self.resume_step: + self._load_optimizer_state() + + if torch.cuda.is_available(): + self.device = torch.device(f"cuda:{self.rank}") + + self.schedule_sampler_type = "uniform" + self.schedule_sampler = create_named_schedule_sampler( + self.schedule_sampler_type, diffusion + ) + self.eval_wrapper, self.eval_data, self.eval_gt_data = None, None, None + self.use_ddp = True + self.ddp_model = self.model + + def _load_and_sync_parameters(self): + resume_checkpoint = find_resume_checkpoint() or self.resume_checkpoint + + if resume_checkpoint: + self.resume_step = parse_resume_step_from_filename(resume_checkpoint) + logger.log(f"loading model from checkpoint: {resume_checkpoint}...") + self.model.load_state_dict( + load_state_dict(resume_checkpoint, map_location=dev()) + ) + + def _load_optimizer_state(self): + main_checkpoint = find_resume_checkpoint() or self.resume_checkpoint + opt_checkpoint = bf.join( + bf.dirname(main_checkpoint), f"opt{self.resume_step:09}.pt" + ) + if bf.exists(opt_checkpoint): + logger.log(f"loading optimizer state from checkpoint: {opt_checkpoint}") + state_dict = load_state_dict(opt_checkpoint, map_location=dev()) + self.opt.load_state_dict(state_dict) + + def _print_stats(self, logger): + if (self.step % 100 == 0 and self.step > 0) and self.rank == 0: + v = logger.get_current().name2val + v = v["loss"] + print("step[{}]: loss[{:0.5f}]".format(self.step + self.resume_step, v)) + + def _write_to_logger(self, logger): + if (self.step % self.log_interval == 0) 
and self.rank == 0: + for k, v in logger.get_current().name2val.items(): + if k == "loss": + print( + "step[{}]: loss[{:0.5f}]".format( + self.step + self.resume_step, v + ) + ) + self.writer.add_scalar(f"./Train/{k}", v, self.step) + if k in ["step", "samples"] or "_q" in k: + continue + else: + self.train_platform.report_scalar( + name=k, value=v, iteration=self.step, group_name="Loss" + ) + self.writer.add_scalar(f"./Train/{k}", v, self.step) + + def run_loop(self): + for _ in range(self.num_epochs): + if self.rank == 0: + prof = profile.Profile() + prof.enable() + + for motion, cond in tqdm(self.data, disable=(self.rank != 0)): + if not ( + not self.lr_anneal_steps + or self.step + self.resume_step < self.lr_anneal_steps + ): + break + + motion = motion.to(self.device) + cond["y"] = { + key: val.to(self.device) if torch.is_tensor(val) else val + for key, val in cond["y"].items() + } + self.run_step(motion, cond) + self._print_stats(logger) + self._write_to_logger(logger) + if (self.step % self.save_interval == 0) and self.rank == 0: + self.save() + + self.step += 1 + + if (self.step == 1000) and self.rank == 0: + prof.disable() + stats = pstats.Stats(prof).strip_dirs().sort_stats("cumtime") + stats.print_stats(10) + + if not ( + not self.lr_anneal_steps + or self.step + self.resume_step < self.lr_anneal_steps + ): + break + + # Save the last checkpoint if it wasn't already saved. + if ((self.step - 1) % self.save_interval != 0) and self.rank == 0: + self.save() + + def run_step(self, batch, cond): + self.forward_backward(batch, cond) + self.mp_trainer.optimize(self.opt) + self._anneal_lr() + if self.rank == 0: + self.log_step() + + def forward_backward(self, batch, cond): + self.mp_trainer.zero_grad() + for i in range(0, batch.shape[0], self.microbatch): + # Eliminates the microbatch feature + assert i == 0 + assert self.microbatch == self.batch_size + micro = batch + micro_cond = cond + last_batch = (i + self.microbatch) >= batch.shape[0] + t, weights = self.schedule_sampler.sample(micro.shape[0], batch.device) + + compute_losses = functools.partial( + self.diffusion.training_losses, + self.ddp_model, + micro, + t, + model_kwargs=micro_cond, + ) + + if last_batch or not self.use_ddp: + losses = compute_losses() + else: + with self.ddp_model.no_sync(): + losses = compute_losses() + + if isinstance(self.schedule_sampler, LossAwareSampler): + self.schedule_sampler.update_with_local_losses( + t, losses["loss"].detach() + ) + + loss = (losses["loss"] * weights).mean() + log_loss_dict( + self.diffusion, t, {k: v * weights for k, v in losses.items()} + ) + self.mp_trainer.backward(loss) + + def _anneal_lr(self): + if not self.lr_anneal_steps: + return + frac_done = (self.step + self.resume_step) / self.lr_anneal_steps + lr = self.lr * (1 - frac_done) + for param_group in self.opt.param_groups: + param_group["lr"] = lr + + def log_step(self): + logger.logkv("step", self.step + self.resume_step) + logger.logkv("samples", (self.step + self.resume_step + 1) * self.global_batch) + + def ckpt_file_name(self): + return f"model{(self.step+self.resume_step):09d}.pt" + + def save(self): + def save_checkpoint(params): + state_dict = self.mp_trainer.master_params_to_state_dict(params) + + # Do not save CLIP weights + clip_weights = [e for e in state_dict.keys() if e.startswith("clip_model.")] + for e in clip_weights: + del state_dict[e] + + logger.log(f"saving model...") + filename = self.ckpt_file_name() + with bf.BlobFile(bf.join(self.save_dir, filename), "wb") as f: + torch.save(state_dict, f) + + 
save_checkpoint(self.mp_trainer.master_params) + + with bf.BlobFile( + bf.join(self.save_dir, f"opt{(self.step+self.resume_step):09d}.pt"), + "wb", + ) as f: + torch.save(self.opt.state_dict(), f) + + +def parse_resume_step_from_filename(filename): + """ + Parse filenames of the form path/to/modelNNNNNN.pt, where NNNNNN is the + checkpoint's number of steps. + """ + split = filename.split("model") + if len(split) < 2: + return 0 + split1 = split[-1].split(".")[0] + try: + return int(split1) + except ValueError: + return 0 + + +def get_blob_logdir(): + # You can change this to be a separate path to save checkpoints to + # a blobstore or some external drive. + return logger.get_dir() + + +def find_resume_checkpoint(): + # On your infrastructure, you may want to override this to automatically + # discover the latest checkpoint on your blob storage, etc. + return None + + +def log_loss_dict(diffusion, ts, losses): + for key, values in losses.items(): + logger.logkv_mean(key, values.mean().item()) + # Log the quantiles (four quartiles, in particular). + for sub_t, sub_loss in zip(ts.cpu().numpy(), values.detach().cpu().numpy()): + quartile = int(4 * sub_t / diffusion.num_timesteps) + logger.logkv_mean(f"{key}_q{quartile}", sub_loss) diff --git a/utils/diff_parser_utils.py b/utils/diff_parser_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..10f0e819ff4649a17d6127a4695f9599710a7b27 --- /dev/null +++ b/utils/diff_parser_utils.py @@ -0,0 +1,307 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +import argparse +import json +import os +from argparse import ArgumentParser + + +def parse_and_load_from_model(parser): + # args according to the loaded model + # do not try to specify them from cmd line since they will be overwritten + add_data_options(parser) + add_model_options(parser) + add_diffusion_options(parser) + args = parser.parse_args() + args_to_overwrite = [] + for group_name in ["dataset", "model", "diffusion"]: + args_to_overwrite += get_args_per_group_name(parser, args, group_name) + args_to_overwrite += ["data_root"] + + # load args from model + model_path = get_model_path_from_args() + args_path = os.path.join(os.path.dirname(model_path), "args.json") + print(args_path) + assert os.path.exists(args_path), "Arguments json file was not found!" 
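+    # overwrite the CLI defaults with the values stored next to the checkpoint in args.json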
+    with open(args_path, "r") as fr:
+        model_args = json.load(fr)
+
+    for a in args_to_overwrite:
+        if a in model_args.keys():
+            if a == "timestep_respacing" or a == "partial":
+                continue
+            setattr(args, a, model_args[a])
+
+        elif "cond_mode" in model_args:  # backward compatibility
+            unconstrained = model_args["cond_mode"] == "no_cond"
+            setattr(args, "unconstrained", unconstrained)
+
+        else:
+            print(
+                "Warning: was not able to load [{}], using default value [{}] instead.".format(
+                    a, args.__dict__[a]
+                )
+            )
+
+    if args.cond_mask_prob == 0:
+        args.guidance_param = 1
+    return args
+
+
+def get_args_per_group_name(parser, args, group_name):
+    for group in parser._action_groups:
+        if group.title == group_name:
+            group_dict = {
+                a.dest: getattr(args, a.dest, None) for a in group._group_actions
+            }
+            return list(argparse.Namespace(**group_dict).__dict__.keys())
+    raise ValueError("group_name was not found.")
+
+
+def get_model_path_from_args():
+    try:
+        dummy_parser = ArgumentParser()
+        dummy_parser.add_argument("model_path")
+        dummy_args, _ = dummy_parser.parse_known_args()
+        return dummy_args.model_path
+    except:
+        raise ValueError("model_path argument must be specified.")
+
+
+def add_base_options(parser):
+    group = parser.add_argument_group("base")
+    group.add_argument(
+        "--cuda", default=True, type=bool, help="Use cuda device, otherwise use CPU."
+    )
+    group.add_argument("--device", default=0, type=int, help="Device id to use.")
+    group.add_argument("--seed", default=10, type=int, help="For fixing random seed.")
+    group.add_argument(
+        "--batch_size", default=64, type=int, help="Batch size during training."
+    )
+
+
+def add_diffusion_options(parser):
+    group = parser.add_argument_group("diffusion")
+    group.add_argument(
+        "--noise_schedule",
+        default="cosine",
+        choices=["linear", "cosine"],
+        type=str,
+        help="Noise schedule type",
+    )
+    group.add_argument(
+        "--diffusion_steps",
+        default=10,
+        type=int,
+        help="Number of diffusion steps (denoted T in the paper)",
+    )
+    group.add_argument(
+        "--timestep_respacing",
+        default="ddim100",
+        type=str,
+        help="ddimN, else empty string",
+    )
+    group.add_argument(
+        "--sigma_small", default=True, type=bool, help="Use smaller sigma values."
+    )
+
+
+def add_model_options(parser):
+    group = parser.add_argument_group("model")
+    group.add_argument("--layers", default=8, type=int, help="Number of layers.")
+    group.add_argument(
+        "--num_audio_layers", default=3, type=int, help="Number of audio layers."
+    )
+    group.add_argument("--heads", default=4, type=int, help="Number of heads.")
+    group.add_argument(
+        "--latent_dim", default=512, type=int, help="Transformer/GRU width."
+    )
+    group.add_argument(
+        "--cond_mask_prob",
+        default=0.20,
+        type=float,
+        help="The probability of masking the condition during training."
+        " For classifier-free guidance learning.",
+    )
+    group.add_argument(
+        "--lambda_vel", default=0.0, type=float, help="Joint velocity loss."
+    )
+    group.add_argument(
+        "--unconstrained",
+        action="store_true",
+        help="Model is trained unconditionally. That is, it is constrained by neither text nor action. 
" + "Currently tested on HumanAct12 only.", + ) + group.add_argument( + "--data_format", + type=str, + choices=["pose", "face"], + default="pose", + help="whether or not to use vae for diffusion process", + ) + group.add_argument("--not_rotary", action="store_true") + group.add_argument("--simplify_audio", action="store_true") + group.add_argument("--add_frame_cond", type=float, choices=[1], default=None) + + +def add_data_options(parser): + group = parser.add_argument_group("dataset") + group.add_argument( + "--dataset", + default="social", + choices=["social"], + type=str, + help="Dataset name (choose from list).", + ) + group.add_argument("--data_root", type=str, default=None, help="dataset directory") + group.add_argument("--max_seq_length", default=600, type=int) + group.add_argument( + "--split", type=str, default=None, choices=["test", "train", "val"] + ) + + +def add_training_options(parser): + group = parser.add_argument_group("training") + group.add_argument( + "--save_dir", + required=True, + type=str, + help="Path to save checkpoints and results.", + ) + group.add_argument( + "--overwrite", + action="store_true", + help="If True, will enable to use an already existing save_dir.", + ) + group.add_argument( + "--train_platform_type", + default="NoPlatform", + choices=["NoPlatform", "ClearmlPlatform", "TensorboardPlatform"], + type=str, + help="Choose platform to log results. NoPlatform means no logging.", + ) + group.add_argument("--lr", default=1e-4, type=float, help="Learning rate.") + group.add_argument( + "--weight_decay", default=0.0, type=float, help="Optimizer weight decay." + ) + group.add_argument( + "--lr_anneal_steps", + default=0, + type=int, + help="Number of learning rate anneal steps.", + ) + group.add_argument( + "--log_interval", default=1_000, type=int, help="Log losses each N steps" + ) + group.add_argument( + "--save_interval", + default=5_000, + type=int, + help="Save checkpoints and run evaluation each N steps", + ) + group.add_argument( + "--num_steps", + default=800_000, + type=int, + help="Training will stop after the specified number of steps.", + ) + group.add_argument( + "--resume_checkpoint", + default="", + type=str, + help="If not empty, will start from the specified checkpoint (path to model###.pt file).", + ) + + +def add_sampling_options(parser): + group = parser.add_argument_group("sampling") + group.add_argument( + "--model_path", + required=True, + type=str, + help="Path to model####.pt file to be sampled.", + ) + group.add_argument( + "--output_dir", + default="", + type=str, + help="Path to results dir (auto created by the script). 
" + "If empty, will create dir in parallel to checkpoint.", + ) + group.add_argument("--face_codes", default=None, type=str) + group.add_argument("--pose_codes", default=None, type=str) + group.add_argument( + "--num_samples", + default=10, + type=int, + help="Maximal number of prompts to sample, " + "if loading dataset from file, this field will be ignored.", + ) + group.add_argument( + "--num_repetitions", + default=3, + type=int, + help="Number of repetitions, per sample (text prompt/action)", + ) + group.add_argument( + "--guidance_param", + default=2.5, + type=float, + help="For classifier-free sampling - specifies the s parameter, as defined in the paper.", + ) + group.add_argument( + "--curr_seq_length", + default=None, + type=int, + ) + group.add_argument( + "--render_gt", + action="store_true", + help="whether to use pretrained clipmodel for audio encoding", + ) + + +def add_generate_options(parser): + group = parser.add_argument_group("generate") + group.add_argument( + "--plot", + action="store_true", + help="Whether or not to save the renderings as a video.", + ) + group.add_argument( + "--resume_trans", + default=None, + type=str, + help="keyframe prediction network.", + ) + group.add_argument("--flip_person", action="store_true") + + +def get_cond_mode(args): + if args.dataset == "social": + cond_mode = "audio" + return cond_mode + + +def train_args(): + parser = ArgumentParser() + add_base_options(parser) + add_data_options(parser) + add_model_options(parser) + add_diffusion_options(parser) + add_training_options(parser) + return parser.parse_args() + + +def generate_args(): + parser = ArgumentParser() + add_base_options(parser) + add_sampling_options(parser) + add_generate_options(parser) + args = parse_and_load_from_model(parser) + return args diff --git a/utils/eval.py b/utils/eval.py new file mode 100644 index 0000000000000000000000000000000000000000..2dc3360baa465e48c400158adb154fa9c649043f --- /dev/null +++ b/utils/eval.py @@ -0,0 +1,115 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. 
+""" + +import argparse + +import numpy as np +from scipy import linalg + + +def calculate_diversity(activation: np.ndarray, diversity_times: int = 10_000) -> float: + assert len(activation.shape) == 2 + assert activation.shape[0] > diversity_times + num_samples = activation.shape[0] + first_indices = np.random.choice(num_samples, diversity_times, replace=False) + second_indices = np.random.choice(num_samples, diversity_times, replace=False) + dist = linalg.norm(activation[first_indices] - activation[second_indices], axis=1) + return dist + + +def calculate_activation_statistics( + activations: np.ndarray, +) -> (np.ndarray, np.ndarray): + mu = np.mean(activations, axis=0) + cov = np.cov(activations, rowvar=False) + return mu, cov + + +def calculate_frechet_distance( + mu1: np.ndarray, + sigma1: np.ndarray, + mu2: np.ndarray, + sigma2: np.ndarray, + eps: float = 1e-6, +) -> float: + mu1 = np.atleast_1d(mu1) + mu2 = np.atleast_1d(mu2) + + sigma1 = np.atleast_2d(sigma1) + sigma2 = np.atleast_2d(sigma2) + + assert ( + mu1.shape == mu2.shape + ), "Training and test mean vectors have different lengths" + assert ( + sigma1.shape == sigma2.shape + ), "Training and test covariances have different dimensions" + + diff = mu1 - mu2 + + # Product might be almost singular + covmean, _ = linalg.sqrtm(sigma1.dot(sigma2), disp=False) + if not np.isfinite(covmean).all(): + msg = ( + "fid calculation produces singular product; " + "adding %s to diagonal of cov estimates" + ) % eps + print(msg) + offset = np.eye(sigma1.shape[0]) * eps + covmean = linalg.sqrtm((sigma1 + offset).dot(sigma2 + offset)) + + # Numerical error might give slight imaginary component + if np.iscomplexobj(covmean): + if not np.allclose(np.diagonal(covmean).imag, 0, atol=1e-3): + m = np.max(np.abs(covmean.imag)) + raise ValueError("Imaginary component {}".format(m)) + covmean = covmean.real + + tr_covmean = np.trace(covmean) + + return diff.dot(diff) + np.trace(sigma1) + np.trace(sigma2) - 2 * tr_covmean + + +def main(args): + num_samples = 5 + results = np.load(args.results, allow_pickle=True).item() + pred_reshaped = results["motion"].squeeze().reshape((num_samples, -1, 104, 600)) + gt_reshaped = results["gt"].squeeze().reshape((num_samples, -1, 104, 600)) + + # calulate variance across the different samples generated + cross_sample_var = np.var(pred_reshaped.reshape((num_samples, -1)), axis=0) + print("cross var", cross_sample_var.mean()) + + pred_pose_last = pred_reshaped.transpose((0, 1, 3, 2)).reshape(-1, 104) + gt_pose_last = gt_reshaped.transpose((0, 1, 3, 2)).reshape(-1, 104) + # calculate the static and kinematic diversity + var_g = calculate_diversity(pred_pose_last) + print("var_g", var_g.mean()) + var_k = np.var(pred_reshaped, axis=-1) + print("var_k", var_k.mean()) + + # calculate the static and kinematic fid + pred_mu_g, pred_cov_g = calculate_activation_statistics(pred_pose_last) + gt_mu_g, gt_cov_g = calculate_activation_statistics(gt_pose_last) + fid_g = calculate_frechet_distance(gt_mu_g, gt_cov_g, pred_mu_g, pred_cov_g) + print("fid_g", fid_g) + # reshape for kinematic fid + pred_motion = pred_reshaped[..., 1:] - pred_reshaped[..., :-1] + gt_motion = gt_reshaped[..., 1:] - gt_reshaped[..., :-1] + pred_motion_last = pred_motion.transpose((0, 1, 3, 2)).reshape(-1, 104) + gt_motion_last = gt_motion.transpose((0, 1, 3, 2)).reshape(-1, 104) + pred_mu_k, pred_cov_k = calculate_activation_statistics(pred_motion_last) + gt_mu_k, gt_cov_k = calculate_activation_statistics(gt_motion_last) + fid_k = 
calculate_frechet_distance(gt_mu_k, gt_cov_k, pred_mu_k, pred_cov_k) + print("fid_k", fid_k) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--results", type=str, required=True) + args = parser.parse_args() + main(args) diff --git a/utils/guide_parser_utils.py b/utils/guide_parser_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..a8a6279a9d61af0739eb11686e77cd0bd7f2671a --- /dev/null +++ b/utils/guide_parser_utils.py @@ -0,0 +1,56 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +import argparse + + +def _add_dataset_args(parser): + parser.add_argument("--dataset", default="social", type=str) + parser.add_argument( + "--data_format", type=str, default="pose", choices=["pose", "face"] + ) + parser.add_argument("--data_root", type=str, default=None, help="dataset directory") + parser.add_argument("--batch_size", default=16, type=int) + parser.add_argument("--add_frame_cond", type=int, default=None, choices=[1]) + parser.add_argument("--max_seq_length", default=600, type=int) + + +def _add_opt_args(parser): + parser.add_argument("--lr", type=float, default=1e-4) + parser.add_argument("--warm_up_iter", type=int, default=5_000) + parser.add_argument("--weight_decay", type=float, default=0.1) + parser.add_argument( + "--lr-scheduler", + default=[50000, 400000], + nargs="+", + type=int, + help="learning rate schedule (iterations)", + ) + parser.add_argument("--gamma", default=0.1, type=float) + parser.add_argument("--gn", action="store_true", help="gradient clipping") + + +def _add_model_args(parser): + parser.add_argument("--layers", default=8, type=int) + parser.add_argument("--dim", default=8, type=int) + parser.add_argument("--resume_pth", type=str, required=True) + parser.add_argument("--resume_trans", type=str, default=None) + + +def train_args(): + parser = argparse.ArgumentParser() + parser.add_argument("--seed", default=10, type=int, help="For fixing random seed.") + parser.add_argument("--out_dir", type=str, required=True) + parser.add_argument("--total_iter", default=1_000_000, type=int) + parser.add_argument("--log_interval", default=1_000, type=int) + parser.add_argument("--eval_interval", default=1_000, type=int) + parser.add_argument("--save_interval", default=5_000, type=int) + _add_model_args(parser) + _add_opt_args(parser) + _add_dataset_args(parser) + args = parser.parse_args() + return args diff --git a/utils/logger.py b/utils/logger.py new file mode 100644 index 0000000000000000000000000000000000000000..e0a603e456c40a7aab437455b48792ecb784a35f --- /dev/null +++ b/utils/logger.py @@ -0,0 +1,496 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. 
+""" + +import os +import sys +import shutil +import os.path as osp +import json +import time +import datetime +import tempfile +import warnings +from collections import defaultdict +from contextlib import contextmanager + +DEBUG = 10 +INFO = 20 +WARN = 30 +ERROR = 40 + +DISABLED = 50 + + +class KVWriter(object): + def writekvs(self, kvs): + raise NotImplementedError + + +class SeqWriter(object): + def writeseq(self, seq): + raise NotImplementedError + + +class HumanOutputFormat(KVWriter, SeqWriter): + def __init__(self, filename_or_file): + if isinstance(filename_or_file, str): + self.file = open(filename_or_file, "wt") + self.own_file = True + else: + assert hasattr(filename_or_file, "read"), ( + "expected file or str, got %s" % filename_or_file + ) + self.file = filename_or_file + self.own_file = False + + def writekvs(self, kvs): + # Create strings for printing + key2str = {} + for key, val in sorted(kvs.items()): + if hasattr(val, "__float__"): + valstr = "%-8.3g" % val + else: + valstr = str(val) + key2str[self._truncate(key)] = self._truncate(valstr) + + # Find max widths + if len(key2str) == 0: + print("WARNING: tried to write empty key-value dict") + return + else: + keywidth = max(map(len, key2str.keys())) + valwidth = max(map(len, key2str.values())) + + # Write out the data + dashes = "-" * (keywidth + valwidth + 7) + lines = [dashes] + for key, val in sorted(key2str.items(), key=lambda kv: kv[0].lower()): + lines.append( + "| %s%s | %s%s |" + % (key, " " * (keywidth - len(key)), val, " " * (valwidth - len(val))) + ) + lines.append(dashes) + self.file.write("\n".join(lines) + "\n") + + # Flush the output to the file + self.file.flush() + + def _truncate(self, s): + maxlen = 30 + return s[: maxlen - 3] + "..." if len(s) > maxlen else s + + def writeseq(self, seq): + seq = list(seq) + for i, elem in enumerate(seq): + self.file.write(elem) + if i < len(seq) - 1: # add space unless this is the last one + self.file.write(" ") + self.file.write("\n") + self.file.flush() + + def close(self): + if self.own_file: + self.file.close() + + +class JSONOutputFormat(KVWriter): + def __init__(self, filename): + self.file = open(filename, "wt") + + def writekvs(self, kvs): + for k, v in sorted(kvs.items()): + if hasattr(v, "dtype"): + kvs[k] = float(v) + self.file.write(json.dumps(kvs) + "\n") + self.file.flush() + + def close(self): + self.file.close() + + +class CSVOutputFormat(KVWriter): + def __init__(self, filename): + self.file = open(filename, "w+t") + self.keys = [] + self.sep = "," + + def writekvs(self, kvs): + # Add our current row to the history + extra_keys = list(kvs.keys() - self.keys) + extra_keys.sort() + if extra_keys: + self.keys.extend(extra_keys) + self.file.seek(0) + lines = self.file.readlines() + self.file.seek(0) + for i, k in enumerate(self.keys): + if i > 0: + self.file.write(",") + self.file.write(k) + self.file.write("\n") + for line in lines[1:]: + self.file.write(line[:-1]) + self.file.write(self.sep * len(extra_keys)) + self.file.write("\n") + for i, k in enumerate(self.keys): + if i > 0: + self.file.write(",") + v = kvs.get(k) + if v is not None: + self.file.write(str(v)) + self.file.write("\n") + self.file.flush() + + def close(self): + self.file.close() + + +class TensorBoardOutputFormat(KVWriter): + """ + Dumps key/value pairs into TensorBoard's numeric format. 
+ """ + + def __init__(self, dir): + os.makedirs(dir, exist_ok=True) + self.dir = dir + self.step = 1 + prefix = "events" + path = osp.join(osp.abspath(dir), prefix) + import tensorflow as tf + from tensorflow.python import pywrap_tensorflow + from tensorflow.core.util import event_pb2 + from tensorflow.python.util import compat + + self.tf = tf + self.event_pb2 = event_pb2 + self.pywrap_tensorflow = pywrap_tensorflow + self.writer = pywrap_tensorflow.EventsWriter(compat.as_bytes(path)) + + def writekvs(self, kvs): + def summary_val(k, v): + kwargs = {"tag": k, "simple_value": float(v)} + return self.tf.Summary.Value(**kwargs) + + summary = self.tf.Summary(value=[summary_val(k, v) for k, v in kvs.items()]) + event = self.event_pb2.Event(wall_time=time.time(), summary=summary) + event.step = ( + self.step + ) # is there any reason why you'd want to specify the step? + self.writer.WriteEvent(event) + self.writer.Flush() + self.step += 1 + + def close(self): + if self.writer: + self.writer.Close() + self.writer = None + + +def make_output_format(format, ev_dir, log_suffix=""): + os.makedirs(ev_dir, exist_ok=True) + if format == "stdout": + return HumanOutputFormat(sys.stdout) + elif format == "log": + return HumanOutputFormat(osp.join(ev_dir, "log%s.txt" % log_suffix)) + elif format == "json": + return JSONOutputFormat(osp.join(ev_dir, "progress%s.json" % log_suffix)) + elif format == "csv": + return CSVOutputFormat(osp.join(ev_dir, "progress%s.csv" % log_suffix)) + elif format == "tensorboard": + return TensorBoardOutputFormat(osp.join(ev_dir, "tb%s" % log_suffix)) + else: + raise ValueError("Unknown format specified: %s" % (format,)) + + +# ================================================================ +# API +# ================================================================ + + +def logkv(key, val): + """ + Log a value of some diagnostic + Call this once for each diagnostic quantity, each iteration + If called many times, last value will be used. + """ + get_current().logkv(key, val) + + +def logkv_mean(key, val): + """ + The same as logkv(), but if called many times, values averaged. + """ + get_current().logkv_mean(key, val) + + +def logkvs(d): + """ + Log a dictionary of key-value pairs + """ + for k, v in d.items(): + logkv(k, v) + + +def dumpkvs(): + """ + Write all of the diagnostics from the current iteration + """ + return get_current().dumpkvs() + + +def getkvs(): + return get_current().name2val + + +def log(*args, level=INFO): + """ + Write the sequence of args, with no separators, to the console and output files (if you've configured an output file). + """ + get_current().log(*args, level=level) + + +def debug(*args): + log(*args, level=DEBUG) + + +def info(*args): + log(*args, level=INFO) + + +def warn(*args): + log(*args, level=WARN) + + +def error(*args): + log(*args, level=ERROR) + + +def set_level(level): + """ + Set logging threshold on current logger. + """ + get_current().set_level(level) + + +def set_comm(comm): + get_current().set_comm(comm) + + +def get_dir(): + """ + Get directory that log files are being written to. 
+ will be None if there is no output directory (i.e., if you didn't call start) + """ + return get_current().get_dir() + + +record_tabular = logkv +dump_tabular = dumpkvs + + +@contextmanager +def profile_kv(scopename): + logkey = "wait_" + scopename + tstart = time.time() + try: + yield + finally: + get_current().name2val[logkey] += time.time() - tstart + + +def profile(n): + """ + Usage: + @profile("my_func") + def my_func(): code + """ + + def decorator_with_name(func): + def func_wrapper(*args, **kwargs): + with profile_kv(n): + return func(*args, **kwargs) + + return func_wrapper + + return decorator_with_name + + +# ================================================================ +# Backend +# ================================================================ + + +def get_current(): + if Logger.CURRENT is None: + _configure_default_logger() + + return Logger.CURRENT + + +class Logger(object): + DEFAULT = None # A logger with no output files. (See right below class definition) + # So that you can still log to the terminal without setting up any output files + CURRENT = None # Current logger being used by the free functions above + + def __init__(self, dir, output_formats, comm=None): + self.name2val = defaultdict(float) # values this iteration + self.name2cnt = defaultdict(int) + self.level = INFO + self.dir = dir + self.output_formats = output_formats + self.comm = comm + + # Logging API, forwarded + # ---------------------------------------- + def logkv(self, key, val): + self.name2val[key] = val + + def logkv_mean(self, key, val): + oldval, cnt = self.name2val[key], self.name2cnt[key] + self.name2val[key] = oldval * cnt / (cnt + 1) + val / (cnt + 1) + self.name2cnt[key] = cnt + 1 + + def dumpkvs(self): + if self.comm is None: + d = self.name2val + else: + d = mpi_weighted_mean( + self.comm, + { + name: (val, self.name2cnt.get(name, 1)) + for (name, val) in self.name2val.items() + }, + ) + if self.comm.rank != 0: + d["dummy"] = 1 # so we don't get a warning about empty dict + out = d.copy() # Return the dict for unit testing purposes + for fmt in self.output_formats: + if isinstance(fmt, KVWriter): + fmt.writekvs(d) + self.name2val.clear() + self.name2cnt.clear() + return out + + def log(self, *args, level=INFO): + if self.level <= level: + self._do_log(args) + + # Configuration + # ---------------------------------------- + def set_level(self, level): + self.level = level + + def set_comm(self, comm): + self.comm = comm + + def get_dir(self): + return self.dir + + def close(self): + for fmt in self.output_formats: + fmt.close() + + # Misc + # ---------------------------------------- + def _do_log(self, args): + for fmt in self.output_formats: + if isinstance(fmt, SeqWriter): + fmt.writeseq(map(str, args)) + + +def get_rank_without_mpi_import(): + # check environment variables here instead of importing mpi4py + # to avoid calling MPI_Init() when this module is imported + for varname in ["PMI_RANK", "OMPI_COMM_WORLD_RANK"]: + if varname in os.environ: + return int(os.environ[varname]) + return 0 + + +def mpi_weighted_mean(comm, local_name2valcount): + """ + Copied from: https://github.com/EXP/baselines/blob/ea25b9e8b234e6ee1bca43083f8f3cf974143998/baselines/common/mpi_util.py#L110 + Perform a weighted average over dicts that are each on a different node + Input: local_name2valcount: dict mapping key -> (value, count) + Returns: key -> mean + """ + all_name2valcount = comm.gather(local_name2valcount) + if comm.rank == 0: + name2sum = defaultdict(float) + name2count = defaultdict(float) 
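+        # Accumulate the count-weighted sum and total count for every key
+        # gathered from all ranks; the mean below is name2sum / name2count.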
+ for n2vc in all_name2valcount: + for name, (val, count) in n2vc.items(): + try: + val = float(val) + except ValueError: + if comm.rank == 0: + warnings.warn( + "WARNING: tried to compute mean on non-float {}={}".format( + name, val + ) + ) + else: + name2sum[name] += val * count + name2count[name] += count + return {name: name2sum[name] / name2count[name] for name in name2sum} + else: + return {} + + +def configure(dir=None, format_strs=None, comm=None, log_suffix=""): + """ + If comm is provided, average all numerical stats across that comm + """ + if dir is None: + dir = os.getenv("EXP_LOGDIR") + if dir is None: + dir = osp.join( + tempfile.gettempdir(), + datetime.datetime.now().strftime("exp-%Y-%m-%d-%H-%M-%S-%f"), + ) + assert isinstance(dir, str) + dir = os.path.expanduser(dir) + os.makedirs(os.path.expanduser(dir), exist_ok=True) + + rank = get_rank_without_mpi_import() + if rank > 0: + log_suffix = log_suffix + "-rank%03i" % rank + + if format_strs is None: + if rank == 0: + format_strs = os.getenv("EXP_LOG_FORMAT", "stdout,log,csv").split(",") + else: + format_strs = os.getenv("EXP_LOG_FORMAT_MPI", "log").split(",") + format_strs = filter(None, format_strs) + output_formats = [make_output_format(f, dir, log_suffix) for f in format_strs] + + Logger.CURRENT = Logger(dir=dir, output_formats=output_formats, comm=comm) + if output_formats: + log("Logging to %s" % dir) + + +def _configure_default_logger(): + configure() + Logger.DEFAULT = Logger.CURRENT + + +def reset(): + if Logger.CURRENT is not Logger.DEFAULT: + Logger.CURRENT.close() + Logger.CURRENT = Logger.DEFAULT + log("Reset logger") + + +@contextmanager +def scoped_configure(dir=None, format_strs=None, comm=None): + prevlogger = Logger.CURRENT + configure(dir=dir, format_strs=format_strs, comm=comm) + try: + yield + finally: + Logger.CURRENT.close() + Logger.CURRENT = prevlogger diff --git a/utils/misc.py b/utils/misc.py new file mode 100644 index 0000000000000000000000000000000000000000..e4511ed604c55e15d32b2d317c60f04c6deb3077 --- /dev/null +++ b/utils/misc.py @@ -0,0 +1,223 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +import time +import numpy as np +import random + +import os +import socket +import typing as tp + +import torch +import torch.distributed as dist +from torch.nn.parallel import DistributedDataParallel as DDP + +# Change this to reflect your cluster layout. +# The GPU for a given rank is (rank % GPUS_PER_NODE). +GPUS_PER_NODE = 8 + +SETUP_RETRY_COUNT = 3 + +used_device = 0 + + +def setup(rank, world_size): + os.environ["MASTER_ADDR"] = "localhost" + os.environ["MASTER_PORT"] = "12355" + + # initialize the process group + dist.init_process_group("gloo", rank=rank, world_size=world_size) + + +def cleanup(): + dist.destroy_process_group() + + +def setup_dist(device=0): + """ + Setup a distributed process group. + """ + global used_device + used_device = device + if dist.is_initialized(): + return + +def dev(): + """ + Get the device to use for torch.distributed. + """ + global used_device + if torch.cuda.is_available() and used_device >= 0: + return torch.device(f"cuda:{used_device}") + return torch.device("cpu") + + +def load_state_dict(path, **kwargs): + """ + Load a PyTorch file without redundant fetches across MPI ranks. 
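+    In this version it is simply a thin wrapper around ``torch.load``.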
+ """ + return torch.load(path, **kwargs) + + +def sync_params(params): + """ + Synchronize a sequence of Tensors across ranks from rank 0. + """ + for p in params: + with torch.no_grad(): + dist.broadcast(p, 0) + + +def _find_free_port(): + try: + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + s.bind(("", 0)) + s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + return s.getsockname()[1] + finally: + s.close() + + +def world_size(): + if torch.distributed.is_initialized(): + return torch.distributed.get_world_size() + else: + return 1 + + +def is_distributed(): + return world_size() > 1 + + +def all_reduce(tensor: torch.Tensor, op=torch.distributed.ReduceOp.SUM): + if is_distributed(): + return torch.distributed.all_reduce(tensor, op) + + +def _is_complex_or_float(tensor): + return torch.is_floating_point(tensor) or torch.is_complex(tensor) + + +def _check_number_of_params(params: tp.List[torch.Tensor]): + # utility function to check that the number of params in all workers is the same, + # and thus avoid a deadlock with distributed all reduce. + if not is_distributed() or not params: + return + tensor = torch.tensor([len(params)], device=params[0].device, dtype=torch.long) + all_reduce(tensor) + if tensor.item() != len(params) * world_size(): + # If not all the workers have the same number, for at least one of them, + # this inequality will be verified. + raise RuntimeError( + f"Mismatch in number of params: ours is {len(params)}, " + "at least one worker has a different one." + ) + + +def broadcast_tensors(tensors: tp.Iterable[torch.Tensor], src: int = 0): + """Broadcast the tensors from the given parameters to all workers. + This can be used to ensure that all workers have the same model to start with. + """ + if not is_distributed(): + return + tensors = [tensor for tensor in tensors if _is_complex_or_float(tensor)] + _check_number_of_params(tensors) + handles = [] + for tensor in tensors: + handle = torch.distributed.broadcast(tensor.data, src=src, async_op=True) + handles.append(handle) + for handle in handles: + handle.wait() + + +def fixseed(seed): + torch.backends.cudnn.benchmark = False + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + + +def prGreen(skk): + print("\033[92m {}\033[00m".format(skk)) + + +def prRed(skk): + print("\033[91m {}\033[00m".format(skk)) + + +def to_numpy(tensor): + if torch.is_tensor(tensor): + return tensor.cpu().numpy() + elif type(tensor).__module__ != "numpy": + raise ValueError("Cannot convert {} to numpy array".format(type(tensor))) + return tensor + + +def to_torch(ndarray): + if type(ndarray).__module__ == "numpy": + return torch.from_numpy(ndarray) + elif not torch.is_tensor(ndarray): + raise ValueError("Cannot convert {} to torch tensor".format(type(ndarray))) + return ndarray + + +def cleanexit(): + import sys + import os + + try: + sys.exit(0) + except SystemExit: + os._exit(0) + + +def load_model_wo_clip(model, state_dict): + missing_keys, unexpected_keys = model.load_state_dict(state_dict, strict=False) + assert len(unexpected_keys) == 0 + assert all([k.startswith("clip_model.") for k in missing_keys]) + + +def freeze_joints(x, joints_to_freeze): + # Freezes selected joint *rotations* as they appear in the first frame + # x [bs, [root+n_joints], joint_dim(6), seqlen] + frozen = x.detach().clone() + frozen[:, joints_to_freeze, :, :] = frozen[:, joints_to_freeze, :, :1] + return frozen + + +class TimerError(Exception): + """A custom exception used to report errors in use of Timer class""" + + +class Timer: 
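+    """Wall-clock timer built on time.perf_counter().
+
+    Usage sketch:
+        timer = Timer()
+        timer.start()
+        ...  # timed work
+        timer.stop(iter=100)  # prints elapsed seconds plus iter/s or s/iter
+    """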
+ def __init__(self): + self._start_time = None + + def start(self): + """Start a new timer""" + if self._start_time is not None: + raise TimerError(f"Timer is running. Use .stop() to stop it") + + self._start_time = time.perf_counter() + + def stop(self, iter=None): + """Stop the timer, and report the elapsed time""" + if self._start_time is None: + raise TimerError(f"Timer is not running. Use .start() to start it") + + elapsed_time = time.perf_counter() - self._start_time + self._start_time = None + iter_msg = "" + if iter is not None: + if iter > elapsed_time: + iter_per_sec = iter / elapsed_time + iter_msg = f"[iter/s: {iter_per_sec:0.4f}]" + else: + sec_per_iter = elapsed_time / iter + iter_msg = f"[s/iter: {sec_per_iter:0.4f}]" + print(f"Elapsed time: {elapsed_time:0.4f} seconds {iter_msg}") diff --git a/utils/model_util.py b/utils/model_util.py new file mode 100644 index 0000000000000000000000000000000000000000..da1d177038d15a069cfe2e228951d20168ab2014 --- /dev/null +++ b/utils/model_util.py @@ -0,0 +1,114 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +import torch + +from diffusion import gaussian_diffusion as gd +from diffusion.respace import space_timesteps, SpacedDiffusion +from model.diffusion import FiLMTransformer +from torch.nn import functional as F + + +def get_person_num(config_path): + if "PXB184" in config_path: + person = "PXB184" + elif "RLW104" in config_path: + person = "RLW104" + elif "TXB805" in config_path: + person = "TXB805" + elif "GQS883" in config_path: + person = "GQS883" + else: + assert False, f"something wrong with config: {config_path}" + return person + + +def load_model(model, state_dict): + missing_keys, unexpected_keys = model.load_state_dict(state_dict, strict=False) + assert len(unexpected_keys) == 0, unexpected_keys + assert all( + [ + k.startswith("transformer.") or k.startswith("tokenizer.") + for k in missing_keys + ] + ), missing_keys + + +def create_model_and_diffusion(args, split_type): + model = FiLMTransformer(**get_model_args(args, split_type=split_type)).to( + torch.float32 + ) + diffusion = create_gaussian_diffusion(args) + return model, diffusion + + +def get_model_args(args, split_type): + if args.data_format == "face": + nfeat = 256 + lfeat = 512 + elif args.data_format == "pose": + nfeat = 104 + lfeat = 256 + + if not hasattr(args, "num_audio_layers"): + args.num_audio_layers = 3 # backwards compat + + model_args = { + "args": args, + "nfeats": nfeat, + "latent_dim": lfeat, + "ff_size": 1024, + "num_layers": args.layers, + "num_heads": args.heads, + "dropout": 0.1, + "cond_feature_dim": 512 * 2, + "activation": F.gelu, + "use_rotary": not args.not_rotary, + "cond_mode": "uncond" if args.unconstrained else "audio", + "split_type": split_type, + "num_audio_layers": args.num_audio_layers, + "device": args.device, + } + return model_args + + +def create_gaussian_diffusion(args): + predict_xstart = True + steps = 1000 + scale_beta = 1.0 + timestep_respacing = args.timestep_respacing + learn_sigma = False + rescale_timesteps = False + + betas = gd.get_named_beta_schedule(args.noise_schedule, steps, scale_beta) + loss_type = gd.LossType.MSE + + if not timestep_respacing: + timestep_respacing = [steps] + + name = args.save_dir if hasattr(args, "save_dir") else args.model_path + return SpacedDiffusion( + use_timesteps=space_timesteps(steps, timestep_respacing), + betas=betas, + 
model_mean_type=( + gd.ModelMeanType.EPSILON if not predict_xstart else gd.ModelMeanType.START_X + ), + model_var_type=( + ( + gd.ModelVarType.FIXED_LARGE + if not args.sigma_small + else gd.ModelVarType.FIXED_SMALL + ) + if not learn_sigma + else gd.ModelVarType.LEARNED_RANGE + ), + data_format=args.data_format, + loss_type=loss_type, + rescale_timesteps=rescale_timesteps, + lambda_vel=args.lambda_vel, + model_path=name, + ) diff --git a/utils/vq_parser_utils.py b/utils/vq_parser_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..92f7ad7ba783c73a8dfa82b1358350a8b6a04403 --- /dev/null +++ b/utils/vq_parser_utils.py @@ -0,0 +1,93 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +import argparse + + +def _add_dataset_args(parser): + parser.add_argument("--dataname", type=str, default="kit", help="dataset directory") + parser.add_argument("--data_root", type=str, default=None, help="dataset directory") + parser.add_argument("--max_seq_length", default=600, type=int) + parser.add_argument("--add_frame_cond", type=float, choices=[1], default=None) + parser.add_argument( + "--data_format", type=str, default="pose", choices=["pose", "face"] + ) + parser.add_argument("--dataset", default="social", type=str) + parser.add_argument("--batch_size", default=64, type=int, help="batch size") + + +def _add_optim_args(parser): + parser.add_argument( + "--total_iter", + default=300_000, + type=int, + help="number of total iterations to run", + ) + parser.add_argument( + "--warm_up_iter", + default=1000, + type=int, + help="number of total iterations for warmup", + ) + parser.add_argument("--lr", default=2e-4, type=float, help="max learning rate") + parser.add_argument( + "--lr_scheduler", + default=[300_000], + nargs="+", + type=int, + help="learning rate schedule (iterations)", + ) + parser.add_argument("--gamma", default=0.05, type=float, help="learning rate decay") + + parser.add_argument("--weight_decay", default=0.0, type=float, help="weight decay") + parser.add_argument( + "--commit", + type=float, + default=0.02, + help="hyper-parameter for the commitment loss", + ) + parser.add_argument( + "--loss_vel", + type=float, + default=0.1, + help="hyper-parameter for the velocity loss", + ) + + +def _add_model_args(parser): + parser.add_argument("--code_dim", type=int, default=512, help="embedding dimension") + parser.add_argument("--depth", type=int, default=3, help="depth of the network") + parser.add_argument( + "--output_emb_width", type=int, default=512, help="output embedding width" + ) + parser.add_argument( + "--resume_pth", type=str, default=None, help="resume pth for VQ" + ) + + +def train_args(): + parser = argparse.ArgumentParser( + description="Optimal Transport AutoEncoder training for AIST", + add_help=True, + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) + _add_dataset_args(parser) + _add_optim_args(parser) + _add_model_args(parser) + + ## output directory + parser.add_argument("--out_dir", type=str, required=True, help="output directory") + ## other + parser.add_argument("--print_iter", default=200, type=int, help="print frequency") + parser.add_argument( + "--eval_iter", default=1000, type=int, help="evaluation frequency" + ) + parser.add_argument( + "--seed", default=123, type=int, help="seed for initializing training." 
+ ) + args = parser.parse_args() + return args diff --git a/visualize/.ipynb_checkpoints/render_codes-checkpoint.py b/visualize/.ipynb_checkpoints/render_codes-checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..16f9cd1c6a4c31bfe4f3ed63379d5d450b3d02cd --- /dev/null +++ b/visualize/.ipynb_checkpoints/render_codes-checkpoint.py @@ -0,0 +1,163 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +import copy +import glob +import os +import re +import subprocess +from collections import OrderedDict +from typing import Dict, List + +import mediapy + +import numpy as np + +import torch +import torch as th +import torchaudio +from attrdict import AttrDict + +from omegaconf import OmegaConf +from tqdm import tqdm +from utils.model_util import get_person_num +from visualize.ca_body.utils.image import linear2displayBatch +from visualize.ca_body.utils.train import load_checkpoint, load_from_config + +ffmpeg_header = "ffmpeg -y " # -hide_banner -loglevel error " + + +def filter_params(params, ignore_names): + return OrderedDict( + [ + (k, v) + for k, v in params.items() + if not any([re.match(n, k) is not None for n in ignore_names]) + ] + ) + + +def call_ffmpeg(command: str) -> None: + print(command, "-" * 100) + e = subprocess.call(command, shell=True) + if e != 0: + assert False, e + + +class BodyRenderer(th.nn.Module): + def __init__( + self, + config_base: str, + render_rgb: bool, + ): + super().__init__() + self.config_base = config_base + ckpt_path = f"{config_base}/body_dec.ckpt" + config_path = f"{config_base}/config.yml" + assets_path = f"{config_base}/static_assets.pt" + # config + config = OmegaConf.load(config_path) + gpu = config.get("gpu", 0) + self.device = th.device(f"cuda:{gpu}") + # assets + static_assets = AttrDict(torch.load(assets_path)) + # build model + self.model = load_from_config(config.model, assets=static_assets).to( + self.device + ) + self.model.cal_enabled = False + self.model.pixel_cal_enabled = False + self.model.learn_blur_enabled = False + self.render_rgb = render_rgb + if not self.render_rgb: + self.model.rendering_enabled = None + # load model checkpoints + print("loading...", ckpt_path) + load_checkpoint( + ckpt_path, + modules={"model": self.model}, + ignore_names={"model": ["lbs_fn.*"]}, + ) + self.model.eval() + self.model.to(self.device) + # load default parameters for renderer + person = get_person_num(config_path) + self.default_inputs = th.load(f"assets/render_defaults_{person}.pth") + + def _write_video_stream( + self, motion: np.ndarray, face: np.ndarray, save_name: str + ) -> None: + out = self._render_loop(motion, face) + mediapy.write_video(save_name, out, fps=30) + + def _render_loop(self, body_pose: np.ndarray, face: np.ndarray) -> List[np.ndarray]: + all_rgb = [] + default_inputs_copy = copy.deepcopy(self.default_inputs) + for b in tqdm(range(len(body_pose))): + B = default_inputs_copy["K"].shape[0] + default_inputs_copy["lbs_motion"] = ( + th.tensor(body_pose[b : b + 1, :], device=self.device, dtype=th.float) + .tile(B, 1) + .to(self.device) + ) + geom = ( + self.model.lbs_fn.lbs_fn( + default_inputs_copy["lbs_motion"], + self.model.lbs_fn.lbs_scale.unsqueeze(0).tile(B, 1), + self.model.lbs_fn.lbs_template_verts.unsqueeze(0).tile(B, 1, 1), + ) + * self.model.lbs_fn.global_scaling + ) + default_inputs_copy["geom"] = geom + face_codes = ( + 
th.from_numpy(face).float().cuda() if not th.is_tensor(face) else face + ) + curr_face = th.tile(face_codes[b : b + 1, ...], (2, 1)) + default_inputs_copy["face_embs"] = curr_face + preds = self.model(**default_inputs_copy) + rgb0 = linear2displayBatch(preds["rgb"])[0] + rgb1 = linear2displayBatch(preds["rgb"])[1] + rgb = th.cat((rgb0, rgb1), axis=-1).permute(1, 2, 0) + rgb = rgb.clip(0, 255).to(th.uint8) + all_rgb.append(rgb.contiguous().detach().byte().cpu().numpy()) + return all_rgb + + def render_full_video( + self, + data_block: Dict[str, np.ndarray], + animation_save_path: str, + audio_sr: int = None, + render_gt: bool = False, + ) -> None: + tag = os.path.basename(os.path.dirname(animation_save_path)) + save_name = os.path.splitext(os.path.basename(animation_save_path))[0] + save_name = f"{tag}_{save_name}" + torchaudio.save( + f"/tmp/audio_{save_name}.wav", + torch.tensor(data_block["audio"]), + audio_sr, + ) + if render_gt: + tag = "gt" + self._write_video_stream( + data_block["gt_body"], + data_block["gt_face"], + f"/tmp/{tag}_{save_name}.mp4", + ) + else: + tag = "pred" + self._write_video_stream( + data_block["body_motion"], + data_block["face_motion"], + f"/tmp/{tag}_{save_name}.mp4", + ) + command = f"{ffmpeg_header} -i /tmp/{tag}_{save_name}.mp4 -i /tmp/audio_{save_name}.wav -c:v copy -map 0:v:0 -map 1:a:0 -c:a aac -b:a 192k -pix_fmt yuva420p {animation_save_path}_{tag}.mp4" + call_ffmpeg(command) + subprocess.call( + f"rm /tmp/audio_{save_name}.wav && rm /tmp/{tag}_{save_name}.mp4", + shell=True, + ) diff --git a/visualize/ca_body/LICENSE b/visualize/ca_body/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..872bc82ca7881a1c072b24e4c33783c7fc288c1d --- /dev/null +++ b/visualize/ca_body/LICENSE @@ -0,0 +1,399 @@ +Attribution-NonCommercial 4.0 International + +======================================================================= + +Creative Commons Corporation ("Creative Commons") is not a law firm and +does not provide legal services or legal advice. Distribution of +Creative Commons public licenses does not create a lawyer-client or +other relationship. Creative Commons makes its licenses and related +information available on an "as-is" basis. Creative Commons gives no +warranties regarding its licenses, any material licensed under their +terms and conditions, or any related information. Creative Commons +disclaims all liability for damages resulting from their use to the +fullest extent possible. + +Using Creative Commons Public Licenses + +Creative Commons public licenses provide a standard set of terms and +conditions that creators and other rights holders may use to share +original works of authorship and other material subject to copyright +and certain other rights specified in the public license below. The +following considerations are for informational purposes only, are not +exhaustive, and do not form part of our licenses. + + Considerations for licensors: Our public licenses are + intended for use by those authorized to give the public + permission to use material in ways otherwise restricted by + copyright and certain other rights. Our licenses are + irrevocable. Licensors should read and understand the terms + and conditions of the license they choose before applying it. + Licensors should also secure all rights necessary before + applying our licenses so that the public can reuse the + material as expected. Licensors should clearly mark any + material not subject to the license. 
This includes other CC- + licensed material, or material used under an exception or + limitation to copyright. More considerations for licensors: + wiki.creativecommons.org/Considerations_for_licensors + + Considerations for the public: By using one of our public + licenses, a licensor grants the public permission to use the + licensed material under specified terms and conditions. If + the licensor's permission is not necessary for any reason--for + example, because of any applicable exception or limitation to + copyright--then that use is not regulated by the license. Our + licenses grant only permissions under copyright and certain + other rights that a licensor has authority to grant. Use of + the licensed material may still be restricted for other + reasons, including because others have copyright or other + rights in the material. A licensor may make special requests, + such as asking that all changes be marked or described. + Although not required by our licenses, you are encouraged to + respect those requests where reasonable. More_considerations + for the public: + wiki.creativecommons.org/Considerations_for_licensees + +======================================================================= + +Creative Commons Attribution-NonCommercial 4.0 International Public +License + +By exercising the Licensed Rights (defined below), You accept and agree +to be bound by the terms and conditions of this Creative Commons +Attribution-NonCommercial 4.0 International Public License ("Public +License"). To the extent this Public License may be interpreted as a +contract, You are granted the Licensed Rights in consideration of Your +acceptance of these terms and conditions, and the Licensor grants You +such rights in consideration of benefits the Licensor receives from +making the Licensed Material available under these terms and +conditions. + +Section 1 -- Definitions. + + a. Adapted Material means material subject to Copyright and Similar + Rights that is derived from or based upon the Licensed Material + and in which the Licensed Material is translated, altered, + arranged, transformed, or otherwise modified in a manner requiring + permission under the Copyright and Similar Rights held by the + Licensor. For purposes of this Public License, where the Licensed + Material is a musical work, performance, or sound recording, + Adapted Material is always produced where the Licensed Material is + synched in timed relation with a moving image. + + b. Adapter's License means the license You apply to Your Copyright + and Similar Rights in Your contributions to Adapted Material in + accordance with the terms and conditions of this Public License. + + c. Copyright and Similar Rights means copyright and/or similar rights + closely related to copyright including, without limitation, + performance, broadcast, sound recording, and Sui Generis Database + Rights, without regard to how the rights are labeled or + categorized. For purposes of this Public License, the rights + specified in Section 2(b)(1)-(2) are not Copyright and Similar + Rights. + d. Effective Technological Measures means those measures that, in the + absence of proper authority, may not be circumvented under laws + fulfilling obligations under Article 11 of the WIPO Copyright + Treaty adopted on December 20, 1996, and/or similar international + agreements. + + e. 
Exceptions and Limitations means fair use, fair dealing, and/or + any other exception or limitation to Copyright and Similar Rights + that applies to Your use of the Licensed Material. + + f. Licensed Material means the artistic or literary work, database, + or other material to which the Licensor applied this Public + License. + + g. Licensed Rights means the rights granted to You subject to the + terms and conditions of this Public License, which are limited to + all Copyright and Similar Rights that apply to Your use of the + Licensed Material and that the Licensor has authority to license. + + h. Licensor means the individual(s) or entity(ies) granting rights + under this Public License. + + i. NonCommercial means not primarily intended for or directed towards + commercial advantage or monetary compensation. For purposes of + this Public License, the exchange of the Licensed Material for + other material subject to Copyright and Similar Rights by digital + file-sharing or similar means is NonCommercial provided there is + no payment of monetary compensation in connection with the + exchange. + + j. Share means to provide material to the public by any means or + process that requires permission under the Licensed Rights, such + as reproduction, public display, public performance, distribution, + dissemination, communication, or importation, and to make material + available to the public including in ways that members of the + public may access the material from a place and at a time + individually chosen by them. + + k. Sui Generis Database Rights means rights other than copyright + resulting from Directive 96/9/EC of the European Parliament and of + the Council of 11 March 1996 on the legal protection of databases, + as amended and/or succeeded, as well as other essentially + equivalent rights anywhere in the world. + + l. You means the individual or entity exercising the Licensed Rights + under this Public License. Your has a corresponding meaning. + +Section 2 -- Scope. + + a. License grant. + + 1. Subject to the terms and conditions of this Public License, + the Licensor hereby grants You a worldwide, royalty-free, + non-sublicensable, non-exclusive, irrevocable license to + exercise the Licensed Rights in the Licensed Material to: + + a. reproduce and Share the Licensed Material, in whole or + in part, for NonCommercial purposes only; and + + b. produce, reproduce, and Share Adapted Material for + NonCommercial purposes only. + + 2. Exceptions and Limitations. For the avoidance of doubt, where + Exceptions and Limitations apply to Your use, this Public + License does not apply, and You do not need to comply with + its terms and conditions. + + 3. Term. The term of this Public License is specified in Section + 6(a). + + 4. Media and formats; technical modifications allowed. The + Licensor authorizes You to exercise the Licensed Rights in + all media and formats whether now known or hereafter created, + and to make technical modifications necessary to do so. The + Licensor waives and/or agrees not to assert any right or + authority to forbid You from making technical modifications + necessary to exercise the Licensed Rights, including + technical modifications necessary to circumvent Effective + Technological Measures. For purposes of this Public License, + simply making modifications authorized by this Section 2(a) + (4) never produces Adapted Material. + + 5. Downstream recipients. + + a. Offer from the Licensor -- Licensed Material. 
Every + recipient of the Licensed Material automatically + receives an offer from the Licensor to exercise the + Licensed Rights under the terms and conditions of this + Public License. + + b. No downstream restrictions. You may not offer or impose + any additional or different terms or conditions on, or + apply any Effective Technological Measures to, the + Licensed Material if doing so restricts exercise of the + Licensed Rights by any recipient of the Licensed + Material. + + 6. No endorsement. Nothing in this Public License constitutes or + may be construed as permission to assert or imply that You + are, or that Your use of the Licensed Material is, connected + with, or sponsored, endorsed, or granted official status by, + the Licensor or others designated to receive attribution as + provided in Section 3(a)(1)(A)(i). + + b. Other rights. + + 1. Moral rights, such as the right of integrity, are not + licensed under this Public License, nor are publicity, + privacy, and/or other similar personality rights; however, to + the extent possible, the Licensor waives and/or agrees not to + assert any such rights held by the Licensor to the limited + extent necessary to allow You to exercise the Licensed + Rights, but not otherwise. + + 2. Patent and trademark rights are not licensed under this + Public License. + + 3. To the extent possible, the Licensor waives any right to + collect royalties from You for the exercise of the Licensed + Rights, whether directly or through a collecting society + under any voluntary or waivable statutory or compulsory + licensing scheme. In all other cases the Licensor expressly + reserves any right to collect such royalties, including when + the Licensed Material is used other than for NonCommercial + purposes. + +Section 3 -- License Conditions. + +Your exercise of the Licensed Rights is expressly made subject to the +following conditions. + + a. Attribution. + + 1. If You Share the Licensed Material (including in modified + form), You must: + + a. retain the following if it is supplied by the Licensor + with the Licensed Material: + + i. identification of the creator(s) of the Licensed + Material and any others designated to receive + attribution, in any reasonable manner requested by + the Licensor (including by pseudonym if + designated); + + ii. a copyright notice; + + iii. a notice that refers to this Public License; + + iv. a notice that refers to the disclaimer of + warranties; + + v. a URI or hyperlink to the Licensed Material to the + extent reasonably practicable; + + b. indicate if You modified the Licensed Material and + retain an indication of any previous modifications; and + + c. indicate the Licensed Material is licensed under this + Public License, and include the text of, or the URI or + hyperlink to, this Public License. + + 2. You may satisfy the conditions in Section 3(a)(1) in any + reasonable manner based on the medium, means, and context in + which You Share the Licensed Material. For example, it may be + reasonable to satisfy the conditions by providing a URI or + hyperlink to a resource that includes the required + information. + + 3. If requested by the Licensor, You must remove any of the + information required by Section 3(a)(1)(A) to the extent + reasonably practicable. + + 4. If You Share Adapted Material You produce, the Adapter's + License You apply must not prevent recipients of the Adapted + Material from complying with this Public License. + +Section 4 -- Sui Generis Database Rights. 
+ +Where the Licensed Rights include Sui Generis Database Rights that +apply to Your use of the Licensed Material: + + a. for the avoidance of doubt, Section 2(a)(1) grants You the right + to extract, reuse, reproduce, and Share all or a substantial + portion of the contents of the database for NonCommercial purposes + only; + + b. if You include all or a substantial portion of the database + contents in a database in which You have Sui Generis Database + Rights, then the database in which You have Sui Generis Database + Rights (but not its individual contents) is Adapted Material; and + + c. You must comply with the conditions in Section 3(a) if You Share + all or a substantial portion of the contents of the database. + +For the avoidance of doubt, this Section 4 supplements and does not +replace Your obligations under this Public License where the Licensed +Rights include other Copyright and Similar Rights. + +Section 5 -- Disclaimer of Warranties and Limitation of Liability. + + a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE + EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS + AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF + ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS, + IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION, + WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR + PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, + ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT + KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT + ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU. + + b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE + TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, + NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT, + INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES, + COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR + USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN + ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR + DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR + IN PART, THIS LIMITATION MAY NOT APPLY TO YOU. + + c. The disclaimer of warranties and limitation of liability provided + above shall be interpreted in a manner that, to the extent + possible, most closely approximates an absolute disclaimer and + waiver of all liability. + +Section 6 -- Term and Termination. + + a. This Public License applies for the term of the Copyright and + Similar Rights licensed here. However, if You fail to comply with + this Public License, then Your rights under this Public License + terminate automatically. + + b. Where Your right to use the Licensed Material has terminated under + Section 6(a), it reinstates: + + 1. automatically as of the date the violation is cured, provided + it is cured within 30 days of Your discovery of the + violation; or + + 2. upon express reinstatement by the Licensor. + + For the avoidance of doubt, this Section 6(b) does not affect any + right the Licensor may have to seek remedies for Your violations + of this Public License. + + c. For the avoidance of doubt, the Licensor may also offer the + Licensed Material under separate terms or conditions or stop + distributing the Licensed Material at any time; however, doing so + will not terminate this Public License. + + d. Sections 1, 5, 6, 7, and 8 survive termination of this Public + License. + +Section 7 -- Other Terms and Conditions. + + a. 
The Licensor shall not be bound by any additional or different + terms or conditions communicated by You unless expressly agreed. + + b. Any arrangements, understandings, or agreements regarding the + Licensed Material not stated herein are separate from and + independent of the terms and conditions of this Public License. + +Section 8 -- Interpretation. + + a. For the avoidance of doubt, this Public License does not, and + shall not be interpreted to, reduce, limit, restrict, or impose + conditions on any use of the Licensed Material that could lawfully + be made without permission under this Public License. + + b. To the extent possible, if any provision of this Public License is + deemed unenforceable, it shall be automatically reformed to the + minimum extent necessary to make it enforceable. If the provision + cannot be reformed, it shall be severed from this Public License + without affecting the enforceability of the remaining terms and + conditions. + + c. No term or condition of this Public License will be waived and no + failure to comply consented to unless expressly agreed to by the + Licensor. + + d. Nothing in this Public License constitutes or may be interpreted + as a limitation upon, or waiver of, any privileges and immunities + that apply to the Licensor or You, including from the legal + processes of any jurisdiction or authority. + +======================================================================= + +Creative Commons is not a party to its public +licenses. Notwithstanding, Creative Commons may elect to apply one of +its public licenses to material it publishes and in those instances +will be considered the “Licensor.” The text of the Creative Commons +public licenses is dedicated to the public domain under the CC0 Public +Domain Dedication. Except for the limited purpose of indicating that +material is shared under a Creative Commons public license or as +otherwise permitted by the Creative Commons policies published at +creativecommons.org/policies, Creative Commons does not authorize the +use of the trademark "Creative Commons" or any other trademark or logo +of Creative Commons without its prior written consent including, +without limitation, in connection with any unauthorized modifications +to any of its public licenses or any other arrangements, +understandings, or agreements concerning use of licensed material. For +the avoidance of doubt, this paragraph does not form part of the +public licenses. + +Creative Commons may be contacted at creativecommons.org. 
\ No newline at end of file diff --git a/visualize/ca_body/README.md b/visualize/ca_body/README.md new file mode 100644 index 0000000000000000000000000000000000000000..9c5b110923561e5fb5679b11fed7c0bdd8ec927f --- /dev/null +++ b/visualize/ca_body/README.md @@ -0,0 +1,17 @@ +# ca_body + +Codec Avatar Body + +### Dependencies + +See `requirements.txt` + +### Repository structure + +- `ca_body/` - python source + * `models` - standalone models + * `nn` - reusable modules (layers, blocks, learnable, modules, networks) + * `utils` - reusable utils (functions, modules w/o learnable params) + +- `notebooks/` - example notebooks +- `data/` - location of sample data and checkpoints diff --git a/visualize/ca_body/models/mesh_vae_drivable.py b/visualize/ca_body/models/mesh_vae_drivable.py new file mode 100644 index 0000000000000000000000000000000000000000..e236b1ed1108a247a49543220467ca6fdbb2f4ad --- /dev/null +++ b/visualize/ca_body/models/mesh_vae_drivable.py @@ -0,0 +1,765 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +import logging +from typing import Dict, Optional, Tuple + +import numpy as np +import torch as th +import torch.nn as nn +import torch.nn.functional as F + +from torchvision.utils import make_grid +from torchvision.transforms.functional import gaussian_blur + +import visualize.ca_body.nn.layers as la + +from visualize.ca_body.nn.blocks import ( + ConvBlock, + ConvDownBlock, + UpConvBlockDeep, + tile2d, + weights_initializer, +) +from visualize.ca_body.nn.dof_cal import LearnableBlur + +from visualize.ca_body.utils.geom import ( + GeometryModule, + compute_view_cos, + depth_discontuity_mask, + depth2normals, +) + +from visualize.ca_body.nn.shadow import ShadowUNet, PoseToShadow +from visualize.ca_body.nn.unet import UNetWB +from visualize.ca_body.nn.color_cal import CalV5 + +from visualize.ca_body.utils.image import linear2displayBatch +from visualize.ca_body.utils.lbs import LBSModule +from visualize.ca_body.utils.render import RenderLayer +from visualize.ca_body.utils.seams import SeamSampler +from visualize.ca_body.utils.render import RenderLayer + +from visualize.ca_body.nn.face import FaceDecoderFrontal + +logger = logging.getLogger(__name__) + + +class CameraPixelBias(nn.Module): + def __init__(self, image_height, image_width, cameras, ds_rate) -> None: + super().__init__() + self.image_height = image_height + self.image_width = image_width + self.cameras = cameras + self.n_cameras = len(cameras) + + bias = th.zeros( + (self.n_cameras, 1, image_width // ds_rate, image_height // ds_rate), dtype=th.float32 + ) + self.register_parameter("bias", nn.Parameter(bias)) + + def forward(self, idxs: th.Tensor): + bias_up = F.interpolate( + self.bias[idxs], size=(self.image_height, self.image_width), mode='bilinear' + ) + return bias_up + + +class AutoEncoder(nn.Module): + def __init__( + self, + encoder, + decoder, + decoder_view, + encoder_face, + # hqlp decoder to get the codes + decoder_face, + shadow_net, + upscale_net, + assets, + pose_to_shadow=None, + renderer=None, + cal=None, + pixel_cal=None, + learn_blur: bool = True, + ): + super().__init__() + # TODO: should we have a shared LBS here? 
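+        # GeometryModule maps between mesh vertices and UV space for this
+        # topology; LBSModule below (un)poses vertices via linear blend skinning.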
+ + self.geo_fn = GeometryModule( + assets.topology.vi, + assets.topology.vt, + assets.topology.vti, + assets.topology.v2uv, + uv_size=1024, + impaint=True, + ) + + self.lbs_fn = LBSModule( + assets.lbs_model_json, + assets.lbs_config_dict, + assets.lbs_template_verts, + assets.lbs_scale, + assets.global_scaling, + ) + + self.seam_sampler = SeamSampler(assets.seam_data_1024) + self.seam_sampler_2k = SeamSampler(assets.seam_data_2048) + + # joint tex -> body and clothes + # TODO: why do we have a joint one in the first place? + tex_mean = gaussian_blur(th.as_tensor(assets.tex_mean)[np.newaxis], kernel_size=11) + self.register_buffer("tex_mean", F.interpolate(tex_mean, (2048, 2048), mode='bilinear')) + + # this is shared + self.tex_std = assets.tex_var if 'tex_var' in assets else 64.0 + + face_cond_mask = th.as_tensor(assets.face_cond_mask, dtype=th.float32)[ + np.newaxis, np.newaxis + ] + self.register_buffer("face_cond_mask", face_cond_mask) + + meye_mask = self.geo_fn.to_uv( + th.as_tensor(assets.mouth_eyes_mask_geom[np.newaxis, :, np.newaxis]) + ) + meye_mask = F.interpolate(meye_mask, (2048, 2048), mode='bilinear') + self.register_buffer("meye_mask", meye_mask) + + self.decoder = ConvDecoder( + geo_fn=self.geo_fn, + seam_sampler=self.seam_sampler, + **decoder, + assets=assets, + ) + + # embs for everything but face + non_head_mask = 1.0 - assets.face_mask + self.encoder = Encoder( + geo_fn=self.geo_fn, + mask=non_head_mask, + **encoder, + ) + self.encoder_face = FaceEncoder( + assets=assets, + **encoder_face, + ) + + # using face decoder to generate better conditioning + decoder_face_ckpt_path = None + if 'ckpt' in decoder_face: + decoder_face_ckpt_path = decoder_face.pop('ckpt') + self.decoder_face = FaceDecoderFrontal(assets=assets, **decoder_face) + + if decoder_face_ckpt_path is not None: + self.decoder_face.load_state_dict(th.load(decoder_face_ckpt_path), strict=False) + + self.decoder_view = UNetViewDecoder( + self.geo_fn, + seam_sampler=self.seam_sampler, + **decoder_view, + ) + + self.shadow_net = ShadowUNet( + ao_mean=assets.ao_mean, + interp_mode="bilinear", + biases=False, + **shadow_net, + ) + + self.pose_to_shadow_enabled = False + if pose_to_shadow is not None: + self.pose_to_shadow_enabled = True + self.pose_to_shadow = PoseToShadow(**pose_to_shadow) + + self.upscale_net = UpscaleNet( + in_channels=6, size=1024, upscale_factor=2, out_channels=3, **upscale_net + ) + + self.pixel_cal_enabled = False + if pixel_cal is not None: + self.pixel_cal_enabled = True + self.pixel_cal = CameraPixelBias(**pixel_cal, cameras=assets.camera_ids) + + self.learn_blur_enabled = False + if learn_blur: + self.learn_blur_enabled = True + self.learn_blur = LearnableBlur(assets.camera_ids) + + # training-only stuff + self.cal_enabled = False + if cal is not None: + self.cal_enabled = True + self.cal = CalV5(**cal, cameras=assets.camera_ids) + + self.rendering_enabled = False + if renderer is not None: + self.rendering_enabled = True + self.renderer = RenderLayer( + h=renderer.image_height, + w=renderer.image_width, + vt=self.geo_fn.vt, + vi=self.geo_fn.vi, + vti=self.geo_fn.vti, + flip_uvs=False, + ) + + @th.jit.unused + def compute_summaries(self, preds, batch): + # TODO: switch to common summaries? 
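+        # Assembles a progress image for logging: grids of predicted RGB,
+        # ground-truth RGB, and normals derived from the predicted depth.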
+ # return compute_summaries_mesh(preds, batch) + rgb = linear2displayBatch(preds['rgb'][:, :3]) + rgb_gt = linear2displayBatch(batch['image']) + depth = preds['depth'][:, np.newaxis] + mask = depth > 0.0 + normals = ( + 255 * (1.0 - depth2normals(depth, batch['focal'], batch['princpt'])) / 2.0 + ) * mask + grid_rgb = make_grid(rgb, nrow=16).permute(1, 2, 0).clip(0, 255).to(th.uint8) + grid_rgb_gt = make_grid(rgb_gt, nrow=16).permute(1, 2, 0).clip(0, 255).to(th.uint8) + grid_normals = make_grid(normals, nrow=16).permute(1, 2, 0).clip(0, 255).to(th.uint8) + + progress_image = th.cat([grid_rgb, grid_rgb_gt, grid_normals], dim=0) + return { + 'progress_image': (progress_image, 'png'), + } + + def forward_tex(self, tex_mean_rec, tex_view_rec, shadow_map): + x = th.cat([tex_mean_rec, tex_view_rec], dim=1) + tex_rec = tex_mean_rec + tex_view_rec + + tex_rec = self.seam_sampler.impaint(tex_rec) + tex_rec = self.seam_sampler.resample(tex_rec) + + tex_rec = F.interpolate(tex_rec, size=(2048, 2048), mode="bilinear", align_corners=False) + tex_rec = tex_rec + self.upscale_net(x) + + tex_rec = tex_rec * self.tex_std + self.tex_mean + + shadow_map = self.seam_sampler_2k.impaint(shadow_map) + shadow_map = self.seam_sampler_2k.resample(shadow_map) + shadow_map = self.seam_sampler_2k.resample(shadow_map) + + tex_rec = tex_rec * shadow_map + + tex_rec = self.seam_sampler_2k.impaint(tex_rec) + tex_rec = self.seam_sampler_2k.resample(tex_rec) + tex_rec = self.seam_sampler_2k.resample(tex_rec) + + return tex_rec + + def encode(self, geom: th.Tensor, lbs_motion: th.Tensor, face_embs_hqlp: th.Tensor): + + with th.no_grad(): + verts_unposed = self.lbs_fn.unpose(geom, lbs_motion) + verts_unposed_uv = self.geo_fn.to_uv(verts_unposed) + + # extract face region for geom + tex + enc_preds = self.encoder(motion=lbs_motion, verts_unposed=verts_unposed) + # TODO: probably need to rename these to `face_embs_mugsy` or smth + # TODO: we need the same thing for face? + # enc_face_preds = self.encoder_face(verts_unposed_uv) + with th.no_grad(): + face_dec_preds = self.decoder_face(face_embs_hqlp) + enc_face_preds = self.encoder_face(**face_dec_preds) + + preds = { + **enc_preds, + **enc_face_preds, + 'face_dec_preds': face_dec_preds, + } + return preds + + def forward( + self, + # TODO: should we try using this as well for cond? + lbs_motion: th.Tensor, + campos: th.Tensor, + geom: Optional[th.Tensor] = None, + ao: Optional[th.Tensor] = None, + K: Optional[th.Tensor] = None, + Rt: Optional[th.Tensor] = None, + image_bg: Optional[th.Tensor] = None, + image: Optional[th.Tensor] = None, + image_mask: Optional[th.Tensor] = None, + embs: Optional[th.Tensor] = None, + _index: Optional[Dict[str, th.Tensor]] = None, + face_embs: Optional[th.Tensor] = None, + embs_conv: Optional[th.Tensor] = None, + tex_seg: Optional[th.Tensor] = None, + encode=True, + iteration: Optional[int] = None, + **kwargs, + ): + B = lbs_motion.shape[0] + + if not th.jit.is_scripting() and encode: + # NOTE: these are `face_embs_hqlp` + enc_preds = self.encode(geom, lbs_motion, face_embs) + embs = enc_preds['embs'] + # NOTE: these are `face_embs` in body space + face_embs_body = enc_preds['face_embs'] + + dec_preds = self.decoder( + motion=lbs_motion, + embs=embs, + face_embs=face_embs_body, + embs_conv=embs_conv, + ) + + geom_rec = self.lbs_fn.pose(dec_preds['geom_delta_rec'], lbs_motion) + + dec_view_preds = self.decoder_view( + geom_rec=geom_rec, + tex_mean_rec=dec_preds["tex_mean_rec"], + camera_pos=campos, + ) + + # TODO: should we train an AO model? 
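+        # Shadow selection: during training with pose_to_shadow enabled, compute
+        # the AO-driven map (used for texturing) and also expose the pose-driven
+        # map as `pose_shadow_map`; at inference use the pose-driven net when
+        # enabled, otherwise fall back to the AO-driven one.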
+ if self.training and self.pose_to_shadow_enabled: + shadow_preds = self.shadow_net(ao_map=ao) + pose_shadow_preds = self.pose_to_shadow(lbs_motion) + shadow_preds['pose_shadow_map'] = pose_shadow_preds['shadow_map'] + elif self.pose_to_shadow_enabled: + shadow_preds = self.pose_to_shadow(lbs_motion) + else: + shadow_preds = self.shadow_net(ao_map=ao) + + tex_rec = self.forward_tex( + dec_preds["tex_mean_rec"], + dec_view_preds["tex_view_rec"], + shadow_preds["shadow_map"], + ) + + if not th.jit.is_scripting() and self.cal_enabled: + tex_rec = self.cal(tex_rec, self.cal.name_to_idx(_index['camera'])) + + preds = { + 'geom': geom_rec, + 'tex_rec': tex_rec, + **dec_preds, + **shadow_preds, + **dec_view_preds, + } + + if not th.jit.is_scripting() and encode: + preds.update(**enc_preds) + + if not th.jit.is_scripting() and self.rendering_enabled: + + # NOTE: this is a reduced version tested for forward only + renders = self.renderer( + preds['geom'], + tex_rec, + K=K, + Rt=Rt, + ) + + preds.update(rgb=renders['render']) + + if not th.jit.is_scripting() and self.learn_blur_enabled: + preds['rgb'] = self.learn_blur(preds['rgb'], _index['camera']) + preds['learn_blur_weights'] = self.learn_blur.reg(_index['camera']) + + if not th.jit.is_scripting() and self.pixel_cal_enabled: + assert self.cal_enabled + cam_idxs = self.cal.name_to_idx(_index['camera']) + pixel_bias = self.pixel_cal(cam_idxs) + preds['rgb'] = preds['rgb'] + pixel_bias + + return preds + + +class Encoder(nn.Module): + """A joint encoder for tex and geometry.""" + + def __init__( + self, + geo_fn, + n_embs, + noise_std, + mask, + logvar_scale=0.1, + ): + """Fixed-width conv encoder.""" + super().__init__() + + self.noise_std = noise_std + self.n_embs = n_embs + self.geo_fn = geo_fn + self.logvar_scale = logvar_scale + + self.verts_conv = ConvDownBlock(3, 8, 512) + + mask = th.as_tensor(mask[np.newaxis, np.newaxis], dtype=th.float32) + mask = F.interpolate(mask, size=(512, 512), mode='bilinear').to(th.bool) + self.register_buffer("mask", mask) + + self.joint_conv_blocks = nn.Sequential( + ConvDownBlock(8, 16, 256), + ConvDownBlock(16, 32, 128), + ConvDownBlock(32, 32, 64), + ConvDownBlock(32, 64, 32), + ConvDownBlock(64, 128, 16), + ConvDownBlock(128, 128, 8), + # ConvDownBlock(128, 128, 4), + ) + + # TODO: should we put initializer + self.mu = la.LinearWN(4 * 4 * 128, self.n_embs) + self.logvar = la.LinearWN(4 * 4 * 128, self.n_embs) + + self.apply(weights_initializer(0.2)) + self.mu.apply(weights_initializer(1.0)) + self.logvar.apply(weights_initializer(1.0)) + + def forward(self, motion, verts_unposed): + preds = {} + + B = motion.shape[0] + + # converting motion to the unposed + verts_cond = ( + F.interpolate(self.geo_fn.to_uv(verts_unposed), size=(512, 512), mode='bilinear') + * self.mask + ) + verts_cond = self.verts_conv(verts_cond) + + # tex_cond = F.interpolate(tex_avg, size=(512, 512), mode='bilinear') * self.mask + # tex_cond = self.tex_conv(tex_cond) + # joint_cond = th.cat([verts_cond, tex_cond], dim=1) + joint_cond = verts_cond + x = self.joint_conv_blocks(joint_cond) + x = x.reshape(B, -1) + embs_mu = self.mu(x) + embs_logvar = self.logvar_scale * self.logvar(x) + + # NOTE: the noise is only applied to the input-conditioned values + if self.training: + noise = th.randn_like(embs_mu) + embs = embs_mu + th.exp(embs_logvar) * noise * self.noise_std + else: + embs = embs_mu.clone() + + preds.update( + embs=embs, + embs_mu=embs_mu, + embs_logvar=embs_logvar, + ) + + return preds + + +class ConvDecoder(nn.Module): + 
"""Multi-region view-independent decoder.""" + + def __init__( + self, + geo_fn, + uv_size, + seam_sampler, + init_uv_size, + n_pose_dims, + n_pose_enc_channels, + n_embs, + n_embs_enc_channels, + n_face_embs, + n_init_channels, + n_min_channels, + assets, + ): + super().__init__() + + self.geo_fn = geo_fn + + self.uv_size = uv_size + self.init_uv_size = init_uv_size + self.n_pose_dims = n_pose_dims + self.n_pose_enc_channels = n_pose_enc_channels + self.n_embs = n_embs + self.n_embs_enc_channels = n_embs_enc_channels + self.n_face_embs = n_face_embs + + self.n_blocks = int(np.log2(self.uv_size // init_uv_size)) + self.sizes = [init_uv_size * 2**s for s in range(self.n_blocks + 1)] + + # TODO: just specify a sequence? + self.n_channels = [ + max(n_init_channels // 2**b, n_min_channels) for b in range(self.n_blocks + 1) + ] + + logger.info(f"ConvDecoder: n_channels = {self.n_channels}") + + self.local_pose_conv_block = ConvBlock( + n_pose_dims, + n_pose_enc_channels, + init_uv_size, + kernel_size=1, + padding=0, + ) + + self.embs_fc = nn.Sequential( + la.LinearWN(n_embs, 4 * 4 * 128), + nn.LeakyReLU(0.2, inplace=True), + ) + # TODO: should we switch to the basic version? + self.embs_conv_block = nn.Sequential( + UpConvBlockDeep(128, 128, 8), + UpConvBlockDeep(128, 128, 16), + UpConvBlockDeep(128, 64, 32), + UpConvBlockDeep(64, n_embs_enc_channels, 64), + ) + + self.face_embs_fc = nn.Sequential( + la.LinearWN(n_face_embs, 4 * 4 * 32), + nn.LeakyReLU(0.2, inplace=True), + ) + self.face_embs_conv_block = nn.Sequential( + UpConvBlockDeep(32, 64, 8), + UpConvBlockDeep(64, 64, 16), + UpConvBlockDeep(64, n_embs_enc_channels, 32), + ) + + n_groups = 2 + + self.joint_conv_block = ConvBlock( + n_pose_enc_channels + n_embs_enc_channels, + n_init_channels, + self.init_uv_size, + ) + + self.conv_blocks = nn.ModuleList([]) + for b in range(self.n_blocks): + self.conv_blocks.append( + UpConvBlockDeep( + self.n_channels[b] * n_groups, + self.n_channels[b + 1] * n_groups, + self.sizes[b + 1], + groups=n_groups, + ), + ) + + self.verts_conv = la.Conv2dWNUB( + in_channels=self.n_channels[-1], + out_channels=3, + kernel_size=3, + height=self.uv_size, + width=self.uv_size, + padding=1, + ) + self.tex_conv = la.Conv2dWNUB( + in_channels=self.n_channels[-1], + out_channels=3, + kernel_size=3, + height=self.uv_size, + width=self.uv_size, + padding=1, + ) + + self.apply(weights_initializer(0.2)) + self.verts_conv.apply(weights_initializer(1.0)) + self.tex_conv.apply(weights_initializer(1.0)) + + self.seam_sampler = seam_sampler + + # NOTE: removing head region from pose completely + pose_cond_mask = th.as_tensor( + assets.pose_cond_mask[np.newaxis] * (1 - assets.head_cond_mask[np.newaxis, np.newaxis]), + dtype=th.int32, + ) + self.register_buffer("pose_cond_mask", pose_cond_mask) + face_cond_mask = th.as_tensor(assets.face_cond_mask, dtype=th.float32)[ + np.newaxis, np.newaxis + ] + self.register_buffer("face_cond_mask", face_cond_mask) + + body_cond_mask = th.as_tensor(assets.body_cond_mask, dtype=th.float32)[ + np.newaxis, np.newaxis + ] + self.register_buffer("body_cond_mask", body_cond_mask) + + def forward(self, motion, embs, face_embs, embs_conv: Optional[th.Tensor] = None): + + # processing pose + pose = motion[:, 6:] + + B = pose.shape[0] + + non_head_mask = (self.body_cond_mask * (1.0 - self.face_cond_mask)).clip(0.0, 1.0) + + pose_masked = tile2d(pose, self.init_uv_size) * self.pose_cond_mask + pose_conv = self.local_pose_conv_block(pose_masked) * non_head_mask + + # TODO: decoding properly? 
+ if embs_conv is None: + embs_conv = self.embs_conv_block(self.embs_fc(embs).reshape(B, 128, 4, 4)) + + face_conv = self.face_embs_conv_block(self.face_embs_fc(face_embs).reshape(B, 32, 4, 4)) + # merging embeddings with spatial masks + embs_conv[:, :, 32:, :32] = ( + face_conv * self.face_cond_mask[:, :, 32:, :32] + + embs_conv[:, :, 32:, :32] * non_head_mask[:, :, 32:, :32] + ) + + joint = th.cat([pose_conv, embs_conv], axis=1) + joint = self.joint_conv_block(joint) + + x = th.cat([joint, joint], axis=1) + for b in range(self.n_blocks): + x = self.conv_blocks[b](x) + + # NOTE: here we do resampling at feature level + x = self.seam_sampler.impaint(x) + x = self.seam_sampler.resample(x) + x = self.seam_sampler.resample(x) + + verts_features, tex_features = th.split(x, self.n_channels[-1], 1) + + verts_uv_delta_rec = self.verts_conv(verts_features) + # TODO: need to get values + verts_delta_rec = self.geo_fn.from_uv(verts_uv_delta_rec) + tex_mean_rec = self.tex_conv(tex_features) + + preds = { + 'geom_delta_rec': verts_delta_rec, + 'geom_uv_delta_rec': verts_uv_delta_rec, + 'tex_mean_rec': tex_mean_rec, + 'embs_conv': embs_conv, + 'pose_conv': pose_conv, + } + + return preds + + +class FaceEncoder(nn.Module): + """A joint encoder for tex and geometry.""" + + def __init__( + self, + noise_std, + assets, + n_embs=256, + uv_size=512, + logvar_scale=0.1, + n_vert_in=7306 * 3, + prefix="face_", + ): + + """Fixed-width conv encoder.""" + super().__init__() + + # TODO: + self.noise_std = noise_std + self.n_embs = n_embs + self.logvar_scale = logvar_scale + self.prefix = prefix + self.uv_size = uv_size + + assert self.uv_size == 512 + + tex_cond_mask = assets.mugsy_face_mask[..., 0] + tex_cond_mask = th.as_tensor(tex_cond_mask, dtype=th.float32)[np.newaxis, np.newaxis] + tex_cond_mask = F.interpolate( + tex_cond_mask, (self.uv_size, self.uv_size), mode="bilinear", align_corners=True + ) + self.register_buffer("tex_cond_mask", tex_cond_mask) + + self.conv_blocks = nn.Sequential( + ConvDownBlock(3, 4, 512), + ConvDownBlock(4, 8, 256), + ConvDownBlock(8, 16, 128), + ConvDownBlock(16, 32, 64), + ConvDownBlock(32, 64, 32), + ConvDownBlock(64, 128, 16), + ConvDownBlock(128, 128, 8), + ) + self.geommod = nn.Sequential(la.LinearWN(n_vert_in, 256), nn.LeakyReLU(0.2, inplace=True)) + self.jointmod = nn.Sequential( + la.LinearWN(256 + 128 * 4 * 4, 512), nn.LeakyReLU(0.2, inplace=True) + ) + # TODO: should we put initializer + self.mu = la.LinearWN(512, self.n_embs) + self.logvar = la.LinearWN(512, self.n_embs) + + self.apply(weights_initializer(0.2)) + self.mu.apply(weights_initializer(1.0)) + self.logvar.apply(weights_initializer(1.0)) + + # TODO: compute_losses()? 
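+    # NOTE (editor): like the body `Encoder` above, this is a VAE-style
+    # encoder: `forward()` predicts (mu, logvar) and, during training, samples
+    # embs = mu + exp(logvar) * noise * noise_std; at eval time embs = mu.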
+ + def forward(self, face_geom: th.Tensor, face_tex: th.Tensor, **kwargs): + B = face_geom.shape[0] + + tex_cond = F.interpolate( + face_tex, (self.uv_size, self.uv_size), mode="bilinear", align_corners=False + ) + tex_cond = (tex_cond / 255.0 - 0.5) * self.tex_cond_mask + x = self.conv_blocks(tex_cond) + tex_enc = x.reshape(B, 4 * 4 * 128) + + geom_enc = self.geommod(face_geom.reshape(B, -1)) + + x = self.jointmod(th.cat([tex_enc, geom_enc], dim=1)) + embs_mu = self.mu(x) + embs_logvar = self.logvar_scale * self.logvar(x) + + # NOTE: the noise is only applied to the input-conditioned values + if self.training: + noise = th.randn_like(embs_mu) + embs = embs_mu + th.exp(embs_logvar) * noise * self.noise_std + else: + embs = embs_mu.clone() + + preds = {"embs": embs, "embs_mu": embs_mu, "embs_logvar": embs_logvar, "tex_cond": tex_cond} + preds = {f"{self.prefix}{k}": v for k, v in preds.items()} + return preds + + +class UNetViewDecoder(nn.Module): + def __init__(self, geo_fn, net_uv_size, seam_sampler, n_init_ftrs=8): + super().__init__() + self.geo_fn = geo_fn + self.net_uv_size = net_uv_size + self.unet = UNetWB(4, 3, n_init_ftrs=n_init_ftrs, size=net_uv_size) + self.register_buffer("faces", self.geo_fn.vi.to(th.int64), persistent=False) + + def forward(self, geom_rec, tex_mean_rec, camera_pos): + + with th.no_grad(): + view_cos = compute_view_cos(geom_rec, self.faces, camera_pos) + view_cos_uv = self.geo_fn.to_uv(view_cos[..., np.newaxis]) + cond_view = th.cat([view_cos_uv, tex_mean_rec], dim=1) + tex_view = self.unet(cond_view) + # TODO: should we try warping here? + return {"tex_view_rec": tex_view, "cond_view": cond_view} + + +class UpscaleNet(nn.Module): + def __init__(self, in_channels, out_channels, n_ftrs, size=1024, upscale_factor=2): + super().__init__() + + self.conv_block = nn.Sequential( + la.Conv2dWNUB(in_channels, n_ftrs, size, size, kernel_size=3, padding=1), + nn.LeakyReLU(0.2, inplace=True), + ) + + self.out_block = la.Conv2dWNUB( + n_ftrs, + out_channels * upscale_factor**2, + size, + size, + kernel_size=1, + padding=0, + ) + + self.pixel_shuffle = nn.PixelShuffle(upscale_factor=upscale_factor) + self.apply(weights_initializer(0.2)) + self.out_block.apply(weights_initializer(1.0)) + + def forward(self, x): + x = self.conv_block(x) + x = self.out_block(x) + return self.pixel_shuffle(x) \ No newline at end of file diff --git a/visualize/ca_body/nn/blocks.py b/visualize/ca_body/nn/blocks.py new file mode 100644 index 0000000000000000000000000000000000000000..9e1dbb62dd8ffc294eed8bd9472ea6857fd43105 --- /dev/null +++ b/visualize/ca_body/nn/blocks.py @@ -0,0 +1,786 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. 
+""" + + +import logging +from turtle import forward + +import visualize.ca_body.nn.layers as la +from visualize.ca_body.nn.layers import weight_norm_wrapper + +import numpy as np +import torch as th +import torch.nn as nn +import torch.nn.functional as F + + +logger = logging.getLogger(__name__) + + +# pyre-ignore +def weights_initializer(lrelu_slope=0.2): + # pyre-ignore + def init_fn(m): + if isinstance( + m, + ( + nn.Conv2d, + nn.Conv1d, + nn.ConvTranspose2d, + nn.Linear, + ), + ): + gain = nn.init.calculate_gain("leaky_relu", lrelu_slope) + nn.init.kaiming_uniform_(m.weight.data, a=gain) + if hasattr(m, "bias") and m.bias is not None: + nn.init.zeros_(m.bias.data) + else: + logger.debug(f"skipping initialization for {m}") + + return init_fn + + +# pyre-ignore +def WeightNorm(x, dim=0): + return nn.utils.weight_norm(x, dim=dim) + + +# pyre-ignore +def np_warp_bias(uv_size): + xgrid, ygrid = np.meshgrid(np.linspace(-1.0, 1.0, uv_size), np.linspace(-1.0, 1.0, uv_size)) + grid = np.concatenate((xgrid[None, :, :], ygrid[None, :, :]), axis=0)[None, ...].astype( + np.float32 + ) + return grid + + +class Conv2dBias(nn.Conv2d): + __annotations__ = {"bias": th.Tensor} + + def __init__( + self, + in_channels, + out_channels, + kernel_size, + size, + stride=1, + padding=1, + bias=True, + *args, + **kwargs, + ): + super().__init__( + in_channels, + out_channels, + bias=False, + kernel_size=kernel_size, + stride=stride, + padding=padding, + *args, + **kwargs, + ) + if not bias: + logger.warning("ignoring bias=False") + self.bias = nn.Parameter(th.zeros(out_channels, size, size)) + + def forward(self, x): + bias = self.bias.clone() + return ( + # pyre-ignore + th.conv2d( + x, + self.weight, + bias=None, + stride=self.stride, + # pyre-ignore + padding=self.padding, + dilation=self.dilation, + groups=self.groups, + ) + + bias[np.newaxis] + ) + + +class Conv1dBias(nn.Conv1d): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + size, + stride=1, + padding=0, + bias=True, + *args, + **kwargs, + ): + super().__init__( + in_channels, + out_channels, + bias=False, + kernel_size=kernel_size, + stride=stride, + padding=padding, + *args, + **kwargs, + ) + if not bias: + logger.warning("ignoring bias=False") + self.bias = nn.Parameter(th.zeros(out_channels, size)) + + def forward(self, x): + return ( + # pyre-ignore + th.conv1d( + x, + self.weight, + bias=None, + stride=self.stride, + # pyre-ignore + padding=self.padding, + dilation=self.dilation, + groups=self.groups, + ) + + self.bias + ) + + +class UpConvBlock(nn.Module): + # pyre-ignore + def __init__(self, in_channels, out_channels, size, lrelu_slope=0.2): + super().__init__() + # Intergration: it was not exist in github, but assume upsample is same as other class + self.upsample = nn.UpsamplingBilinear2d(size) + self.conv_resize = la.Conv2dWN( + in_channels=in_channels, out_channels=out_channels, kernel_size=1 + ) + self.conv1 = la.Conv2dWNUB( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=3, + height=size, + width=size, + padding=1, + ) + self.lrelu1 = nn.LeakyReLU(lrelu_slope) + # self.conv2 = nn.utils.weight_norm( + # Conv2dBias(in_channels, out_channels, kernel_size=3, size=size), dim=None, + # ) + # self.lrelu2 = nn.LeakyReLU(lrelu_slope) + + # pyre-ignore + def forward(self, x): + x_up = self.upsample(x) + x_skip = self.conv_resize(x_up) + x = self.conv1(x_up) + x = self.lrelu1(x) + return x + x_skip + + +class ConvBlock1d(nn.Module): + def __init__( + self, + in_channels, + out_channels, + size, + 
lrelu_slope=0.2, + kernel_size=3, + padding=1, + wnorm_dim=0, + ): + super().__init__() + + self.conv_resize = WeightNorm( + nn.Conv1d(in_channels, out_channels, kernel_size=1), dim=wnorm_dim + ) + self.conv1 = WeightNorm( + Conv1dBias( + in_channels, + in_channels, + kernel_size=kernel_size, + padding=padding, + size=size, + ), + dim=wnorm_dim, + ) + self.lrelu1 = nn.LeakyReLU(lrelu_slope) + self.conv2 = WeightNorm( + Conv1dBias( + in_channels, + out_channels, + kernel_size=kernel_size, + padding=padding, + size=size, + ), + dim=wnorm_dim, + ) + self.lrelu2 = nn.LeakyReLU(lrelu_slope) + + def forward(self, x): + x_skip = self.conv_resize(x) + x = self.conv1(x) + x = self.lrelu1(x) + x = self.conv2(x) + x = self.lrelu2(x) + return x + x_skip + + +class ConvBlock(nn.Module): + def __init__( + self, + in_channels, + out_channels, + size, + lrelu_slope=0.2, + kernel_size=3, + padding=1, + wnorm_dim=0, + ): + super().__init__() + + Conv2dWNUB = weight_norm_wrapper(la.Conv2dUB, "Conv2dWNUB", g_dim=wnorm_dim, v_dim=None) + Conv2dWN = weight_norm_wrapper(th.nn.Conv2d, "Conv2dWN", g_dim=wnorm_dim, v_dim=None) + + # TODO: do we really need this? + self.conv_resize = Conv2dWN(in_channels, out_channels, kernel_size=1) + self.conv1 = Conv2dWNUB( + in_channels, + in_channels, + kernel_size=kernel_size, + padding=padding, + height=size, + width=size, + ) + + self.lrelu1 = nn.LeakyReLU(lrelu_slope) + self.conv2 = Conv2dWNUB( + in_channels, + out_channels, + kernel_size=kernel_size, + padding=padding, + height=size, + width=size, + ) + self.lrelu2 = nn.LeakyReLU(lrelu_slope) + + def forward(self, x): + x_skip = self.conv_resize(x) + x = self.conv1(x) + x = self.lrelu1(x) + x = self.conv2(x) + x = self.lrelu2(x) + return x + x_skip + + +class ConvBlockNoSkip(nn.Module): + def __init__( + self, + in_channels, + out_channels, + size, + lrelu_slope=0.2, + kernel_size=3, + padding=1, + wnorm_dim=0, + ): + super().__init__() + + self.conv1 = WeightNorm( + Conv2dBias( + in_channels, + in_channels, + kernel_size=kernel_size, + padding=padding, + size=size, + ), + dim=wnorm_dim, + ) + self.lrelu1 = nn.LeakyReLU(lrelu_slope) + self.conv2 = WeightNorm( + Conv2dBias( + in_channels, + out_channels, + kernel_size=kernel_size, + padding=padding, + size=size, + ), + dim=wnorm_dim, + ) + self.lrelu2 = nn.LeakyReLU(lrelu_slope) + + def forward(self, x): + x = self.conv1(x) + x = self.lrelu1(x) + x = self.conv2(x) + x = self.lrelu2(x) + return x + + +class ConvDownBlock(nn.Module): + def __init__(self, in_channels, out_channels, size, lrelu_slope=0.2, groups=1, wnorm_dim=0): + """Constructor. 
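+        A residual block that halves the spatial resolution (size -> size // 2)
+        via strided convolutions.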
+
+        Args:
+            in_channels: int, # of input channels
+            out_channels: int, # of output channels
+            size: the *input* size
+        """
+        super().__init__()
+
+        Conv2dWNUB = weight_norm_wrapper(la.Conv2dUB, "Conv2dWNUB", g_dim=wnorm_dim, v_dim=None)
+        Conv2dWN = weight_norm_wrapper(th.nn.Conv2d, "Conv2dWN", g_dim=wnorm_dim, v_dim=None)
+
+        self.conv_resize = Conv2dWN(
+            in_channels, out_channels, kernel_size=1, stride=2, groups=groups
+        )
+        self.conv1 = Conv2dWNUB(
+            in_channels,
+            in_channels,
+            kernel_size=3,
+            height=size,
+            width=size,
+            groups=groups,
+            padding=1,
+        )
+        self.lrelu1 = nn.LeakyReLU(lrelu_slope)
+
+        self.conv2 = Conv2dWNUB(
+            in_channels,
+            out_channels,
+            kernel_size=3,
+            stride=2,
+            height=size // 2,
+            width=size // 2,
+            groups=groups,
+            padding=1,
+        )
+        self.lrelu2 = nn.LeakyReLU(lrelu_slope)
+
+    def forward(self, x):
+        x_skip = self.conv_resize(x)
+        x = self.conv1(x)
+        x = self.lrelu1(x)
+        x = self.conv2(x)
+        x = self.lrelu2(x)
+        return x + x_skip
+
+
+class UpConvBlockDeep(nn.Module):
+    def __init__(self, in_channels, out_channels, size, lrelu_slope=0.2, wnorm_dim=0, groups=1):
+        super().__init__()
+        self.upsample = nn.UpsamplingBilinear2d(size)
+
+        Conv2dWNUB = weight_norm_wrapper(la.Conv2dUB, "Conv2dWNUB", g_dim=wnorm_dim, v_dim=None)
+        Conv2dWN = weight_norm_wrapper(th.nn.Conv2d, "Conv2dWN", g_dim=wnorm_dim, v_dim=None)
+        # NOTE: the old one normalizes only across one dimension
+
+        self.conv_resize = Conv2dWN(
+            in_channels=in_channels,
+            out_channels=out_channels,
+            kernel_size=1,
+            groups=groups,
+        )
+        self.conv1 = Conv2dWNUB(
+            in_channels,
+            in_channels,
+            kernel_size=3,
+            height=size,
+            width=size,
+            padding=1,
+            groups=groups,
+        )
+        self.lrelu1 = nn.LeakyReLU(lrelu_slope)
+        self.conv2 = Conv2dWNUB(
+            in_channels,
+            out_channels,
+            kernel_size=3,
+            height=size,
+            width=size,
+            padding=1,
+            groups=groups,
+        )
+        self.lrelu2 = nn.LeakyReLU(lrelu_slope)
+
+    def forward(self, x):
+        x_up = self.upsample(x)
+        x_skip = self.conv_resize(x_up)
+
+        x = x_up
+        x = self.conv1(x)
+        x = self.lrelu1(x)
+        x = self.conv2(x)
+        x = self.lrelu2(x)
+
+        return x + x_skip
+
+
+class ConvBlockPositional(nn.Module):
+    def __init__(
+        self,
+        in_channels,
+        out_channels,
+        pos_map,
+        lrelu_slope=0.2,
+        kernel_size=3,
+        padding=1,
+        wnorm_dim=0,
+    ):
+        """Block with positional encoding.
+
+        Args:
+            in_channels: # of input channels (not counting the positional encoding)
+            out_channels: # of output channels
+            pos_map: tensor [P, size, size]
+        """
+        super().__init__()
+        assert len(pos_map.shape) == 3 and pos_map.shape[1] == pos_map.shape[2]
+        self.register_buffer("pos_map", pos_map)
+
+        self.conv_resize = WeightNorm(nn.Conv2d(in_channels, out_channels, 1), dim=wnorm_dim)
+
+        self.conv1 = WeightNorm(
+            nn.Conv2d(
+                in_channels + pos_map.shape[0],
+                in_channels,
+                kernel_size=3,
+                padding=padding,
+            ),
+            dim=wnorm_dim,
+        )
+        self.lrelu1 = nn.LeakyReLU(lrelu_slope)
+        self.conv2 = WeightNorm(
+            nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=padding),
+            dim=wnorm_dim,
+        )
+        self.lrelu2 = nn.LeakyReLU(lrelu_slope)
+
+    def forward(self, x):
+        B = x.shape[0]
+
+        x_skip = self.conv_resize(x)
+
+        pos = self.pos_map[np.newaxis].expand(B, -1, -1, -1)
+
+        x = th.cat([x, pos], dim=1)
+        x = self.conv1(x)
+        x = self.lrelu1(x)
+        x = self.conv2(x)
+        x = self.lrelu2(x)
+        return x + x_skip
+
+
+class UpConvBlockPositional(nn.Module):
+    def __init__(
+        self,
+        in_channels,
+        out_channels,
+        pos_map,
+        lrelu_slope=0.2,
+        wnorm_dim=0,
+    ):
+        """Block with positional encoding.
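+        Upsamples the input to the resolution of `pos_map` and concatenates
+        the positional channels before the first convolution.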
+
+        Args:
+            in_channels: # of input channels (not counting the positional encoding)
+            out_channels: # of output channels
+            pos_map: tensor [P, size, size]
+        """
+        super().__init__()
+        assert len(pos_map.shape) == 3 and pos_map.shape[1] == pos_map.shape[2]
+        self.register_buffer("pos_map", pos_map)
+        size = pos_map.shape[1]
+
+        self.in_channels = in_channels
+        self.out_channels = out_channels
+
+        self.upsample = nn.UpsamplingBilinear2d(size)
+
+        if in_channels != out_channels:
+            self.conv_resize = WeightNorm(nn.Conv2d(in_channels, out_channels, 1), dim=wnorm_dim)
+
+        self.conv1 = WeightNorm(
+            nn.Conv2d(
+                in_channels + pos_map.shape[0],
+                in_channels,
+                kernel_size=3,
+                padding=1,
+            ),
+            dim=wnorm_dim,
+        )
+        self.lrelu1 = nn.LeakyReLU(lrelu_slope)
+        self.conv2 = WeightNorm(
+            nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1),
+            dim=wnorm_dim,
+        )
+        self.lrelu2 = nn.LeakyReLU(lrelu_slope)
+
+    def forward(self, x):
+        B = x.shape[0]
+
+        x_up = self.upsample(x)
+
+        x_skip = x_up
+        if self.in_channels != self.out_channels:
+            x_skip = self.conv_resize(x_up)
+
+        pos = self.pos_map[np.newaxis].expand(B, -1, -1, -1)
+
+        x = th.cat([x_up, pos], dim=1)
+        x = self.conv1(x)
+        x = self.lrelu1(x)
+        x = self.conv2(x)
+        x = self.lrelu2(x)
+
+        return x + x_skip
+
+
+class UpConvBlockDeepNoBias(nn.Module):
+    def __init__(self, in_channels, out_channels, size, lrelu_slope=0.2, wnorm_dim=0, groups=1):
+        super().__init__()
+        self.upsample = nn.UpsamplingBilinear2d(size)
+        # NOTE: the old one normalizes only across one dimension
+        self.conv_resize = WeightNorm(
+            nn.Conv2d(in_channels, out_channels, 1, groups=groups), dim=wnorm_dim
+        )
+        self.conv1 = WeightNorm(
+            nn.Conv2d(in_channels, in_channels, padding=1, kernel_size=3, groups=groups),
+            dim=wnorm_dim,
+        )
+        self.lrelu1 = nn.LeakyReLU(lrelu_slope)
+        self.conv2 = WeightNorm(
+            nn.Conv2d(in_channels, out_channels, padding=1, kernel_size=3, groups=groups),
+            dim=wnorm_dim,
+        )
+        self.lrelu2 = nn.LeakyReLU(lrelu_slope)
+
+    def forward(self, x):
+        x_up = self.upsample(x)
+        x_skip = self.conv_resize(x_up)
+
+        x = x_up
+        x = self.conv1(x)
+        x = self.lrelu1(x)
+        x = self.conv2(x)
+        x = self.lrelu2(x)
+
+        return x + x_skip
+
+
+class UpConvBlockXDeep(nn.Module):
+    def __init__(self, in_channels, out_channels, size, lrelu_slope=0.2, wnorm_dim=0):
+        super().__init__()
+        self.upsample = nn.UpsamplingBilinear2d(size)
+        # TODO: see if this is necessary
+        self.conv_resize = WeightNorm(nn.Conv2d(in_channels, out_channels, 1), dim=wnorm_dim)
+        self.conv1 = WeightNorm(
+            Conv2dBias(in_channels, in_channels // 2, kernel_size=3, size=size),
+            dim=wnorm_dim,
+        )
+        self.lrelu1 = nn.LeakyReLU(lrelu_slope)
+
+        self.conv2 = WeightNorm(
+            Conv2dBias(in_channels // 2, in_channels // 2, kernel_size=3, size=size),
+            dim=wnorm_dim,
+        )
+        self.lrelu2 = nn.LeakyReLU(lrelu_slope)
+
+        self.conv3 = WeightNorm(
+            Conv2dBias(in_channels // 2, out_channels, kernel_size=3, size=size),
+            dim=wnorm_dim,
+        )
+        self.lrelu3 = nn.LeakyReLU(lrelu_slope)
+
+    def forward(self, x):
+        x_up = self.upsample(x)
+        x_skip = self.conv_resize(x_up)
+
+        x = x_up
+        x = self.conv1(x)
+        x = self.lrelu1(x)
+        x = self.conv2(x)
+        x = self.lrelu2(x)
+        x = self.conv3(x)
+        x = self.lrelu3(x)
+
+        return x + x_skip
+
+
+class UpConvCondBlock(nn.Module):
+    def __init__(self, in_channels, out_channels, size, cond_channels, lrelu_slope=0.2):
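+        """Up-sampling residual block with an extra conditioning map that is
+        concatenated to the features before the first convolution."""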
+        super().__init__()
+        self.upsample = nn.UpsamplingBilinear2d(size)
+        self.conv_resize = nn.utils.weight_norm(nn.Conv2d(in_channels, out_channels, 1), dim=None)
+        self.conv1 = WeightNorm(
+            Conv2dBias(in_channels + cond_channels, in_channels, kernel_size=3, size=size),
+        )
+        self.lrelu1 = nn.LeakyReLU(lrelu_slope)
+        self.conv2 = WeightNorm(
+            Conv2dBias(in_channels, out_channels, kernel_size=3, size=size),
+        )
+        self.lrelu2 = nn.LeakyReLU(lrelu_slope)
+
+    def forward(self, x, cond):
+        x_up = self.upsample(x)
+        x_skip = self.conv_resize(x_up)
+
+        x = x_up
+        x = th.cat([x, cond], dim=1)
+        x = self.conv1(x)
+        x = self.lrelu1(x)
+        x = self.conv2(x)
+        x = self.lrelu2(x)
+
+        return x + x_skip
+
+
+class UpConvBlockPS(nn.Module):
+    # pyre-ignore
+    def __init__(self, n_in, n_out, size, kernel_size=3, padding=1):
+        super().__init__()
+        self.conv1 = la.Conv2dWNUB(
+            n_in,
+            n_out * 4,
+            size,
+            size,
+            kernel_size=kernel_size,
+            padding=padding,
+        )
+        self.lrelu = nn.LeakyReLU(0.2, inplace=True)
+        self.ps = nn.PixelShuffle(2)
+
+    def forward(self, x):
+        x = self.conv1(x)
+        x = self.lrelu(x)
+        return self.ps(x)
+
+
+# pyre-ignore
+def apply_crop(
+    image,
+    ymin,
+    ymax,
+    xmin,
+    xmax,
+):
+    """Crops a region from an image."""
+    # NOTE: here we are expecting one of [H, W] [H, W, C] [B, H, W, C]
+    if len(image.shape) == 2:
+        return image[ymin:ymax, xmin:xmax]
+    elif len(image.shape) == 3:
+        return image[ymin:ymax, xmin:xmax, :]
+    elif len(image.shape) == 4:
+        return image[:, ymin:ymax, xmin:xmax, :]
+    else:
+        raise ValueError("provide a batch of images or a single image")
+
+
+def tile1d(x, size):
+    """Tile a given set of features into a convolutional map.
+    Args:
+        x: float tensor of shape [N, F]
+        size: int, the length to tile to
+    Returns:
+        a feature map [N, F, size]
+    """
+    # size = size if isinstance(size, tuple) else (size, size)
+    return x[:, :, np.newaxis].expand(-1, -1, size)
+
+
+def tile2d(x, size: int):
+    """Tile a given set of features into a convolutional map.
+
+    Args:
+        x: float tensor of shape [N, F]
+        size: int, the spatial size to tile to
+
+    Returns:
+        a feature map [N, F, size, size]
+    """
+    # size = size if isinstance(size, tuple) else (size, size)
+    # NOTE: expecting only int here (!!!)
+    return x[:, :, np.newaxis, np.newaxis].expand(-1, -1, size, size)
+
+
+def sample_negative_idxs(size, *args, **kwargs):
+    idxs = th.randperm(size, *args, **kwargs)
+    if th.all(idxs == th.arange(size, dtype=idxs.dtype, device=idxs.device)):
+        return th.flip(idxs, (0,))
+    return idxs
+
+
+def icnr_init(x, scale=2, init=nn.init.kaiming_normal_):
+    ni, nf, h, w = x.shape
+    ni2 = int(ni / (scale**2))
+    k = init(x.new_zeros([ni2, nf, h, w])).transpose(0, 1)
+    k = k.contiguous().view(ni2, nf, -1)
+    k = k.repeat(1, 1, scale**2)
+    return k.contiguous().view([nf, ni, h, w]).transpose(0, 1)
+
+
+class PixelShuffleWN(nn.Module):
+    """PixelShuffle with the right (ICNR) initialization.
+
+    NOTE: prefer creating this block over a plain `nn.PixelShuffle`.
+    """
+
+    def __init__(self, n_in, n_out, upscale_factor=2):
+        super().__init__()
+        self.upscale_factor = upscale_factor
+        self.n_in = n_in
+        self.n_out = n_out
+        self.conv = la.Conv2dWN(n_in, n_out * (upscale_factor**2), kernel_size=1, padding=0)
+        # NOTE: the bias is 2K?
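+        # NOTE (editor): `icnr_init` above makes the pixel-shuffled output
+        # initially equivalent to nearest-neighbour upsampling (ICNR, Aitken
+        # et al., 2017), avoiding early checkerboard artifacts; see
+        # `_init_icnr` below.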
+        self.ps = nn.PixelShuffle(upscale_factor)
+        self._init_icnr()
+
+    def _init_icnr(self):
+        self.conv.weight_v.data.copy_(icnr_init(self.conv.weight_v.data))
+        self.conv.weight_g.data.copy_(
+            ((self.conv.weight_v.data**2).sum(dim=[1, 2, 3]) ** 0.5)[:, None, None, None]
+        )
+
+    def forward(self, x):
+        x = self.conv(x)
+        return self.ps(x)
+
+
+class UpscaleNet(nn.Module):
+    def __init__(self, in_channels, out_channels=3, n_ftrs=16, size=1024, upscale_factor=2):
+        super().__init__()
+
+        self.conv_block = nn.Sequential(
+            la.Conv2dWNUB(in_channels, n_ftrs, size, size, kernel_size=3, padding=1),
+            nn.LeakyReLU(0.2, inplace=True),
+            la.Conv2dWNUB(n_ftrs, n_ftrs, size, size, kernel_size=3, padding=1),
+            nn.LeakyReLU(0.2, inplace=True),
+        )
+
+        self.out_block = la.Conv2dWNUB(
+            n_ftrs,
+            out_channels * upscale_factor**2,
+            size,
+            size,
+            kernel_size=1,
+            padding=0,
+        )
+
+        self.pixel_shuffle = nn.PixelShuffle(upscale_factor=upscale_factor)
+        self.apply(lambda x: la.glorot(x, 0.2))
+        self.out_block.apply(weights_initializer(1.0))
+
+    def forward(self, x):
+        x = self.conv_block(x)
+        x = self.out_block(x)
+        return self.pixel_shuffle(x)
+
+
diff --git a/visualize/ca_body/nn/color_cal.py b/visualize/ca_body/nn/color_cal.py
new file mode 100644
index 0000000000000000000000000000000000000000..a866f646f5bad64673ff9325ef49917fbe7012aa
--- /dev/null
+++ b/visualize/ca_body/nn/color_cal.py
@@ -0,0 +1,322 @@
+"""
+Copyright (c) Meta Platforms, Inc. and affiliates.
+All rights reserved.
+This source code is licensed under the license found in the
+LICENSE file in the root directory of this source tree.
+"""
+
+import logging
+from typing import Any, Dict, List, Mapping, Optional, Sequence
+
+import numpy as np
+import torch as th
+import torch.nn as nn
+from visualize.ca_body.utils.torch import ParamHolder
+
+
+logger: logging.Logger = logging.getLogger(__name__)
+
+
+def scale_hook(grad: Optional[th.Tensor], scale: float) -> Optional[th.Tensor]:
+    if grad is not None:
+        grad = grad * scale
+    return grad
+
+
+class CalBase(th.nn.Module):
+    def name_to_idx(self, cam_names: Sequence[str]) -> th.Tensor:
+        ...
+
+
+class Identity(th.nn.Module):
+    def __init__(
+        self,
+        cameras: List[str],
+        identity_camera: str,
+    ) -> None:
+        super().__init__()
+
+        if identity_camera not in cameras:
+            identity_camera = cameras[0]
+            logger.warning(
+                f"Requested color-calibration identity camera not present, defaulting to {identity_camera}."
+            )
+
+        self.identity_camera = identity_camera
+        self.cameras = cameras
+        self.holder = ParamHolder(
+            (3 + 3,), cameras, init_value=th.tensor([1, 1, 1, 0, 0, 0], dtype=th.float32)
+        )
+
+    def name_to_idx(self, cam_names: Sequence[str]) -> th.Tensor:
+        return self.holder.to_idx(cam_names)
+
+    def forward(self, image: th.Tensor, cam_idxs: th.Tensor) -> th.Tensor:
+        return image
+
+
+class CalV3(CalBase):
+    # pyre-fixme[2]: Parameter must be annotated.
+    def __init__(self, cameras, identity_camera) -> None:
+        super(CalBase, self).__init__()
+        # pyre-fixme[4]: Attribute must be annotated.
+ self.cameras = cameras + + self.conv = th.nn.ModuleList( + [th.nn.Conv2d(3, 3, 1, 1, 0, groups=3) for i in range(len(cameras))] + ) + + for i in range(len(cameras)): + winit = [[1.0], [1.0], [1.0]] + self.conv[i].weight.data[:] = th.from_numpy( + np.array(winit, dtype=np.float32)[:, :, None, None] + ) + self.conv[i].bias.data.zero_() + + if identity_camera not in cameras: + identity_camera = cameras[0] + logger.warning( + f"Requested color-calibration identity camera not present, defaulting to {identity_camera}." + ) + + iidx = cameras.index(identity_camera) + self.conv[iidx].weight.requires_grad = False + self.conv[iidx].bias.requires_grad = False + + def name_to_idx(self, cam_names: Sequence[str]) -> th.Tensor: + dev = next(self.parameters()).device + return th.tensor([self.cameras.index(cn) for cn in cam_names], device=dev, dtype=th.long) + + def forward(self, image: th.Tensor, cam: th.Tensor) -> th.Tensor: + return th.cat([self.conv[cam[i]](image[i : i + 1, :, :, :]) for i in range(image.size(0))]) + + +class CalV5(CalBase): + def __init__( + self, + # pyre-fixme[2]: Parameter must be annotated. + cameras, + # pyre-fixme[2]: Parameter must be annotated. + identity_camera, + gs_lrscale: float = 1e0, + col_lrscale: float = 1e-1, + ) -> None: + super(CalBase, self).__init__() + + if identity_camera not in cameras: + identity_camera = cameras[0] + logger.warning( + f"Requested color-calibration identity camera not present, defaulting to {identity_camera}." + ) + + # pyre-fixme[4]: Attribute must be annotated. + self.identity_camera = identity_camera + # pyre-fixme[4]: Attribute must be annotated. + self.cameras = cameras + self.gs_lrscale = gs_lrscale + self.col_lrscale = col_lrscale + self.holder: ParamHolder = ParamHolder( + # pyre-fixme[6]: For 1st param expected `Tuple[int]` but got `int`. + 3 + 3, + cameras, + init_value=th.FloatTensor([1, 1, 1, 0, 0, 0]), + ) + + # pyre-fixme[4]: Attribute must be annotated. + self.identity_idx = self.holder.to_idx([identity_camera]).item() + # pyre-fixme[4]: Attribute must be annotated. + self.grey_idxs = [self.holder.to_idx([c]).item() for c in cameras if c.startswith("41")] + + s = th.FloatTensor([0.37, 0.52, 0.52]) + self.holder.params.data[th.LongTensor(self.grey_idxs), :3] = s + + def name_to_idx(self, cam_names: Sequence[str]) -> th.Tensor: + return self.holder.to_idx(cam_names) + + # pyre-fixme[2]: Parameter must be annotated. + def initialize_from_texs(self, ds) -> float: + tex_mean = ds.tex_mean.permute(1, 2, 0) + texs = {} + idx = 0 + while ds[idx] is None: + idx += 1 + + for cam in self.cameras: + samp = ds[idx, cam] + if samp is None: + continue + + tex = samp["tex"] + texs[cam] = tex.permute(1, 2, 0) + + stats = {} + for cam in texs.keys(): + t = texs[cam] + mask = (t > 0).all(dim=2) + t = t * ds.tex_std + tex_mean + stats[cam] = (t[mask].mean(dim=0), t[mask].std(dim=0)) + + normstats = {} + for cam in texs.keys(): + mean, std = stats[cam] + imean, istd = stats[self.identity_camera] + scale = istd / std + bias = imean - scale * mean + normstats[cam] = (scale.clamp(max=2), bias) + + for cam, nstats in normstats.items(): + cidx = self.name_to_idx([cam])[0] + if cidx in self.grey_idxs: + nstats = (nstats[0] / 3, nstats[1] / 3) + self.holder.params.data[cidx, 0:3] = nstats[0] + self.holder.params.data[cidx, 3:6] = nstats[1] + return len(stats.keys()) / len(ds.cameras) + + # pyre-fixme[3]: Return type must be annotated. + # pyre-fixme[2]: Parameter must be annotated. 
+ # pyre-fixme[14]: `load_state_dict` overrides method defined in `Module` + # inconsistently. + def load_state_dict(self, state_dict, strict: bool = True): + state_dict = {k[7:]: v for k, v in state_dict.items() if k.startswith("holder.")} + return self.holder.load_state_dict(state_dict, strict=strict) + + # pyre-fixme[14]: `state_dict` overrides method defined in `Module` inconsistently. + # pyre-fixme[3]: Return type must be annotated. + def state_dict( + self, + # pyre-fixme[2]: Parameter must be annotated. + destination=None, + prefix: str = "", + keep_vars: bool = False, + saving: bool = False, + ): + sd = super(CalBase, self).state_dict( + destination=destination, prefix=prefix, keep_vars=keep_vars + ) + if saving: + sd[prefix + "holder.key_list"] = self.holder.key_list + return sd + + def forward(self, image: th.Tensor, cam_idxs: th.Tensor) -> th.Tensor: + params = self.holder(cam_idxs) + outs = [] + hook_scales = [] + for i in range(cam_idxs.shape[0]): + idx = cam_idxs[i] + img = image[i : i + 1] + if idx == self.identity_idx: + outs.append(img) + hook_scales.append(1) + continue + + w, b = params[i, :3], params[i, 3:] + if idx in self.grey_idxs: + b = b.sum() + out = (img * w[None, :, None, None]).sum(dim=1, keepdim=True).expand( + -1, 3, -1, -1 + ) + b + else: + out = img * w[None, :, None, None] + b[None, :, None, None] + outs.append(out) + hook_scales.append(self.gs_lrscale if idx in self.grey_idxs else self.col_lrscale) + + hook_scales = th.tensor(hook_scales, device=image.device, dtype=th.float32) + cal_out = th.cat(outs) + + if self.training and params.requires_grad: + params.register_hook(lambda g, hs=hook_scales: scale_hook(g, hs[:, None])) + return cal_out + + +class CalV6(CalBase): + """ + A faster version of CalV5, which also does not cause CUDA synchronization. It does not support gray + cameras. + """ + + def __init__( + self, + cameras: List[str], + identity_camera: str, + ) -> None: + """ + Args: + cameras (List[str]): A list of cameras. + + identity_camera (str): Name of identity camera. + """ + super(CalBase, self).__init__() + + if identity_camera not in cameras: + identity_camera = cameras[0] + logger.warning( + f"Requested color-calibration identity camera not present, defaulting to {identity_camera}." + ) + + if any(c.startswith("41") for c in cameras): + raise ValueError("Gray cameras are not supported") + + self.identity_camera = identity_camera + self.cameras = cameras + self.holder = ParamHolder( + (3 + 3,), cameras, init_value=th.as_tensor([1, 1, 1, 0, 0, 0], dtype=th.float32) + ) + self.identity_idx: int = self.holder.key_list.index(identity_camera) + self.register_buffer( + "identity", + th.as_tensor([1, 1, 1, 0, 0, 0], dtype=th.float32)[None].expand(len(cameras), -1), + persistent=False, + ) + identity_w = th.zeros_like(self.identity) + identity_w[self.identity_idx, :] = 1.0 + self.register_buffer("identity_w", identity_w, persistent=False) + + # pyre-fixme[14]: `load_state_dict` overrides method defined in `Module` + # inconsistently. 
+ def load_state_dict( + self, state_dict: Mapping[str, Any], strict: bool = True + ) -> th.nn.modules.module._IncompatibleKeys: + state_dict = {k[7:]: v for k, v in state_dict.items() if k.startswith("holder.")} + return self.holder.load_state_dict(state_dict, strict=strict) + + def name_to_idx(self, cam_names: Sequence[str]) -> th.Tensor: + dev = next(self.parameters()).device + return th.tensor([self.cameras.index(cn) for cn in cam_names], device=dev, dtype=th.long) + + # pyre-fixme[14]: `state_dict` overrides method defined in `Module` inconsistently. + def state_dict( + self, + destination: Optional[Mapping[str, Any]] = None, + prefix: str = "", + keep_vars: bool = False, + saving: bool = False, + ) -> Mapping[str, Any]: + sd = super(CalBase, self).state_dict( + destination=destination, prefix=prefix, keep_vars=keep_vars + ) + if saving: + sd[prefix + "holder.key_list"] = self.holder.key_list + return sd + + def forward(self, image: th.Tensor, cam_idxs: th.Tensor) -> th.Tensor: + params = th.lerp(self.holder.params, self.identity, self.identity_w)[ + cam_idxs, :, None, None + ] + w, b = params[:, :3], params[:, 3:] + return th.addcmul(b, w, image) + + +def make_cal(version: str, cal_kwargs: Dict[str, Any]) -> CalBase: + cal_registry = {"v3": CalV3, "v5": CalV5, "v6": CalV6} + + if version not in cal_registry: + raise ValueError(f"{version} not in {cal_registry.keys()}") + + return cal_registry[version](**cal_kwargs) + diff --git a/visualize/ca_body/nn/dof_cal.py b/visualize/ca_body/nn/dof_cal.py new file mode 100644 index 0000000000000000000000000000000000000000..e8205703a1d55ac2bdd57b3120816587869d499c --- /dev/null +++ b/visualize/ca_body/nn/dof_cal.py @@ -0,0 +1,52 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +from typing import List + +import torch as th +import torch.nn as nn + +from torchvision.transforms.functional import gaussian_blur + + +class LearnableBlur(nn.Module): + # TODO: should we make this conditional? + def __init__(self, cameras: List[str]) -> None: + super().__init__() + self.cameras = cameras + self.register_parameter( + "weights_raw", nn.Parameter(th.ones(len(cameras), 3, dtype=th.float32)) + ) + + def name_to_idx(self, cameras: List[str]) -> th.Tensor: + return th.tensor( + [self.cameras.index(c) for c in cameras], + device=self.weights_raw.device, + dtype=th.long, + ) + + # pyre-ignore + def reg(self, cameras: List[str]): + # pyre-ignore + idxs = self.name_to_idx(cameras) + # pyre-ignore + return self.weights_raw[idxs] + + # pyre-ignore + def forward(self, img: th.Tensor, cameras: List[str]): + B = img.shape[0] + # B, C, H, W + idxs = self.name_to_idx(cameras) + # TODO: mask? + # pyre-ignore + weights = th.softmax(self.weights_raw[idxs], dim=-1) + weights = weights.reshape(B, 3, 1, 1, 1) + return ( + weights[:, 0] * img + + weights[:, 1] * gaussian_blur(img, [3, 3]) + + weights[:, 2] * gaussian_blur(img, [7, 7]) + ) diff --git a/visualize/ca_body/nn/face.py b/visualize/ca_body/nn/face.py new file mode 100644 index 0000000000000000000000000000000000000000..9be98c309b99b3b5e7bb53b644b9848c7b9fe668 --- /dev/null +++ b/visualize/ca_body/nn/face.py @@ -0,0 +1,85 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. 
+""" + +from typing import Dict, Tuple + +import numpy as np +import torch as th +import torch.nn as nn + +import visualize.ca_body.nn.layers as la +from attrdict import AttrDict + + +class FaceDecoderFrontal(nn.Module): + def __init__( + self, + assets: AttrDict, + n_latent: int = 256, + n_vert_out: int = 3 * 7306, + tex_out_shp: Tuple[int, int] = (1024, 1024), + tex_roi: Tuple[Tuple[int, int], Tuple[int, int]] = ((0, 0), (1024, 1024)), + ) -> None: + super().__init__() + self.n_latent = n_latent + self.n_vert_out = n_vert_out + self.tex_roi = tex_roi + self.tex_roi_shp: Tuple[int, int] = tuple( + [int(i) for i in np.diff(np.array(tex_roi), axis=0).squeeze()] + ) + self.tex_out_shp = tex_out_shp + + self.encmod = nn.Sequential( + la.LinearWN(n_latent, 256), nn.LeakyReLU(0.2, inplace=True) + ) + self.geommod = nn.Sequential(la.LinearWN(256, n_vert_out)) + + self.viewmod = nn.Sequential(la.LinearWN(3, 8), nn.LeakyReLU(0.2, inplace=True)) + self.texmod2 = nn.Sequential( + la.LinearWN(256 + 8, 256 * 4 * 4), nn.LeakyReLU(0.2, inplace=True) + ) + self.texmod = nn.Sequential( + la.ConvTranspose2dWNUB(256, 256, 8, 8, 4, 2, 1), + nn.LeakyReLU(0.2, inplace=True), + la.ConvTranspose2dWNUB(256, 128, 16, 16, 4, 2, 1), + nn.LeakyReLU(0.2, inplace=True), + la.ConvTranspose2dWNUB(128, 128, 32, 32, 4, 2, 1), + nn.LeakyReLU(0.2, inplace=True), + la.ConvTranspose2dWNUB(128, 64, 64, 64, 4, 2, 1), + nn.LeakyReLU(0.2, inplace=True), + la.ConvTranspose2dWNUB(64, 64, 128, 128, 4, 2, 1), + nn.LeakyReLU(0.2, inplace=True), + la.ConvTranspose2dWNUB(64, 32, 256, 256, 4, 2, 1), + nn.LeakyReLU(0.2, inplace=True), + la.ConvTranspose2dWNUB(32, 8, 512, 512, 4, 2, 1), + nn.LeakyReLU(0.2, inplace=True), + la.ConvTranspose2dWNUB(8, 3, 1024, 1024, 4, 2, 1), + ) + + self.bias = nn.Parameter(th.zeros(3, self.tex_roi_shp[0], self.tex_roi_shp[1])) + self.bias.data.zero_() + + self.register_buffer( + "frontal_view", th.as_tensor(assets.face_frontal_view, dtype=th.float32) + ) + + self.apply(lambda x: la.glorot(x, 0.2)) + la.glorot(self.texmod[-1], 1.0) + + def forward(self, face_embs: th.Tensor) -> Dict[str, th.Tensor]: + B = face_embs.shape[0] + view = self.frontal_view[np.newaxis].expand(B, -1) + encout = self.encmod(face_embs) + geomout = self.geommod(encout) + viewout = self.viewmod(view) + encview = th.cat([encout, viewout], dim=1) + texout = self.texmod(self.texmod2(encview).view(-1, 256, 4, 4)) + out = {"face_geom": geomout.view(geomout.shape[0], -1, 3)} + out["face_tex_raw"] = texout + texout = texout + self.bias[None] + out["face_tex"] = 255 * (texout + 0.5) + return out diff --git a/visualize/ca_body/nn/layers.py b/visualize/ca_body/nn/layers.py new file mode 100644 index 0000000000000000000000000000000000000000..b995e3921638db598b50e1bf48a2e168b95fdf3e --- /dev/null +++ b/visualize/ca_body/nn/layers.py @@ -0,0 +1,934 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. 
+""" + +import copy +import inspect +from typing import Any, Dict, List, Optional, Tuple, Type, Union + +import numpy as np +import torch as th +import torch.nn.functional as thf +from torch.nn import init +from torch.nn.modules.utils import _pair +from torch.nn.utils.weight_norm import remove_weight_norm, WeightNorm + +fc_default_activation = th.nn.LeakyReLU(0.2, inplace=True) + + +def gaussian_kernel(ksize: int, std: Optional[float] = None) -> np.ndarray: + """Generates numpy array filled in with Gaussian values. + + The function generates Gaussian kernel (values according to the Gauss distribution) + on the grid according to the kernel size. + + Args: + ksize (int): The kernel size, must be odd number larger than 1. Otherwise throws an exception. + std (float): The standard deviation, could be None, in which case it will be calculated + accordoing to the kernel size. + + Returns: + np.array: The gaussian kernel. + + """ + + assert ksize % 2 == 1 + radius = ksize // 2 + if std is None: + std = np.sqrt(-(radius**2) / (2 * np.log(0.05))) + + x, y = np.meshgrid(np.linspace(-radius, radius, ksize), np.linspace(-radius, radius, ksize)) + xy = np.stack([x, y], axis=2) + gk = np.exp(-(xy**2).sum(-1) / (2 * std**2)) + gk /= gk.sum() + return gk + + +class FCLayer(th.nn.Module): + # pyre-fixme[2]: Parameter must be annotated. + def __init__(self, n_in, n_out, nonlin=fc_default_activation) -> None: + super().__init__() + self.fc = th.nn.Linear(n_in, n_out, bias=True) + # pyre-fixme[4]: Attribute must be annotated. + self.nonlin = nonlin if nonlin is not None else lambda x: x + + self.fc.bias.data.fill_(0) + th.nn.init.xavier_uniform_(self.fc.weight.data) + + # pyre-fixme[3]: Return type must be annotated. + # pyre-fixme[2]: Parameter must be annotated. + def forward(self, x): + x = self.fc(x) + x = self.nonlin(x) + return x + + +# pyre-fixme[2]: Parameter must be annotated. +def check_args_shadowing(name, method: object, arg_names) -> None: + spec = inspect.getfullargspec(method) + init_args = {*spec.args, *spec.kwonlyargs} + for arg_name in arg_names: + if arg_name in init_args: + raise TypeError(f"{name} attempted to shadow a wrapped argument: {arg_name}") + + +# For backward compatibility. +class TensorMappingHook(object): + def __init__( + self, + name_mapping: List[Tuple[str, str]], + expected_shape: Optional[Dict[str, List[int]]] = None, + ) -> None: + """This hook is expected to be used with "_register_load_state_dict_pre_hook" to + modify names and tensor shapes in the loaded state dictionary. + + Args: + name_mapping: list of string tuples + A list of tuples containing expected names from the state dict and names expected + by the module. + + expected_shape: dict + A mapping from parameter names to expected tensor shapes. + """ + self.name_mapping = name_mapping + # pyre-fixme[4]: Attribute must be annotated. + self.expected_shape = expected_shape if expected_shape is not None else {} + + def __call__( + self, + # pyre-fixme[2]: Parameter must be annotated. + state_dict, + # pyre-fixme[2]: Parameter must be annotated. + prefix, + # pyre-fixme[2]: Parameter must be annotated. + local_metadata, + # pyre-fixme[2]: Parameter must be annotated. + strict, + # pyre-fixme[2]: Parameter must be annotated. + missing_keys, + # pyre-fixme[2]: Parameter must be annotated. + unexpected_keys, + # pyre-fixme[2]: Parameter must be annotated. 
+        error_msgs,
+    ) -> None:
+        for old_name, new_name in self.name_mapping:
+            if prefix + old_name in state_dict:
+                tensor = state_dict.pop(prefix + old_name)
+                if new_name in self.expected_shape:
+                    tensor = tensor.view(*self.expected_shape[new_name])
+                state_dict[prefix + new_name] = tensor
+
+
+# pyre-fixme[3]: Return type must be annotated.
+def weight_norm_wrapper(
+    cls: Type[th.nn.Module],
+    new_cls_name: str,
+    name: str = "weight",
+    g_dim: int = 0,
+    v_dim: Optional[int] = 0,
+):
+    """Wraps a torch.nn.Module class to support weight normalization. The wrapped class
+    is compatible with the fuse/unfuse syntax and can load state dicts from previous
+    implementations.
+
+    Args:
+        cls: Type[th.nn.Module]
+            Class to apply the wrapper to.
+
+        new_cls_name: str
+            Name of the new class created by the wrapper. This should be the name
+            of whatever variable you assign the result of this function to. Ex:
+            ``SomeLayerWN = weight_norm_wrapper(SomeLayer, "SomeLayerWN", ...)``
+
+        name: str
+            Name of the parameter to apply weight normalization to.
+
+        g_dim: int
+            Learnable dimension of the magnitude tensor. Set to None or -1 for a single scalar magnitude.
+            Default values for Linear and Conv2d layers are 0s and for ConvTranspose2d layers are 1s.
+
+        v_dim: int
+            The dimension of the direction tensor along which the norm is calculated independently. Set to
+            None or -1 to calculate the norm over the entire direction (weight) tensor. Default
+            values for most of the WN layers are None to preserve the existing behavior.
+    """
+
+    class Wrap(cls):
+        def __init__(self, *args: Any, name=name, g_dim=g_dim, v_dim=v_dim, **kwargs: Any):
+            # Check if the extra arguments are overwriting arguments for the wrapped class
+            check_args_shadowing(
+                "weight_norm_wrapper", super().__init__, ["name", "g_dim", "v_dim"]
+            )
+            super().__init__(*args, **kwargs)
+
+            # Sanitize v_dim since we are hacking the built-in utility to support
+            # a non-standard WeightNorm implementation.
+            if v_dim is None:
+                v_dim = -1
+            self.weight_norm_args = {"name": name, "g_dim": g_dim, "v_dim": v_dim}
+            self.is_fused = True
+            self.unfuse()
+
+            # For backward compatibility.
+            self._register_load_state_dict_pre_hook(
+                TensorMappingHook(
+                    [(name, name + "_v"), ("g", name + "_g")],
+                    {name + "_g": getattr(self, name + "_g").shape},
+                )
+            )
+
+        def fuse(self):
+            if self.is_fused:
+                return
+            # Check if the module is frozen.
+            param_name = self.weight_norm_args["name"] + "_g"
+            if hasattr(self, param_name) and param_name not in self._parameters:
+                raise ValueError("Trying to fuse frozen module.")
+            remove_weight_norm(self, self.weight_norm_args["name"])
+            self.is_fused = True
+
+        def unfuse(self):
+            if not self.is_fused:
+                return
+            # Check if the module is frozen.
+            param_name = self.weight_norm_args["name"]
+            if hasattr(self, param_name) and param_name not in self._parameters:
+                raise ValueError("Trying to unfuse frozen module.")
+            wn = WeightNorm.apply(
+                self, self.weight_norm_args["name"], self.weight_norm_args["g_dim"]
+            )
+            # Overwrite the dim property to support mismatched norm calculation for the v and g tensors.
+            if wn.dim != self.weight_norm_args["v_dim"]:
+                wn.dim = self.weight_norm_args["v_dim"]
+            # Adjust the norm values.
+            weight = getattr(self, self.weight_norm_args["name"] + "_v")
+            norm = getattr(self, self.weight_norm_args["name"] + "_g")
+            norm.data[:] = th.norm_except_dim(weight, 2, wn.dim)
+            self.is_fused = False
+
+        def __deepcopy__(self, memo):
+            # Delete derived tensor to avoid deepcopy error.
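+            # (editor note) When unfused, `weight` is a tensor derived from
+            # `weight_v`/`weight_g` by the weight-norm pre-hook; deepcopy
+            # cannot handle this derived tensor, so it is dropped here and
+            # restored as None below.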
+ if not self.is_fused: + delattr(self, self.weight_norm_args["name"]) + + # Deepcopy. + cls = self.__class__ + result = cls.__new__(cls) + memo[id(self)] = result + for k, v in self.__dict__.items(): + setattr(result, k, copy.deepcopy(v, memo)) + + if not self.is_fused: + setattr(result, self.weight_norm_args["name"], None) + setattr(self, self.weight_norm_args["name"], None) + return result + + # Allows for pickling of the wrapper: https://bugs.python.org/issue13520 + Wrap.__qualname__ = new_cls_name + + return Wrap + + +# pyre-fixme[2]: Parameter must be annotated. +def is_weight_norm_wrapped(module) -> bool: + for hook in module._forward_pre_hooks.values(): + if isinstance(hook, WeightNorm): + return True + return False + + +class Conv2dUB(th.nn.Conv2d): + def __init__( + self, + in_channels: int, + out_channels: int, + height: int, + width: int, + # pyre-fixme[2]: Parameter must be annotated. + *args, + bias: bool = True, + # pyre-fixme[2]: Parameter must be annotated. + **kwargs, + ) -> None: + """Conv2d with untied bias.""" + super().__init__(in_channels, out_channels, *args, bias=False, **kwargs) + # pyre-fixme[4]: Attribute must be annotated. + self.bias = th.nn.Parameter(th.zeros(out_channels, height, width)) if bias else None + + # TODO: remove this method once upgraded to pytorch 1.8 + # pyre-fixme[3]: Return type must be annotated. + def _conv_forward(self, input: th.Tensor, weight: th.Tensor, bias: Optional[th.Tensor]): + # Copied from pt1.8 source code. + if self.padding_mode != "zeros": + input = thf.pad(input, self._reversed_padding_repeated_twice, mode=self.padding_mode) + return thf.conv2d( + input, weight, bias, self.stride, _pair(0), self.dilation, self.groups + ) + return thf.conv2d( + input, + weight, + bias, + self.stride, + # pyre-fixme[6]: For 5th param expected `Union[List[int], int, Size, + # typing.Tuple[int, ...]]` but got `Union[str, typing.Tuple[int, ...]]`. + self.padding, + self.dilation, + self.groups, + ) + + def forward(self, input: th.Tensor) -> th.Tensor: + output = self._conv_forward(input, self.weight, None) + bias = self.bias + if bias is not None: + # Assertion for jit script. + assert bias is not None + output = output + bias[None] + return output + + +class ConvTranspose2dUB(th.nn.ConvTranspose2d): + def __init__( + self, + in_channels: int, + out_channels: int, + height: int, + width: int, + # pyre-fixme[2]: Parameter must be annotated. + *args, + bias: bool = True, + # pyre-fixme[2]: Parameter must be annotated. + **kwargs, + ) -> None: + """ConvTranspose2d with untied bias.""" + super().__init__(in_channels, out_channels, *args, bias=False, **kwargs) + + if self.padding_mode != "zeros": + raise ValueError("Only `zeros` padding mode is supported for ConvTranspose2dUB") + + # pyre-fixme[4]: Attribute must be annotated. + self.bias = th.nn.Parameter(th.zeros(out_channels, height, width)) if bias else None + + def forward(self, input: th.Tensor, output_size: Optional[List[int]] = None) -> th.Tensor: + # TODO(T111390117): Fix Conv member annotations. + output_padding = self._output_padding( + input=input, + output_size=output_size, + # pyre-fixme[6]: For 3rd param expected `List[int]` but got + # `Tuple[int, ...]`. + stride=self.stride, + # pyre-fixme[6]: For 4th param expected `List[int]` but got + # `Union[str, typing.Tuple[int, ...]]`. + padding=self.padding, + # pyre-fixme[6]: For 5th param expected `List[int]` but got + # `Tuple[int, ...]`. 
+ kernel_size=self.kernel_size, + # This is now required as of D35874490 + num_spatial_dims=input.dim() - 2, + # pyre-fixme[6]: For 6th param expected `Optional[List[int]]` but got + # `Tuple[int, ...]`. + dilation=self.dilation, + ) + + output = thf.conv_transpose2d( + input, + self.weight, + None, + self.stride, + # pyre-fixme[6]: For 5th param expected `Union[List[int], int, Size, + # typing.Tuple[int, ...]]` but got `Union[str, typing.Tuple[int, ...]]`. + self.padding, + output_padding, + self.groups, + self.dilation, + ) + bias = self.bias + if bias is not None: + # Assertion for jit script. + assert bias is not None + output = output + bias[None] + return output + + # NOTE: This function (on super _ConvTransposeNd) was updated in D35874490 with non-optional + # param num_spatial_dims added. Since we need both old/new pytorch versions to work (until those + # changes reach DGX), we're simply copying the updated code here until then. + # TODO remove this function once updated torch code is released to DGX + def _output_padding( + self, + input: th.Tensor, + output_size: Optional[List[int]], + stride: List[int], + padding: List[int], + kernel_size: List[int], + num_spatial_dims: int, + dilation: Optional[List[int]] = None, + ) -> List[int]: + if output_size is None: + # converting to list if was not already + ret = th.nn.modules.utils._single(self.output_padding) + else: + has_batch_dim = input.dim() == num_spatial_dims + 2 + num_non_spatial_dims = 2 if has_batch_dim else 1 + if len(output_size) == num_non_spatial_dims + num_spatial_dims: + output_size = output_size[num_non_spatial_dims:] + if len(output_size) != num_spatial_dims: + raise ValueError( + "ConvTranspose{}D: for {}D input, output_size must have {} or {} elements (got {})".format( + num_spatial_dims, + input.dim(), + num_spatial_dims, + num_non_spatial_dims + num_spatial_dims, + len(output_size), + ) + ) + + min_sizes = th.jit.annotate(List[int], []) + max_sizes = th.jit.annotate(List[int], []) + for d in range(num_spatial_dims): + dim_size = ( + (input.size(d + num_non_spatial_dims) - 1) * stride[d] + - 2 * padding[d] + + (dilation[d] if dilation is not None else 1) * (kernel_size[d] - 1) + + 1 + ) + min_sizes.append(dim_size) + max_sizes.append(min_sizes[d] + stride[d] - 1) + + for i in range(len(output_size)): + size = output_size[i] + min_size = min_sizes[i] + max_size = max_sizes[i] + if size < min_size or size > max_size: + raise ValueError( + ( + "requested an output size of {}, but valid sizes range " + "from {} to {} (for an input of {})" + ).format(output_size, min_sizes, max_sizes, input.size()[2:]) + ) + + res = th.jit.annotate(List[int], []) + for d in range(num_spatial_dims): + res.append(output_size[d] - min_sizes[d]) + + ret = res + return ret + + +# Set default g_dim=0 (Conv2d) or 1 (ConvTranspose2d) and v_dim=None to preserve +# the current weight norm behavior. +# pyre-fixme[5]: Global expression must be annotated. +LinearWN = weight_norm_wrapper(th.nn.Linear, "LinearWN", g_dim=0, v_dim=None) +# pyre-fixme[5]: Global expression must be annotated. +Conv2dWN = weight_norm_wrapper(th.nn.Conv2d, "Conv2dWN", g_dim=0, v_dim=None) +# pyre-fixme[5]: Global expression must be annotated. +Conv2dWNUB = weight_norm_wrapper(Conv2dUB, "Conv2dWNUB", g_dim=0, v_dim=None) +# pyre-fixme[5]: Global expression must be annotated. +ConvTranspose2dWN = weight_norm_wrapper( + th.nn.ConvTranspose2d, "ConvTranspose2dWN", g_dim=1, v_dim=None +) +# pyre-fixme[5]: Global expression must be annotated. 
+ConvTranspose2dWNUB = weight_norm_wrapper( + ConvTranspose2dUB, "ConvTranspose2dWNUB", g_dim=1, v_dim=None +) + + +class InterpolateHook(object): + # pyre-fixme[2]: Parameter must be annotated. + def __init__(self, size=None, scale_factor=None, mode: str = "bilinear") -> None: + """An object storing options for interpolate function""" + # pyre-fixme[4]: Attribute must be annotated. + self.size = size + # pyre-fixme[4]: Attribute must be annotated. + self.scale_factor = scale_factor + self.mode = mode + + # pyre-fixme[3]: Return type must be annotated. + # pyre-fixme[2]: Parameter must be annotated. + def __call__(self, module, x): + assert len(x) == 1, "Module should take only one input for the forward method." + return thf.interpolate( + x[0], + size=self.size, + scale_factor=self.scale_factor, + mode=self.mode, + align_corners=False, + ) + + +# pyre-fixme[3]: Return type must be annotated. +def interpolate_wrapper(cls: Type[th.nn.Module], new_cls_name: str): + """Wraps a torch.nn.Module class and perform additional interpolation on the + first and only positional input of the forward method. + + Args: + cls: Type[th.nn.Module] + Class to apply the wrapper to. + + new_cls_name: str + Name of the new class created by the wrapper. This should be the name + of whatever variable you assign the result of this function to. Ex: + ``UpConv = interpolate_wrapper(Conv, "UpConv", ...)`` + + """ + + class Wrap(cls): + def __init__( + self, *args: Any, size=None, scale_factor=None, mode="bilinear", **kwargs: Any + ): + check_args_shadowing( + "interpolate_wrapper", super().__init__, ["size", "scale_factor", "mode"] + ) + super().__init__(*args, **kwargs) + self.register_forward_pre_hook( + InterpolateHook(size=size, scale_factor=scale_factor, mode=mode) + ) + + # Allows for pickling of the wrapper: https://bugs.python.org/issue13520 + Wrap.__qualname__ = new_cls_name + return Wrap + + +# pyre-fixme[5]: Global expression must be annotated. +UpConv2d = interpolate_wrapper(th.nn.Conv2d, "UpConv2d") +# pyre-fixme[5]: Global expression must be annotated. +UpConv2dWN = interpolate_wrapper(Conv2dWN, "UpConv2dWN") +# pyre-fixme[5]: Global expression must be annotated. +UpConv2dWNUB = interpolate_wrapper(Conv2dWNUB, "UpConv2dWNUB") + + +class GlobalAvgPool(th.nn.Module): + def __init__(self) -> None: + super().__init__() + + # pyre-fixme[3]: Return type must be annotated. + # pyre-fixme[2]: Parameter must be annotated. + def forward(self, x): + return x.view(x.shape[0], x.shape[1], -1).mean(dim=2) + + +class Upsample(th.nn.Module): + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__() + # pyre-fixme[4]: Attribute must be annotated. + self.args = args + # pyre-fixme[4]: Attribute must be annotated. + self.kwargs = kwargs + + # pyre-fixme[3]: Return type must be annotated. + # pyre-fixme[2]: Parameter must be annotated. + def forward(self, x): + return thf.interpolate(x, *self.args, **self.kwargs) + + +class DenseAffine(th.nn.Module): + # Per-pixel affine transform layer. + + # pyre-fixme[2]: Parameter must be annotated. + def __init__(self, shape) -> None: + super().__init__() + + self.W = th.nn.Parameter(th.ones(*shape)) + self.b = th.nn.Parameter(th.zeros(*shape)) + + # pyre-fixme[3]: Return type must be annotated. + # pyre-fixme[2]: Parameter must be annotated. 
+ def forward(self, x, scale=None, crop=None): + W = self.W + b = self.b + + if scale is not None: + W = thf.interpolate(W, scale_factor=scale, mode="bilinear") + b = thf.interpolate(b, scale_factor=scale, mode="bilinear") + + if crop is not None: + W = W[..., crop[0] : crop[1], crop[2] : crop[3]] + b = b[..., crop[0] : crop[1], crop[2] : crop[3]] + + return x * W + b + + +def glorot(m: th.nn.Module, alpha: float = 1.0) -> None: + gain = np.sqrt(2.0 / (1.0 + alpha**2)) + + if isinstance(m, th.nn.Conv2d): + ksize = m.kernel_size[0] * m.kernel_size[1] + n1 = m.in_channels + n2 = m.out_channels + + std = gain * np.sqrt(2.0 / ((n1 + n2) * ksize)) + elif isinstance(m, th.nn.ConvTranspose2d): + ksize = m.kernel_size[0] * m.kernel_size[1] // 4 + n1 = m.in_channels + n2 = m.out_channels + + std = gain * np.sqrt(2.0 / ((n1 + n2) * ksize)) + elif isinstance(m, th.nn.ConvTranspose3d): + ksize = m.kernel_size[0] * m.kernel_size[1] * m.kernel_size[2] // 8 + n1 = m.in_channels + n2 = m.out_channels + + std = gain * np.sqrt(2.0 / ((n1 + n2) * ksize)) + elif isinstance(m, th.nn.Linear): + n1 = m.in_features + n2 = m.out_features + + std = gain * np.sqrt(2.0 / (n1 + n2)) + else: + return + + is_wnw = is_weight_norm_wrapped(m) + if is_wnw: + m.fuse() + + m.weight.data.uniform_(-std * np.sqrt(3.0), std * np.sqrt(3.0)) + if m.bias is not None: + m.bias.data.zero_() + + if isinstance(m, th.nn.ConvTranspose2d): + # hardcoded for stride=2 for now + m.weight.data[:, :, 0::2, 1::2] = m.weight.data[:, :, 0::2, 0::2] + m.weight.data[:, :, 1::2, 0::2] = m.weight.data[:, :, 0::2, 0::2] + m.weight.data[:, :, 1::2, 1::2] = m.weight.data[:, :, 0::2, 0::2] + + if is_wnw: + m.unfuse() + + +def make_tuple(x: Union[int, Tuple[int, int]], n: int) -> Tuple[int, int]: + if isinstance(x, int): + return tuple([x for _ in range(n)]) + else: + return x + + +class LinearELR(th.nn.Module): + def __init__( + self, + in_features: int, + out_features: int, + bias: bool = True, + gain: Optional[float] = None, + lr_mul: float = 1.0, + bias_lr_mul: Optional[float] = None, + ) -> None: + super(LinearELR, self).__init__() + self.in_features = in_features + self.weight = th.nn.Parameter(th.zeros(out_features, in_features, dtype=th.float32)) + if bias: + self.bias: th.nn.Parameter = th.nn.Parameter(th.zeros(out_features, dtype=th.float32)) + else: + self.register_parameter("bias", None) + self.std: float = 0.0 + if gain is None: + self.gain: float = np.sqrt(2.0) + else: + self.gain: float = gain + self.lr_mul = lr_mul + if bias_lr_mul is None: + bias_lr_mul = lr_mul + self.bias_lr_mul = bias_lr_mul + self.reset_parameters() + + def reset_parameters(self) -> None: + self.std = self.gain / np.sqrt(self.in_features) * self.lr_mul + init.normal_(self.weight, mean=0, std=1.0 / self.lr_mul) + + if self.bias is not None: + with th.no_grad(): + self.bias.zero_() + + def forward(self, x: th.Tensor) -> th.Tensor: + bias = self.bias + if bias is not None: + bias = bias * self.bias_lr_mul + return thf.linear(x, self.weight.mul(self.std), bias) + + +class Conv2dELR(th.nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + kernel_size: Union[int, Tuple[int, int]], + stride: Union[int, Tuple[int, int]] = 1, + padding: Union[int, Tuple[int, int]] = 0, + output_padding: Union[int, Tuple[int, int]] = 0, + dilation: Union[int, Tuple[int, int]] = 1, + groups: int = 1, + bias: bool = True, + untied: bool = False, + height: int = 1, + width: int = 1, + gain: Optional[float] = None, + transpose: bool = False, + fuse_box_filter: bool = False, 
+ lr_mul: float = 1.0, + bias_lr_mul: Optional[float] = None, + ) -> None: + super().__init__() + if in_channels % groups != 0: + raise ValueError("in_channels must be divisible by groups") + if out_channels % groups != 0: + raise ValueError("out_channels must be divisible by groups") + self.in_channels = in_channels + self.out_channels = out_channels + self.kernel_size: Tuple[int, int] = make_tuple(kernel_size, 2) + self.stride: Tuple[int, int] = make_tuple(stride, 2) + self.padding: Tuple[int, int] = make_tuple(padding, 2) + self.output_padding: Tuple[int, int] = make_tuple(output_padding, 2) + self.dilation: Tuple[int, int] = make_tuple(dilation, 2) + self.groups = groups + if gain is None: + self.gain: float = np.sqrt(2.0) + else: + self.gain: float = gain + self.lr_mul = lr_mul + if bias_lr_mul is None: + bias_lr_mul = lr_mul + self.bias_lr_mul = bias_lr_mul + self.transpose = transpose + self.fan_in: float = np.prod(self.kernel_size) * in_channels // groups + self.fuse_box_filter = fuse_box_filter + if transpose: + self.weight: th.nn.Parameter = th.nn.Parameter( + th.zeros(in_channels, out_channels // groups, *self.kernel_size, dtype=th.float32) + ) + else: + self.weight: th.nn.Parameter = th.nn.Parameter( + th.zeros(out_channels, in_channels // groups, *self.kernel_size, dtype=th.float32) + ) + if bias: + if untied: + self.bias: th.nn.Parameter = th.nn.Parameter( + th.zeros(out_channels, height, width, dtype=th.float32) + ) + else: + self.bias: th.nn.Parameter = th.nn.Parameter( + th.zeros(out_channels, dtype=th.float32) + ) + else: + self.register_parameter("bias", None) + self.untied = untied + self.std: float = 0.0 + self.reset_parameters() + + def reset_parameters(self) -> None: + self.std = self.gain / np.sqrt(self.fan_in) * self.lr_mul + init.normal_(self.weight, mean=0, std=1.0 / self.lr_mul) + + if self.bias is not None: + with th.no_grad(): + self.bias.zero_() + + def forward(self, x: th.Tensor) -> th.Tensor: + if self.transpose: + w = self.weight + if self.fuse_box_filter: + w = thf.pad(w, (1, 1, 1, 1), mode="constant") + w = w[:, :, 1:, 1:] + w[:, :, :-1, 1:] + w[:, :, 1:, :-1] + w[:, :, :-1, :-1] + bias = self.bias + if bias is not None: + bias = bias * self.bias_lr_mul + out = thf.conv_transpose2d( + x, + w * self.std, + bias if not self.untied else None, + stride=self.stride, + padding=self.padding, + output_padding=self.output_padding, + dilation=self.dilation, + groups=self.groups, + ) + if self.untied and bias is not None: + out = out + bias[None, ...] + return out + else: + w = self.weight + if self.fuse_box_filter: + w = thf.pad(w, (1, 1, 1, 1), mode="constant") + w = ( + w[:, :, 1:, 1:] + w[:, :, :-1, 1:] + w[:, :, 1:, :-1] + w[:, :, :-1, :-1] + ) * 0.25 + bias = self.bias + if bias is not None: + bias = bias * self.bias_lr_mul + out = thf.conv2d( + x, + w * self.std, + bias if not self.untied else None, + stride=self.stride, + padding=self.padding, + dilation=self.dilation, + groups=self.groups, + ) + if self.untied and bias is not None: + out = out + bias[None, ...] + return out + + +class ConcatPyramid(th.nn.Module): + def __init__( + self, + # pyre-fixme[2]: Parameter must be annotated. + branch, + # pyre-fixme[2]: Parameter must be annotated. + n_concat_in, + every_other: bool = True, + ksize: int = 7, + # pyre-fixme[2]: Parameter must be annotated. + kstd=None, + transposed: bool = False, + ) -> None: + """Module which wraps an up/down conv branch taking one input X and + converts it into a branch which takes two inputs X, Y. 
At each layer of
+        the original branch, we concatenate the previous output and Y,
+        up/downsampling Y appropriately, before running the layer.
+
+        Args:
+            branch: th.nn.Sequential or th.nn.ModuleList
+                A branch containing up/down convs, optionally separated by nonlinearities.
+
+            n_concat_in: int
+                Number of channels in the to-be-concatenated input (Y).
+
+            every_other: bool
+                If every other layer is a nonlinearity, set this flag. Default is on.
+
+            ksize: int
+                Kernel size for the Gaussian blur used to downsample each step of the pyramid.
+
+            kstd: int or None
+                Kernel std. dev. for the Gaussian blur used to downsample each step of the pyramid.
+                If None, it is determined automatically.
+
+            transposed: bool
+                Whether the conv stack contains transposed convolutions.
+        """
+        super().__init__()
+        assert isinstance(branch, (th.nn.Sequential, th.nn.ModuleList))
+
+        # pyre-fixme[4]: Attribute must be annotated.
+        self.branch = branch
+        # pyre-fixme[4]: Attribute must be annotated.
+        self.n_concat_in = n_concat_in
+        self.every_other = every_other
+        self.ksize = ksize
+        # pyre-fixme[4]: Attribute must be annotated.
+        self.kstd = kstd
+        self.transposed = transposed
+        if every_other:
+            # pyre-fixme[4]: Attribute must be annotated.
+            self.levels = int(np.ceil(len(branch) / 2))
+        else:
+            self.levels = len(branch)
+
+        kernel = th.from_numpy(gaussian_kernel(ksize, kstd)).float()
+        self.register_buffer("blur_kernel", kernel[None, None].expand(n_concat_in, -1, -1, -1))
+
+    # pyre-fixme[3]: Return type must be annotated.
+    # pyre-fixme[2]: Parameter must be annotated.
+    def forward(self, x, y):
+        if self.transposed:
+            blurred = thf.conv2d(
+                y, self.blur_kernel, groups=self.n_concat_in, padding=self.ksize // 2
+            )
+            pyramid = [blurred[:, :, ::2, ::2]]
+        else:
+            pyramid = [y]
+
+        for _ in range(self.levels - 1):
+            blurred = thf.conv2d(
+                pyramid[0], self.blur_kernel, groups=self.n_concat_in, padding=self.ksize // 2
+            )
+            pyramid.insert(0, blurred[:, :, ::2, ::2])
+
+        out = x
+        for i, layer in enumerate(self.branch):
+            if (i % 2) == 0 or not self.every_other:
+                idx = i // 2 if self.every_other else i
+                out = th.cat([out, pyramid[idx]], dim=1)
+            out = layer(out)
+        return out
+
+
+# From paper "Making Convolutional Networks Shift-Invariant Again"
+# https://richzhang.github.io/antialiased-cnns/
+# pyre-fixme[3]: Return type must be annotated.
+# pyre-fixme[2]: Parameter must be annotated.
+def get_pad_layer(pad_type):
+    if pad_type in ["refl", "reflect"]:
+        PadLayer = th.nn.ReflectionPad2d
+    elif pad_type in ["repl", "replicate"]:
+        PadLayer = th.nn.ReplicationPad2d
+    elif pad_type == "zero":
+        PadLayer = th.nn.ZeroPad2d
+    else:
+        raise ValueError("Pad type [%s] not recognized" % pad_type)
+    return PadLayer
+
+
+class Downsample(th.nn.Module):
+    # pyre-fixme[3]: Return type must be annotated.
+    # pyre-fixme[2]: Parameter must be annotated.
+    def __init__(self, pad_type="reflect", filt_size=3, stride=2, channels=None, pad_off=0):
+        super(Downsample, self).__init__()
+        # pyre-fixme[4]: Attribute must be annotated.
+        self.filt_size = filt_size
+        # pyre-fixme[4]: Attribute must be annotated.
+        self.pad_off = pad_off
+        # pyre-fixme[4]: Attribute must be annotated.
+        self.pad_sizes = [
+            int(1.0 * (filt_size - 1) / 2),
+            int(np.ceil(1.0 * (filt_size - 1) / 2)),
+            int(1.0 * (filt_size - 1) / 2),
+            int(np.ceil(1.0 * (filt_size - 1) / 2)),
+        ]
+        self.pad_sizes = [pad_size + pad_off for pad_size in self.pad_sizes]
+        # pyre-fixme[4]: Attribute must be annotated.
+        self.stride = stride
+        self.off = int((self.stride - 1) / 2.0)
+        # pyre-fixme[4]: Attribute must be annotated.
+        self.channels = channels
+
+        # print('Filter size [%i]'%filt_size)
+        if self.filt_size == 1:
+            a = np.array(
+                [
+                    1.0,
+                ]
+            )
+        elif self.filt_size == 2:
+            a = np.array([1.0, 1.0])
+        elif self.filt_size == 3:
+            a = np.array([1.0, 2.0, 1.0])
+        elif self.filt_size == 4:
+            a = np.array([1.0, 3.0, 3.0, 1.0])
+        elif self.filt_size == 5:
+            a = np.array([1.0, 4.0, 6.0, 4.0, 1.0])
+        elif self.filt_size == 6:
+            a = np.array([1.0, 5.0, 10.0, 10.0, 5.0, 1.0])
+        elif self.filt_size == 7:
+            a = np.array([1.0, 6.0, 15.0, 20.0, 15.0, 6.0, 1.0])
+        else:
+            raise ValueError("Filter size [%i] not supported" % self.filt_size)
+
+        filt = th.Tensor(a[:, None] * a[None, :])
+        filt = filt / th.sum(filt)
+        self.register_buffer("filt", filt[None, None, :, :].repeat((self.channels, 1, 1, 1)))
+
+        # pyre-fixme[4]: Attribute must be annotated.
+        self.pad = get_pad_layer(pad_type)(self.pad_sizes)
+
+    # pyre-fixme[3]: Return type must be annotated.
+    # pyre-fixme[2]: Parameter must be annotated.
+    def forward(self, inp):
+        if self.filt_size == 1:
+            if self.pad_off == 0:
+                return inp[:, :, :: self.stride, :: self.stride]
+            else:
+                return self.pad(inp)[:, :, :: self.stride, :: self.stride]
+        else:
+            return th.nn.functional.conv2d(
+                self.pad(inp), self.filt, stride=self.stride, groups=inp.shape[1]
+            )
diff --git a/visualize/ca_body/nn/shadow.py b/visualize/ca_body/nn/shadow.py
new file mode 100644
index 0000000000000000000000000000000000000000..e23c098996e503fb6a6baa6d2d9eba6b31393db5
--- /dev/null
+++ b/visualize/ca_body/nn/shadow.py
@@ -0,0 +1,615 @@
+"""
+Copyright (c) Meta Platforms, Inc. and affiliates.
+All rights reserved.
+This source code is licensed under the license found in the
+LICENSE file in the root directory of this source tree.
+"""
+
+import logging
+
+from typing import Optional, Dict
+
+
+import numpy as np
+import torch as th
+import torch.nn as nn
+import torch.nn.functional as F
+
+# TODO: use shared utils here?
+import visualize.ca_body.nn.layers as la
+from visualize.ca_body.nn.blocks import tile2d, weights_initializer
+
+logger = logging.getLogger(__name__)
+
+
+class ShadowUNet(nn.Module):
+    def __init__(
+        self,
+        uv_size,
+        ao_mean,
+        shadow_size,
+        lrelu_slope=0.2,
+        beta=1.0,
+        n_dims=64,
+        interp_mode="bilinear",
+        biases=True,
+        trainable_mean=False,
+    ):
+        super().__init__()
+
+        # this is the size of the output
+        self.uv_size = uv_size
+        self.shadow_size = shadow_size
+
+        ao_mean = F.interpolate(
+            th.as_tensor(ao_mean)[np.newaxis],
+            size=(self.shadow_size, self.shadow_size),
+        )[0]
+        if not trainable_mean:
+            # TODO:
+            self.register_buffer("ao_mean", ao_mean)
+        else:
+            self.register_parameter("ao_mean", th.nn.Parameter(ao_mean))
+
+        self.depth = 3
+        self.lrelu_slope = lrelu_slope
+        self.interp_mode = interp_mode
+        self.align_corners = None
+        if interp_mode == "bilinear":
+            self.align_corners = False
+
+        # the base number of dimensions for the shadow maps
+        n_dims = n_dims
+
+        # TODO: generate this?
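+        # Four encoder levels, all at the base width n_dims; the first level
+        # takes the single-channel AO map. self.sizes below halves the
+        # resolution per level, e.g. shadow_size=512 gives feature maps at
+        # [512, 256, 128, 64].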
+ self.n_enc_dims = [ + (1, n_dims), + (n_dims, n_dims), + (n_dims, n_dims), + (n_dims, n_dims), + ] + + self.sizes = [shadow_size // (2**i) for i in range(len(self.n_enc_dims))] + + logger.debug(f"sizes: {self.sizes}") + + self.enc_layers = nn.ModuleList() + for i, size in enumerate(self.sizes): + n_in, n_out = self.n_enc_dims[i] + logger.debug(f"EncoderLayers({i}): {n_in}, {n_out}, {size}") + self.enc_layers.append( + nn.Sequential( + la.Conv2dWNUB( + n_in, + n_out, + kernel_size=3, + height=size, + width=size, + stride=1, + padding=1, + ), + nn.LeakyReLU(self.lrelu_slope, inplace=True), + ) + ) + + self.n_dec_dims = [ + (n_dims, n_dims), + (n_dims * 2, n_dims), + (n_dims * 2, n_dims), + (n_dims * 2, n_dims), + ] + self.dec_layers = nn.ModuleList() + for i in range(len(self.sizes)): + size = self.sizes[-i - 1] + n_in, n_out = self.n_dec_dims[i] + logger.debug(f"DecoderLayer({i}): {n_in}, {n_out}, {size}") + + self.dec_layers.append( + nn.Sequential( + la.Conv2dWNUB( + n_in, + n_out, + kernel_size=3, + height=size, + width=size, + stride=1, + padding=1, + ), + nn.LeakyReLU(self.lrelu_slope, inplace=True), + ) + ) + + self.apply(weights_initializer(self.lrelu_slope)) + + if biases: + self.shadow_pred = la.Conv2dWNUB( + self.n_dec_dims[-1][-1], + 1, + kernel_size=3, + height=self.sizes[0], + width=self.sizes[0], + stride=1, + padding=1, + ) + else: + self.shadow_pred = la.Conv2dWN( + self.n_dec_dims[-1][-1], + 1, + kernel_size=3, + stride=1, + padding=1, + ) + + self.shadow_pred.apply(weights_initializer(1.0)) + self.beta = beta + + def forward(self, ao_map): + # resizing the inputs if necessary + if ao_map.shape[-2:] != (self.shadow_size, self.shadow_size): + ao_map = F.interpolate(ao_map, size=(self.shadow_size, self.shadow_size)) + + x = ao_map - self.ao_mean + + enc_acts = [] + # unet enc + for i, layer in enumerate(self.enc_layers): + # TODO: try applying a 1D sparse op? + x = layer(x) + enc_acts.append(x) + # TODO: add this layer elsewhere? + if i < len(self.sizes) - 1: + x = F.interpolate( + x, + scale_factor=0.5, + mode="bilinear", + recompute_scale_factor=True, + align_corners=True, + ) + + # we do not need the last one? + for i, layer in enumerate(self.dec_layers): + if i > 0: + x_prev = enc_acts[-i - 1] + x = F.interpolate(x, size=x_prev.shape[2:4], mode="bilinear", align_corners=True) + x = th.cat([x, x_prev], dim=1) + x = layer(x) + + shadow_map_lowres = th.sigmoid(self.shadow_pred(x) + self.beta) + shadow_map = F.interpolate( + shadow_map_lowres, + (self.uv_size, self.uv_size), + mode=self.interp_mode, + align_corners=self.align_corners, + ) + + return { + "shadow_map": shadow_map, + "ao_map": ao_map, + "shadow_map_lowres": shadow_map_lowres, + } + + +class FloorShadowDecoder(nn.Module): + def __init__( + self, + uv_size, + beta=1.0, + ): + super().__init__() + + # TODO: can we reduce # dims here? 
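+        # Fixed five-level U-Net; the two height/width arguments of each
+        # Conv2dWNUB / ConvTranspose2dWNUB are that layer's *output* resolution,
+        # so this stack assumes the interpolated input is 512x512, i.e.
+        # uv_size == 512. Rough usage sketch (shapes assumed):
+        #   dec = FloorShadowDecoder(uv_size=512)
+        #   shadow = dec(th.rand(1, 1, 512, 512))["shadow_map"]  # (1, 1, 512, 512)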
+ self.down1 = nn.Sequential(la.Conv2dWNUB(1, 64, 256, 256, 4, 2, 1), nn.LeakyReLU(0.2)) + self.down2 = nn.Sequential(la.Conv2dWNUB(64, 64, 128, 128, 4, 2, 1), nn.LeakyReLU(0.2)) + self.down3 = nn.Sequential(la.Conv2dWNUB(64, 128, 64, 64, 4, 2, 1), nn.LeakyReLU(0.2)) + self.down4 = nn.Sequential(la.Conv2dWNUB(128, 256, 32, 32, 4, 2, 1), nn.LeakyReLU(0.2)) + self.down5 = nn.Sequential(la.Conv2dWNUB(256, 512, 16, 16, 4, 2, 1), nn.LeakyReLU(0.2)) + self.up1 = nn.Sequential( + la.ConvTranspose2dWNUB(512, 256, 32, 32, 4, 2, 1), nn.LeakyReLU(0.2) + ) + self.up2 = nn.Sequential( + la.ConvTranspose2dWNUB(256, 128, 64, 64, 4, 2, 1), nn.LeakyReLU(0.2) + ) + self.up3 = nn.Sequential( + la.ConvTranspose2dWNUB(128, 64, 128, 128, 4, 2, 1), nn.LeakyReLU(0.2) + ) + self.up4 = nn.Sequential( + la.ConvTranspose2dWNUB(64, 64, 256, 256, 4, 2, 1), nn.LeakyReLU(0.2) + ) + self.up5 = nn.Sequential(la.ConvTranspose2dWNUB(64, 1, 512, 512, 4, 2, 1)) + + self.uv_size = uv_size + + self.apply(lambda x: la.glorot(x, 0.2)) + la.glorot(self.up5, 1.0) + + self.beta = beta + + def forward(self, aomap: th.Tensor): + aomap = F.interpolate( + aomap, + size=(self.uv_size, self.uv_size), + mode="bilinear", + align_corners=True, + ) + + x2 = self.down1(aomap - 0.5) + x3 = self.down2(x2) + x4 = self.down3(x3) + x5 = self.down4(x4) + x6 = self.down5(x5) + x = self.up1(x6) + x5 + x = self.up2(x) + x4 + x = self.up3(x) + x3 + x = self.up4(x) + x2 + logits = (th.tanh(self.up5(x) + aomap) + 1.0) / 2.0 + + return {"shadow_map": logits} + + +class ShadowUNet_PoseCond(nn.Module): + def __init__( + self, + uv_size, + ao_mean, + shadow_size, + # uv_coords, # for bottleneck + # uv_mapping, # for bottleneck + # uv_faces, # for bottleneck + lrelu_slope=0.2, + beta=1.0, + n_dims=64, + n_pose_dims=6, # root orientation only + n_pose_enc_dims=32, + interp_mode="bilinear", + ): + super().__init__() + + self.uv_size = uv_size + + self.register_buffer("ao_mean", th.as_tensor(ao_mean)) + + # self.register_buffer("uv_coords", th.as_tensor(uv_coords)) + # self.register_buffer("uv_mapping", th.as_tensor(uv_mapping)) + # self.register_buffer("uv_faces", th.as_tensor(uv_faces)) + + self.depth = 3 + self.lrelu_slope = lrelu_slope + self.interp_mode = interp_mode + + self.uv_size = uv_size + + # the base number of dimensions for the shadow maps + n_dims = n_dims + + # TODO: generate this? 
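+        # Same encoder layout as ShadowUNet; the root-pose encoding is injected
+        # at the bottleneck, which is why the first decoder level below expects
+        # n_dims + n_pose_enc_dims input channels.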
+ self.n_enc_dims = [ + (1, n_dims), + (n_dims, n_dims), + (n_dims, n_dims), + (n_dims, n_dims), + ] + + self.shadow_size = shadow_size + self.sizes = [shadow_size // (2**i) for i in range(len(self.n_enc_dims))] + + logger.info(f" shadow map size: {self.shadow_size}") + # logger.info(f"sizes: {self.sizes}") + + ##### + ## FC for root pose encoding + self.num_pose_dims = n_pose_dims + self.num_pose_enc_dims = n_pose_enc_dims + self.pose_fc_block = nn.Sequential( + la.LinearWN(self.num_pose_dims, self.num_pose_enc_dims), + nn.LeakyReLU(lrelu_slope), + ) + + self.pose_conv_block = la.Conv2dWNUB( + in_channels=self.num_pose_dims, + out_channels=self.num_pose_enc_dims, + kernel_size=3, + height=self.sizes[-1], + width=self.sizes[-1], + padding=1, + ) + + self.enc_layers = nn.ModuleList() + for i, size in enumerate(self.sizes): + n_in, n_out = self.n_enc_dims[i] + # logger.info(f"EncoderLayers({i}): {n_in}, {n_out}, {size}") + self.enc_layers.append( + nn.Sequential( + la.Conv2dWNUB( + n_in, + n_out, + kernel_size=3, + height=size, + width=size, + stride=1, + padding=1, + ), + nn.LeakyReLU(self.lrelu_slope, inplace=True), + ) + ) + + self.n_dec_dims = [ + (n_dims + self.num_pose_enc_dims, n_dims), + (n_dims * 2, n_dims), + (n_dims * 2, n_dims), + (n_dims * 2, n_dims), + ] + self.dec_layers = nn.ModuleList() + for i in range(len(self.sizes)): + size = self.sizes[-i - 1] + n_in, n_out = self.n_dec_dims[i] + # logger.info(f"DecoderLayer({i}): {n_in}, {n_out}, {size}") + self.dec_layers.append( + nn.Sequential( + la.Conv2dWNUB( + n_in, + n_out, + kernel_size=3, + height=size, + width=size, + stride=1, + padding=1, + ), + nn.LeakyReLU(self.lrelu_slope, inplace=True), + ) + ) + + self.apply(weights_initializer(self.lrelu_slope)) + self.shadow_pred = la.Conv2dWNUB( + self.n_dec_dims[-1][-1], + 1, + kernel_size=3, + height=self.sizes[0], + width=self.sizes[0], + stride=1, + padding=1, + ) + + self.shadow_pred.apply(weights_initializer(1.0)) + self.beta = beta + + def forward(self, ao_map, pose_vec): + # import pdb; pdb.set_trace() + x = ao_map - self.ao_mean + + x = F.interpolate(x, size=(self.shadow_size, self.shadow_size)) + + enc_acts = [] + # unet enc + for i, layer in enumerate(self.enc_layers): + # for i in range(len(self.sizes)): + # TODO: try applying a 1D sparse op? + # x = self.enc_layers[i](x) + x = layer(x) + enc_acts.append(x) + # TODO: add this layer elsewhere? + if i < len(self.sizes) - 1: + x = F.interpolate( + x, + scale_factor=0.5, + mode="bilinear", + recompute_scale_factor=True, + align_corners=True, + ) + + pose_enc = self.pose_conv_block(tile2d(pose_vec, self.sizes[-1])) + + # we do not need the last one? 
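+        # tile2d broadcasts the root-pose vector over a sizes[-1] x sizes[-1]
+        # grid; the conv block maps it to n_pose_enc_dims channels that are
+        # fused with the bottleneck activations along the channel dimension.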
+        x = th.cat([x, pose_enc], dim=1)
+
+        for i, layer in enumerate(self.dec_layers):
+            if i > 0:
+                x_prev = enc_acts[-i - 1]
+                x = F.interpolate(x, size=x_prev.shape[2:4], mode="bilinear", align_corners=True)
+                x = th.cat([x, x_prev], dim=1)
+            x = layer(x)
+
+        shadow_map_lowres = th.sigmoid(self.shadow_pred(x) + self.beta)
+        shadow_map = F.interpolate(
+            shadow_map_lowres, (self.uv_size, self.uv_size), mode=self.interp_mode
+        )
+        return {"shadow_map": shadow_map, "ao_map": ao_map}
+
+
+class PoseToShadow(nn.Module):
+    def __init__(
+        self,
+        n_pose_dims,
+        uv_size,
+        beta=1.0,
+    ) -> None:
+        super().__init__()
+        self.n_pose_dims = n_pose_dims
+        self.uv_size = uv_size
+
+        self.fc_block = nn.Sequential(
+            la.LinearWN(self.n_pose_dims, 256 * 4 * 4),
+            nn.LeakyReLU(0.2),
+        )
+        self.conv_block = nn.Sequential(
+            la.ConvTranspose2dWNUB(256, 256, 8, 8, 4, 2, 1),
+            nn.LeakyReLU(0.2),
+            la.ConvTranspose2dWNUB(256, 128, 16, 16, 4, 2, 1),
+            nn.LeakyReLU(0.2),
+            la.ConvTranspose2dWNUB(128, 128, 32, 32, 4, 2, 1),
+            nn.LeakyReLU(0.2),
+            la.ConvTranspose2dWNUB(128, 64, 64, 64, 4, 2, 1),
+            nn.LeakyReLU(0.2),
+            # la.ConvTranspose2dWNUB(64, 64, 128, 128, 4, 2, 1),
+            # nn.LeakyReLU(0.2),
+            # la.ConvTranspose2dWNUB(64, 1, 256, 256, 4, 2, 1),
+            la.ConvTranspose2dWNUB(64, 1, 128, 128, 4, 2, 1),
+        )
+        self.beta = beta
+        self.apply(lambda x: la.glorot(x, 0.2))
+        la.glorot(self.conv_block[-1], 1.0)
+
+    def forward(self, pose: th.Tensor):
+        assert pose.shape[-1] == self.n_pose_dims
+        x = self.fc_block(pose)
+        x = self.conv_block(x.reshape(-1, 256, 4, 4))
+        shadow_map_lowres = th.sigmoid(x + self.beta)
+
+        shadow_map = F.interpolate(
+            shadow_map_lowres, size=(self.uv_size, self.uv_size), mode="bilinear"
+        )
+        return {"shadow_map": shadow_map}
+
+
+class DistMapShadowUNet(nn.Module):
+    def __init__(
+        self,
+        uv_size,
+        shadow_size,
+        n_dist_joints,
+        lrelu_slope=0.2,
+        beta=1.0,
+        n_dims=64,
+        interp_mode="bilinear",
+        biases=True,
+    ):
+        super().__init__()
+
+        # this is the size of the output
+        self.uv_size = uv_size
+        self.shadow_size = shadow_size
+
+        self.depth = 3
+        self.lrelu_slope = lrelu_slope
+        self.interp_mode = interp_mode
+        self.align_corners = None
+        if interp_mode == "bilinear":
+            self.align_corners = False
+
+        # the base number of dimensions for the shadow maps
+        n_dims = n_dims
+
+        # TODO: generate this?
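+        # Same U-Net topology as ShadowUNet, but the encoder ingests one
+        # distance map per joint (n_dist_joints input channels) and forward()
+        # subtracts no mean map.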
+ self.n_enc_dims = [ + (n_dist_joints, n_dims), + (n_dims, n_dims), + (n_dims, n_dims), + (n_dims, n_dims), + ] + + self.sizes = [shadow_size // (2**i) for i in range(len(self.n_enc_dims))] + + logger.debug(f"sizes: {self.sizes}") + + self.enc_layers = nn.ModuleList() + for i, size in enumerate(self.sizes): + n_in, n_out = self.n_enc_dims[i] + logger.debug(f"EncoderLayers({i}): {n_in}, {n_out}, {size}") + self.enc_layers.append( + nn.Sequential( + la.Conv2dWNUB( + n_in, + n_out, + kernel_size=3, + height=size, + width=size, + stride=1, + padding=1, + ), + nn.LeakyReLU(self.lrelu_slope, inplace=True), + ) + ) + + self.n_dec_dims = [ + (n_dims, n_dims), + (n_dims * 2, n_dims), + (n_dims * 2, n_dims), + (n_dims * 2, n_dims), + ] + self.dec_layers = nn.ModuleList() + for i in range(len(self.sizes)): + size = self.sizes[-i - 1] + n_in, n_out = self.n_dec_dims[i] + logger.debug(f"DecoderLayer({i}): {n_in}, {n_out}, {size}") + + self.dec_layers.append( + nn.Sequential( + la.Conv2dWNUB( + n_in, + n_out, + kernel_size=3, + height=size, + width=size, + stride=1, + padding=1, + ), + nn.LeakyReLU(self.lrelu_slope, inplace=True), + ) + ) + + self.apply(weights_initializer(self.lrelu_slope)) + + if biases: + self.shadow_pred = la.Conv2dWNUB( + self.n_dec_dims[-1][-1], + 1, + kernel_size=3, + height=self.sizes[0], + width=self.sizes[0], + stride=1, + padding=1, + ) + else: + self.shadow_pred = la.Conv2dWN( + self.n_dec_dims[-1][-1], + 1, + kernel_size=3, + stride=1, + padding=1, + ) + + self.shadow_pred.apply(weights_initializer(1.0)) + self.beta = beta + + def forward(self, dist_map: th.Tensor) -> Dict[str, th.Tensor]: + # resizing the inputs if necessary + if dist_map.shape[-2:] != (self.shadow_size, self.shadow_size): + dist_map = F.interpolate(dist_map, size=(self.shadow_size, self.shadow_size)) + + x = dist_map + + enc_acts = [] + # unet enc + for i, layer in enumerate(self.enc_layers): + # TODO: try applying a 1D sparse op? + x = layer(x) + enc_acts.append(x) + # TODO: add this layer elsewhere? + if i < len(self.sizes) - 1: + x = F.interpolate( + x, + scale_factor=0.5, + mode="bilinear", + recompute_scale_factor=True, + align_corners=True, + ) + + # we do not need the last one? + for i, layer in enumerate(self.dec_layers): + if i > 0: + x_prev = enc_acts[-i - 1] + x = F.interpolate(x, size=x_prev.shape[2:4], mode="bilinear", align_corners=True) + x = th.cat([x, x_prev], dim=1) + x = layer(x) + + shadow_map_lowres = th.sigmoid(self.shadow_pred(x) + self.beta) + shadow_map = F.interpolate( + shadow_map_lowres, + (self.uv_size, self.uv_size), + mode=self.interp_mode, + align_corners=self.align_corners, + ) + + return { + "shadow_map": shadow_map, + "shadow_map_lowres": shadow_map_lowres, + } diff --git a/visualize/ca_body/nn/unet.py b/visualize/ca_body/nn/unet.py new file mode 100644 index 0000000000000000000000000000000000000000..4a9f65d73977ac5965a61d708cf4acb6e8f5b43d --- /dev/null +++ b/visualize/ca_body/nn/unet.py @@ -0,0 +1,254 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. 
+""" + +import torch as th +import torch.nn as nn +import visualize.ca_body.nn.layers as la + +from visualize.ca_body.nn.blocks import weights_initializer +from visualize.ca_body.nn.layers import Conv2dWNUB, ConvTranspose2dWNUB, glorot + + +class UNetWB(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + size: int, + n_init_ftrs: int = 8, + out_scale: float = 0.1, + ): + # super().__init__(*args, **kwargs) + super().__init__() + + self.out_scale = out_scale + + F = n_init_ftrs + + self.size = size + + self.down1 = nn.Sequential( + Conv2dWNUB(in_channels, F, self.size // 2, self.size // 2, 4, 2, 1), + nn.LeakyReLU(0.2), + ) + self.down2 = nn.Sequential( + Conv2dWNUB(F, 2 * F, self.size // 4, self.size // 4, 4, 2, 1), + nn.LeakyReLU(0.2), + ) + self.down3 = nn.Sequential( + Conv2dWNUB(2 * F, 4 * F, self.size // 8, self.size // 8, 4, 2, 1), + nn.LeakyReLU(0.2), + ) + self.down4 = nn.Sequential( + Conv2dWNUB(4 * F, 8 * F, self.size // 16, self.size // 16, 4, 2, 1), + nn.LeakyReLU(0.2), + ) + self.down5 = nn.Sequential( + Conv2dWNUB(8 * F, 16 * F, self.size // 32, self.size // 32, 4, 2, 1), + nn.LeakyReLU(0.2), + ) + self.up1 = nn.Sequential( + ConvTranspose2dWNUB( + 16 * F, 8 * F, self.size // 16, self.size // 16, 4, 2, 1 + ), + nn.LeakyReLU(0.2), + ) + self.up2 = nn.Sequential( + ConvTranspose2dWNUB(8 * F, 4 * F, self.size // 8, self.size // 8, 4, 2, 1), + nn.LeakyReLU(0.2), + ) + self.up3 = nn.Sequential( + ConvTranspose2dWNUB(4 * F, 2 * F, self.size // 4, self.size // 4, 4, 2, 1), + nn.LeakyReLU(0.2), + ) + self.up4 = nn.Sequential( + ConvTranspose2dWNUB(2 * F, F, self.size // 2, self.size // 2, 4, 2, 1), + nn.LeakyReLU(0.2), + ) + self.up5 = nn.Sequential( + ConvTranspose2dWNUB(F, F, self.size, self.size, 4, 2, 1), nn.LeakyReLU(0.2) + ) + self.out = Conv2dWNUB( + F + in_channels, out_channels, self.size, self.size, kernel_size=1 + ) + self.apply(lambda x: glorot(x, 0.2)) + glorot(self.out, 1.0) + + def forward(self, x): + x1 = x + x2 = self.down1(x1) + x3 = self.down2(x2) + x4 = self.down3(x3) + x5 = self.down4(x4) + x6 = self.down5(x5) + # TODO: switch to concat? 
+ x = self.up1(x6) + x5 + x = self.up2(x) + x4 + x = self.up3(x) + x3 + x = self.up4(x) + x2 + x = self.up5(x) + x = th.cat([x, x1], dim=1) + return self.out(x) * self.out_scale + + +class UNetWBConcat(nn.Module): + def __init__( + self, + in_channels: int, + out_channels: int, + size: int, + n_init_ftrs: int = 8, + ): + super().__init__() + + F = n_init_ftrs + + self.size = size + + self.down1 = nn.Sequential( + la.Conv2dWNUB(in_channels, F, self.size // 2, self.size // 2, 4, 2, 1), + nn.LeakyReLU(0.2), + ) + self.down2 = nn.Sequential( + la.Conv2dWNUB(F, 2 * F, self.size // 4, self.size // 4, 4, 2, 1), + nn.LeakyReLU(0.2), + ) + self.down3 = nn.Sequential( + la.Conv2dWNUB(2 * F, 4 * F, self.size // 8, self.size // 8, 4, 2, 1), + nn.LeakyReLU(0.2), + ) + self.down4 = nn.Sequential( + la.Conv2dWNUB(4 * F, 8 * F, self.size // 16, self.size // 16, 4, 2, 1), + nn.LeakyReLU(0.2), + ) + self.down5 = nn.Sequential( + la.Conv2dWNUB(8 * F, 16 * F, self.size // 32, self.size // 32, 4, 2, 1), + nn.LeakyReLU(0.2), + ) + self.up1 = nn.Sequential( + la.ConvTranspose2dWNUB( + 16 * F, 8 * F, self.size // 16, self.size // 16, 4, 2, 1 + ), + nn.LeakyReLU(0.2), + ) + self.up2 = nn.Sequential( + la.ConvTranspose2dWNUB( + 2 * 8 * F, 4 * F, self.size // 8, self.size // 8, 4, 2, 1 + ), + nn.LeakyReLU(0.2), + ) + self.up3 = nn.Sequential( + la.ConvTranspose2dWNUB( + 2 * 4 * F, 2 * F, self.size // 4, self.size // 4, 4, 2, 1 + ), + nn.LeakyReLU(0.2), + ) + self.up4 = nn.Sequential( + la.ConvTranspose2dWNUB( + 2 * 2 * F, F, self.size // 2, self.size // 2, 4, 2, 1 + ), + nn.LeakyReLU(0.2), + ) + self.up5 = nn.Sequential( + la.ConvTranspose2dWNUB(2 * F, F, self.size, self.size, 4, 2, 1), + nn.LeakyReLU(0.2), + ) + self.out = la.Conv2dWNUB( + F + in_channels, out_channels, self.size, self.size, kernel_size=1 + ) + self.apply(lambda x: la.glorot(x, 0.2)) + la.glorot(self.out, 1.0) + + def forward(self, x): + x1 = x + x2 = self.down1(x1) + x3 = self.down2(x2) + x4 = self.down3(x3) + x5 = self.down4(x4) + x6 = self.down5(x5) + x = th.cat([self.up1(x6), x5], 1) + x = th.cat([self.up2(x), x4], 1) + x = th.cat([self.up3(x), x3], 1) + x = th.cat([self.up4(x), x2], 1) + x = self.up5(x) + x = th.cat([x, x1], dim=1) + return self.out(x) + + +class UNetW(nn.Module): + def __init__( + self, + in_channels, + out_channels, + n_init_ftrs, + kernel_size=4, + out_scale=1.0, + ): + super().__init__() + + self.out_scale = out_scale + + F = n_init_ftrs + + self.down1 = nn.Sequential( + la.Conv2dWN(in_channels, F, kernel_size, 2, 1), + nn.LeakyReLU(0.2), + ) + self.down2 = nn.Sequential( + la.Conv2dWN(F, 2 * F, kernel_size, 2, 1), + nn.LeakyReLU(0.2), + ) + self.down3 = nn.Sequential( + la.Conv2dWN(2 * F, 4 * F, kernel_size, 2, 1), + nn.LeakyReLU(0.2), + ) + self.down4 = nn.Sequential( + la.Conv2dWN(4 * F, 8 * F, kernel_size, 2, 1), + nn.LeakyReLU(0.2), + ) + self.down5 = nn.Sequential( + la.Conv2dWN(8 * F, 16 * F, kernel_size, 2, 1), + nn.LeakyReLU(0.2), + ) + self.up1 = nn.Sequential( + la.ConvTranspose2dWN(16 * F, 8 * F, kernel_size, 2, 1), + nn.LeakyReLU(0.2), + ) + self.up2 = nn.Sequential( + la.ConvTranspose2dWN(8 * F, 4 * F, kernel_size, 2, 1), + nn.LeakyReLU(0.2), + ) + self.up3 = nn.Sequential( + la.ConvTranspose2dWN(4 * F, 2 * F, kernel_size, 2, 1), + nn.LeakyReLU(0.2), + ) + self.up4 = nn.Sequential( + la.ConvTranspose2dWN(2 * F, F, kernel_size, 2, 1), + nn.LeakyReLU(0.2), + ) + self.up5 = nn.Sequential( + la.ConvTranspose2dWN(F, F, kernel_size, 2, 1), nn.LeakyReLU(0.2) + ) + self.out = la.Conv2dWN(F + in_channels, 
out_channels, kernel_size=1) + self.apply(weights_initializer(0.2)) + self.out.apply(weights_initializer(1.0)) + + def forward(self, x): + x1 = x + x2 = self.down1(x1) + x3 = self.down2(x2) + x4 = self.down3(x3) + x5 = self.down4(x4) + x6 = self.down5(x5) + # TODO: switch to concat? + x = self.up1(x6) + x5 + x = self.up2(x) + x4 + x = self.up3(x) + x3 + x = self.up4(x) + x2 + x = self.up5(x) + x = th.cat([x, x1], dim=1) + return self.out(x) * self.out_scale diff --git a/visualize/ca_body/notebooks/render_example_cca.ipynb b/visualize/ca_body/notebooks/render_example_cca.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..09211d203fa91c31f683bb754bf62e60951a882b --- /dev/null +++ b/visualize/ca_body/notebooks/render_example_cca.ipynb @@ -0,0 +1,195 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "43cbd3f0", + "metadata": {}, + "outputs": [], + "source": [ + "import sys\n", + "import os\n", + "import torch as th\n", + "import cv2\n", + "\n", + "# set the right device\n", + "#os.environ['CUDA_VISIBLE_DEVICES'] = '0'\n", + "# NOTE: assuming we are in `ca_body/notebooks`\n", + "sys.path.insert(0, '/home/evonneng/audio2photoreal')\n", + "from attrdict import AttrDict\n", + "\n", + "from omegaconf import OmegaConf\n", + "from torchvision.utils import make_grid\n", + "\n", + "from visualize.ca_body.utils.module_loader import load_from_config\n", + "from visualize.ca_body.utils.lbs import LBSModule\n", + "from visualize.ca_body.utils.train import load_checkpoint\n", + "\n", + "device = th.device('cuda:0')" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "5caf2480", + "metadata": {}, + "outputs": [], + "source": [ + "# NOTE: make sure to download the data\n", + "model_dir = '/home/evonneng/audio2photoreal/checkpoints/ca_body/data/PXB184/'\n", + "\n", + "ckpt_path = f'{model_dir}/body_dec.ckpt'\n", + "config_path = f'{model_dir}/config.yml'\n", + "assets_path = f'{model_dir}/static_assets.pt'\n", + "\n", + "# config\n", + "config = OmegaConf.load(config_path)\n", + "# assets\n", + "static_assets = AttrDict(th.load(assets_path))\n", + "# sample batch\n", + "batch = th.load(f'{model_dir}/sample_batch.pt')\n", + "batch = {\n", + " key: val.to(device) if th.is_tensor(val) else val\n", + " for key, val in batch.items()\n", + "}\n", + "# batch = to_device(batch, device)" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "dict_keys(['image', 'ao', 'seg_fg', 'seg_part', 'lbs_motion', 'geom', 'face_embs', 'camera_ids', 'campos', 'camrot', 'focal', 'princpt', 'K', 'Rt', '_index', 'face_R', 'face_t'])" + ] + }, + "execution_count": 17, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "batch.keys()" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "73331f2e", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[2023-12-21 17:04:11][INFO][visualize.ca_body.utils.geom]:impainting index image might take a while for sizes >= 1024\n", + "[2023-12-21 17:04:13][INFO][visualize.ca_body.models.mesh_vae_drivable]:ConvDecoder: n_channels = [64, 32, 16, 8, 4]\n", + "[2023-12-21 17:04:14][WARNING][visualize.ca_body.nn.color_cal]:Requested color-calibration identity camera not present, defaulting to 400883.\n", + "[2023-12-21 17:04:14][INFO][visualize.ca_body.utils.train]:loading checkpoint /home/evonneng/audio2photoreal/checkpoints/ca_body/data/PXB184//body_dec.ckpt\n", + 
"[2023-12-21 17:04:15][INFO][visualize.ca_body.utils.train]:skipping: ['lbs_fn.*']\n" + ] + } + ], + "source": [ + "# building the model\n", + "model = load_from_config(\n", + " config.model, \n", + " assets=static_assets,\n", + ").to(device)\n", + "\n", + "# loading model checkpoint\n", + "load_checkpoint(\n", + " ckpt_path, \n", + " modules={'model': model},\n", + " # NOTE: this is accounting for difference in LBS impl\n", + " ignore_names={'model': ['lbs_fn.*']},\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "86a2a291", + "metadata": {}, + "outputs": [], + "source": [ + "# disabling training-only stuff\n", + "model.learn_blur_enabled = False\n", + "model.pixel_cal_enabled = False\n", + "model.cal_enabled = False\n", + "\n", + "# forward\n", + "with th.no_grad():\n", + " preds = model(**batch)" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "9a566533", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[2023-12-21 17:31:18][WARNING][matplotlib.image]:Clipping input data to the valid range for imshow with RGB data ([0..1] for floats or [0..255] for integers).\n" + ] + }, + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAA3MAAAKlCAYAAABhWbX8AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8g+/7EAAAACXBIWXMAAAsTAAALEwEAmpwYAAEAAElEQVR4nOz9d7BmW3reh/1W3Ht/6aQON0/OATOYhAEBDgACRCJEMZmESDOIsUjKFskSi7JE01ZZJkWpLFdJf7gUbalsk6KKLNI0mGEwIxIZIAjMADM39b3dfcIXdljRf6zdPaAwAIkw99zuu35VM/fePqfPOXt/3z5rvet9n+cROWcqlUqlUqlUKpVKpfJoIa/7B6hUKpVKpVKpVCqVyi+eWsxVKpVKpVKpVCqVyiNILeYqlUqlUqlUKpVK5RGkFnOVSqVSqVQqlUql8ghSi7lKpVKpVCqVSqVSeQSpxVylUqlUKpVKpVKpPIK85sWcEOIbhBA/IYT4KSHEn3qtv3+lUqlUKpVKpVKpPA6I1zJnTgihgH8BfB3wAvA9wLfmnH/sNfshKpVKpVKpVCqVSuUx4LXuzH0c+Kmc82dyzg74C8Cvf41/hkqlUqlUKpVKpVJ55NGv8fd7Gnj+Z/33C8Anfr5PFkK8dm3DSqVSqVQqlUqlUnn9cS/nfPMLfeC1Lub+lQgh/gDwB67756hUKpVKpVKpVCqV1wGf/fk+8FoXcy8Cz/6s/35m/rOH5Jz/K+C/gtqZq1QqlUqlUqlUKpWfj9daM/c9wDuEEG8RQljgtwF/7TX+GSqVSqVSqVQqlUrlkec17czlnIMQ4o8CfwtQwH+Xc/7R1/JnqFQqlUqlUqlUKpXHgdc0muAXSx2zrFQqlUqlUqlUKm9wvi/n/NEv9IHXPDS8UqlUKpVKpVKpVCq/fGoxV6lUKpVKpVKpVCqPILWYq1QqlUqlUqlUKpVHkFrMVSqVSqVSqVQqlcojSC3mKpVKpVKpVCqVSuURpBZzlUqlUqlUKpVKpfII8prmzFUqlUqlUqlcB0cnG1abBVpLpsEhlUQpyQufe4WcahJSpVJ5NKnFXKVSqVQqlceWtrW85wNvQSrF+mRBv58Ik6dtO6SVPPu2J/n0j3+Ofj+x3x9qYVepVB4pajFXqVQqlUrlscQYzTve+TTrxYKry4Gt27E/jBgtWTYGFRUhRt725idYHx/xmZ98nlfvnnN1ebjuH71SqVT+tajFXKVSqVQqlccObRQf+sBzfNnH387pZoHbj0wu8tmXLtmPnjc/c8yTt45JUjOMHmkMN1YLfvr5V/jnP/lZtlcHgo/XfRmVSqXyC1KLuUqlUqlUKo8db332jF/3Ne9l3WokkE87ones9RoXE4vOctQK3vTO20hjefHF+6x0YrM0DIPn/GjLZz/90nVfRqVSqfyCVDfLSqVSqVQqjxVnZ2u+4dd+EOE941DGKk9Ol6w7y9HScrQ0CJEZ9ntefeFV/Dhw89aGrpEspeMD77xNoxSLZXPdl1KpVCq/ILUzV6lUKpVK5bFhuWr5bb/lkzxz1pJdpFs1SATTMBFSRluDDAqfE1IKxsHzygvnqFYSpwGl4KkbHU8/cUZWks9+5iXc5K/7siqVSuULUjtzlUqlUqlUHhtunq1405NrogsYLVAIUhL4KRBdIIWElBKtFFobYkz4lLm8t2dynm5pOLux4N3vuEkOkSefuXndl1SpVCo/L7WYq1QqlUql8tjw5V/2ThorgYRpFKQIMRAmj3OByXlijpCBDErJ8jkCEAJtBNIonnpiQ2ctQohrvqJKpVL5+aljlpVKpVKpVB4LrFF84H1Pk2JCKck0ebKLaCkJLiIEKAFh9EijGPuRIQRSTmhriGSMMATnMNpy+8kTzn/yJaSUpJSu+/IqlUrl51CLuUqlUqlUKo8FH/3St3CysRwu98SY6UePiJkcAq4fkVISY2JyEWUUUUhSzmQBtk3oxpBjQMeIzJ4bR5bVquX01jH37pxf9+VVKpXKz6EWc5VKpVKpVB55VquWT3z8nZAFCUXvIlkIDv1AcoFhP0IISKkYfKBpLba1aGtIKYOQSCGw1hJ8AuHYrFu0VrjRXfflVSqVyhekFnOVSqVSqVQeeZrWcOPGkrvnB/aj5zOfu+DyfIeIAZMzC6sgwnot2U+B84PDLAy20RyfHDONkSXQrVqyEPgYMVaybBtSz
Nd9eZVKpfIFqcVcpVKpVCqVR55v/HUfw4jM516+5PJiy7AdkASmcSIpgzbwptsbAnBvPzHGxKt39oxT5Oxs4PatDbuDYPKRp54+JQ4jwU0cnXWsj5bsd4frvsRKpVL5OdRirlKpVCqVyiPP/ZcvEG8+BiI//dn7HMZMlgmbQUvPojOs1g0XVwNTyOx6z+Q8hzHAVaY1gpOTFfs+MAwTGUnKks3xkm7TXvflVSqVyhekFnOVSqVSqVQeaU5vHHHjuGFyI+cXWxZ2wfEyo5RiHBzLzvDMzQUiG3KeONu0uDGSPGxWhiwVd+5MIOGZp08gRrpFh7waCcNEnqpmrlKpvD6pxVylUqlUKpVHmpu3j3n3u5/mcHnB7RsbNosSDj5sR546XdAZzenRCqUV297ThcCbnljRu0RIiaazjFNiGEdeevGSo82SlRC0jUFp8FO87kusVCqVL0gt5iqVSqVSqTzSZOdotKA7WjK1HtdG/JAQqwXDfk+ztJAd/T4iRabvE5s2szpesD1MuJA4OTacCktKQEqInNFak8OI0HW7VKlUXp/U306VSqVSqVQeWZQSvPvNNwgugA/IJFmQUEvD/ftb+jFwOV4hU8a7gDCS06OGu/eueHrRcON4AUoSBKQUaVtFqxVu8qgcWXeKT/7q9/DKy/dwk7/uy61UKpV/CXndP0ClUqlUKpXKL5XGaj71ibciBYQAAkXKgsttz8989j7nVz0//um73L3subcd+eGfvMvoRq4OnouLAzkmDNAAWgr8FPEhMQ4eBDz3zifpNFglrvtSK5VK5edQi7lKpVKpVCqPNFIJJhdJCLJIOO959e4WtGZztOHs7ITdmLgYPFIZXnx5R2M1vQukGIgxIpUkhkwWgpQyWcA0BXw/0Q0Hvu4Tb2W5rK6WlUrl9UUds6xUKpVKpfJIY5sWnSZCjKQMw+iYYuLk9glBCY6sxL8SEFZxvJFIBCuryN7jfUQ3BqEFIghImZxhcgEhBLuX7tCIyM2TBUebhsNhvO7LrVQqlYfUzlylUqlUKpVHlwz77YHgI5MPhJSYYqI9WnPwgXFy+MEh5yItAsIqolI0q46QEyGXjpxUigylM5cyVkleef6C1kj++Q+/wEsvX1331VYqlcq/RO3MVSqVSqVSeWTJQAJ8SIQQMQKcEIwyM7qJadcTXWLVKMwYkChyAn3UsTs4jowkpkQWQE4IJZBWo3NGCYg+4GJErLtrvtJKpVL5udRirlKpVCqVyiOLABotEUZC8LjR4zO8/Px9ckzYKIkx8cLFnjAk1q1hdWQRjSYmiVp0CJkIPjL6iFKSHBMhJsaY0U3DcPB854+8cN2XWqlUKj+HWsxVKpVKpVJ5dBHF0VIgEMuWYCRjSlireeJshfAZ4QM3xiVGSgYf2Q6Ow3ZksWwJIaA6QwRiBqsVSkqUVhxbyVOnz/AjP3GHlPJ1X2mlUqn8HGoxV6lUKpVK5dElgyYxREEIEak1Ukmevn2ElrD3E0HCLiVEDCxXDTdWipBgP3q6RhEzyFyKwrYxpJTQUpEmT5/hh37yLj6k677SSqVS+TlUA5RKpVKpVCqPLC5E/uLf+BEuDh5pNFFJUpYsu4bgAikEBAklI0pmcojInNksDDeOFzgXQQhShsYockqkDDElRp+5f9nzg//iznVfZqVSqXxBRM6v37EBIcTr94erVCqVSqXyuuG5Z075I7//a7i63IHUyBDYnW+ZhgAuMvqADwmhYLlusK0lCYlznuXCkJLAx4RuLAp49e6Wv/73fpScM7uDu+7Lq1Qqb2y+L+f80S/0gTpmWalUKpVK5ZHH+cRys+Rye6AxmqazSOfxJnLYDggy684SUgIh0I0hxEy7atntRpYLgzKKEAIg+IEff5ntfrruy6pUKpVfkDpmWalUKpVK5ZHnlVcv+ba//YNIKRFaoa1myrDrJ/a9o/eJ3RSISiKUxg2enAEhEVKQskA1hiwlSlLDwSuVyiNB7cxVKpVKpVJ55MkZ3GEAqbGrBZAQTUM/bekBbQ34SBojrRSYZcPgA9l5hICQMm43kH3AKVW+YKVSqbzOqZ25SqVSqVQqjwXWKI6WmvO7F1zsRoaQ6M7WLI+XBECuOlhakjXsR4+Lid22x0+BprUIQAlYrixS1S1SpVJ5/VN/U1UqlUqlUnk8yBnXj+QUuLrYkqxCbTrEWuNlZGAiWlDrFoxi9IGcQSrBeBhQZNrWIHOGXKMIKpXK6586ZlmpVCqVSuWxQAhIzpNdRGtFGEcu7u1xk2Paj9jOMI6e2Hu0EsTRo8gQIlKX0crgE62RfPxDz/LTL1xe9yVVKpXKL0gt5iqVSqVSqTwWpJSAjMqJ4eAxrebGjSUX5yAyGCvRQmJajXcBmSNaSnLOpJSIIdJ1GiESb372GCGqdK5Sqby+qcVcpVKpVCqVx4IYE8F7og9YKYg+kjKsFgYRE7ZTSKHIOYMQtJ1FxIyUAu8cMWaUhNxqMrBetez2E6/nTN5KpfLGpoaGVyqVSqVSeeT54MfezSc/9BQ3lGc/enwG21mMNbjRM42elBIZEGRCTAyHCRETWilSzHQLg5KwWjW0q4YxKz5zIUhS8W1/4dtxk7/uy6xUKm9Mamh4pVKpVCqVx4tn3/Y0X/3rPsmTT59x88kTPvP3/j5ojSQjEMWRUoDSgqYzZMCPjpgyKYNQEiVAkTFt0cxlMjFG/BRQS8PX/PpP0q3XfPmv+Sj/4G98J9/7D3+IOy/cve5Lr1QeOYQQGKPRRjE3x0kxkXIuTrJaklNGSAEIpskRQzUi+ldRi7lKpVKpVCqPDEorbtw+4Tf8rm/gg594H9EH1sdL7r70CtoISu8tY7TEDRO6MeSQCCEjjSABIWZySigpkAiULMHh5IwUohR9MRFSw9XlxKGP2Mbwu/7Yb+Vrf8On+Kff/n381f/hbzL20zXfjUrl9YfSquhXMyyWLaujFSkErLUcn67wIRJdIGVIMRJTIodEs7ClmBOQUyKEwGE3cDhMOB+BXDWsX4A6ZlmpVCqVSuV1z+mtE97/0XfxJV/2Pr7iaz9C8GV0cnu5Z3u5I0wT7vnP0ISR/jCSUsZnyEIgEKRcNojTFDFWEnxg6B3JJxaNLg4pCAQZ3WhCu0HffpbTJ87oFpbN6ZpFZzGNZXO8JsTI/+M//0t857d/H/funF/37alUro1u2XJ8dsSwH2gaw5vf+Qznr17hnEfOLkLaaqSSJBfISuB6R84ZQTEfkkphrEZIiXeBnGM5WMmglUAIQT9M7PYTu6vDdV/ydfDzjlnWYq5SqVQqlcrrDiEFTzx9kxtPnvI7/uhvpGlbnn3rk+ScuH/nApETSMXuas8rL94FKRjvvkqzu4tICQS4KZAAqTVCSXJKxJhIsfwzukAOGakghkTOkIC4PKJ98ikECmUMTz53kxgSq+MlRydLrDEsj1Z0XcPnfuYVvv2v/iP+0d/6Tq4udgz74bpvXaXyRcdYjbEaN3ne8o5nsE1Dv+3JORNCnEcpc+l6C0EIAdNaxl2PbQ1GK7wLCCmZRo+2ipQhhkjOGYkg5kR5lIsxUYqJ
1dGSl1+8zzA6YojXfRteS2oxV6lUKpVK5dHha/6Nr+BP/ed/FDdOxCmw3/fsrg4cLvdIa5BC4kMgThMvP/8qzkXapaF/4XnacEBLgci5FG0ZchYgy2YxhljGuSjRAykmQoj4LPHtMd3NGzSLBm0Mp7eOWR8vIcNy3RFjQilJu2g4Pjtic7xCaY13iX/y976Xv/0////4p3/3e6/79lUqXzTe8YG38txbbxEGz8sv3ieFhFSS4TAgpYLyqKEaQxgcMSWskXRdg5gtiGyj6Q8TiMw0eHyIpCxQShJzJsW5UHugbxWzxi6XMU6pBOPkuXfn4jpvxWtJLeYqlUqlUqm8vnn7+97Mt/xbv5Z3ffBt3HjylOACh23PYXfATxPbyx5jLc2ioekMxiiG3cDdO+dcnW/pNgtcP7J/6QXUsGfRFIMTHzPTFFBGEUMixoiSAiUE4xSYfMILQ3v7CcxmjUzFPMV2DbefOUNIgUKijWZ3tce2lsWyY3O64vjsiBQSSikWmwXb8y0vv3if//Y/+R9xk+df/PBnrvu2Viq/Yrzz/W/mmbc8xf2X7xFC4rA9EEJCW0VjLbvzPapRNI1FSoGbHErAatHQNBrbWlJKNI0muIj3kWFw9KPHuVLAKaPIUsxd9EwGyMUYJU2elBMpZZRWXF0d6A/jtd6T14hazFUqlUrl8wghoCyRnNw4YrlsiSFiG81+N3D/7rZma1VeM4zRfPAT7+VP/qd/GKkEfgp459hdHsgps7/ag4T+MNE0lm7RIJTANobh0LO77DlcHZgmR4gJIeDw6j3YXWFkQOTMOAVSyoRQPt42ihgzfdK0N29gNxtyyiilsI2mXS5QSnF0siQjCC7Sdg3OB6bDwGLVslh1tMuO6ALHN4+wTUO3aIkpYxuDMpo//fv+HD/0XT+GdzXWoPJoI6XgK7/uS7n/ygXT6Gm7jvuvnKONoVtaVGuZtgNSCow1JO9oWs3R8QKmQLOwWKsRqXTnUkqkLNj3I8PkGcbA0E/kDFIrhsOIMoqEIKcyBj3/Hymm8rGQiSmy3faPe3RILeYqlUqlAtZqFsuW9WYBGWKMdKsOoySC+eQzZWLKvHrngqvLN6TQvPIacnrzmD/xZ/8g73j/W3CTL4UbsLs64F2gaTQxJnaXPcoqlusFSpaZK9sYdpcH9tsDUsHUu/L35s7bsBtw+z3jxRUheISS+CkAAtNYmpNjUoZu1ZYNo4+0y5bjm8eQM+2yJbqIaSzHZyvIsL3Yo/Q8SqYkWiu6VUsYAmdPntJ1DVkImqYpVuvAz/zkC/yf/zf/V+69Uo1SKo8uTz97g7Obx7jJY4ye9W3lOfCTx8zOse2ywY8jisxq0bFcWrTIxUBIK7pWF8fLDC5k9oeRu+cHhsFxedUjlML7iHOeGFM5oIFZY1fiDfLcqZOidNG3VwcOj3eHrubMVSqVyhuVprXcuHWCNZqhH4g+4iZP2zWEEOl3A4tlQ86ZtjGE0ZMFnJ2u8D4SfMC5cN2XUXkMWR8t+WN/9g/x1ve8ick5ri727O5tSSQO2x5jDSFEBEBO5FRcKJujJcTAYdeDyCgtOWx7ms4QQ8QNjm7dIo2mOVnjskKHQLfpSDEhhCzW6AKUELgpIIRgdbSiaUv3wDSG5CO2NQgJQ+9YrDpUY9BKMB0mkpZIJTi/e4XKAqkV3DrGtoYQA8o0iAzvfP9b+Y//u3+f7/gb/4T/6f/2195oxg2Vx4CmtZzdOMaNHq0lMkUWncWHRJ4dYHNO2KUhe0+rFZ1VHB+3NEbRGoVVksZqFiuLlBptNePoUKIcIG6VIIZYuugiI4Qgp4zk81o5KDMlKSVyLEZJMmWWyxYEHPaPdUH3BanFXKVSqTzG3Dw75n0ffDPn97ZsDz1i7ibklDnsx4d5PoeUkEKghWC9brl7f4+U8PTTJ6yPOn7kBz9XC7rKrygnN474U/+Xf4e3vOs5Xn3xLpDZXuzZXu3JKeEnjzIaYsRaTbvpyLFYnI+7gfXpimlwuBBRc4dMZMGNJ88QZCbv8S4QY0JrSbNeEWIEUYxRlFVkH1mfrIrmzRiWq5bFssF2Df1+QGRB29jSzYueXYgIKRG56HpiSiijEUMJIh/6gfhyxFhNt+rIqRivtK1hc7Lif/1HfhNh8vyl//qvX/ftr1R+USgpWC4bcggsOsumM5ycbdj1E/1VT0yJttX40dMuy7RHYxTrdctqYZHA0bqlaQxydqYUQtIoi8yJ/W5kYRWhM7RWc/+yx+oMqKKlyxk5d+RzzqSUkVKSYyLmjFCS1apDCMF+98ZylK3FXKVSqTymnJ2t+eav/zAxJ+6fX5FiwvtIiqmMqMyLohBFaI6UDOPE8aYluoCwCjd67g2eZ58+4f5lz/aqJ6U6AV/55fPuD72d93zkbbzwU3cIzjGOnmGYiDGSYxnVcpPn+MaacTcSQqSdO8i6MbjRlbHgmGmXDVIKlFEM/YSbPForGmtYHW/YNweuzveEmCElzp48wXQNw35gsVmQQiwee0qyvxpoXMB2FiUVIpcA8qaz5Jw5bA9MUiCUJIVEdpEYEv1hIufMNE5IqXA+zMUcDI0m5ITUkt/+h/9N7r58n+/46//0ul+CSuVfi5PjJV/7q97F517ZsmoNTz1xxBOnS3KCIyPJJx3jMCEQROfpFg2L1mC0mJ9DhUhgFCxaRcoZYTVaK/r9gJWCo2XLojUsu4arw8jkA/0YSudvdrPMc1ZkSqWYy7kcrhTJQCIDq1WHbQzn97cPZOGPPbWYq1QqlccQIeDrv/aDvPdtJ3z/T9zB+zCPdgl8SEgBcg5iFUKQEcSUwSX60bFYWpyPOF/GwYyxvO3tT3C5m/j0P3/hei+u8sjzka/8AH/iz/0Bru5ecef5u7SLhhQjwXtSiCVcOGaaxnB594rFesFy1WIbi3ceJQRIjQ8lJiCljEDgQywjYMIyjY7N6YrtxZ7l0ZLj28fsL3u0ViijEKocYkghaNcLgo/EmLCbDikFQkhCSkgyZBinCYEkJEiTQzUakWF7dShW6SIzDRPORVbHS8bDiJs8MSRuPnXK4WrPsDtw++kbfPO3fg3f+e3/jLGfrvulqFR+QbrW8Kf+2Ddx/96BMWS6xvDkzTVvfvqI6APJxxITMPpSaMUIomQ6No1GKYlUGpEjymiUlsVJVkqmyWO1ZNkZntCKQ+9wIXO17ZGyPNdKSaxVxbhIitLRk7nkQ0pB8OX3RRbl3wFW6wVXF/tySPkGoBZzlUql8hjy1Z96Lx95z23u3rng05++w9VlzzQ6Qkio2eQkp4zU5VQzxYSUgphhtxuxRhOif+ju53zg3p1LVNNgjMb7OnJZ+aWhjeaTX/sRYkwM/cRw6IGI0QYlxcMNWtNZpJIYUzaEAuYNoiGljHOedtniRkcOEaElafRoo/DeEUOCDItFR9M1KCXYnK0QGYZ+xE+pHHAYQwoRUkZrRbuwGKMRQN9PkEEZSfQZCKyOF0yHMqIspSxFYSg26TlFbKPQWnB1f8s4OY5ONkxjoGkUWht2Vwfe/M7neO+H38E/+8c/cr0vRqXyr+Adb73
Nm5865vLenne/9SZHC8tmZVguNCJqgp9ASBZWMfYDGU0MCSEUKEHXWZrWzgUXGK1JMRJTRgylg37UWTqXaBvF/cuRRWfZDwGjPFFLYiiGRkLJMlkSEykmUhIIWWQDMYPWskQd7AdOTtfcu3t13bfvNaEWc5VKpfKY8cmPv43f8s0f4t7L9/jMnR13L4cy8pWLtXSMs7VzBi0EQhbReQZImWH0CFFOTqUWDzN+EBS9xKLh6qoWc5VfGjefPOWjX/El7K8O9IeBYT8ilSCbOYIAiVQKYwxCwGLV0iwahJREH5kOI7ZrSAmCi4z9RIqRbtlibKZdNPT7EW0N1mp253vGwQEZbQ3kTNNZmixYLFq8j6UQyxEhBX6MXN3bs950IMB7jxCWo7M1w34ijJ6mtUBm7B1pShydbbiYPBfne1brjn4HkOnmjuPhakfsLOOgSIC6VHzLb/86fvh7fqJGFlRet3zpB5/jj/yeTzFc7bi9MdhmQQ4JqwQ6Z5RRSIrp0DBORFcOAH1IGKtpu/KcpJiKecnsoJ9FObQxnSH4iBaCmCPL1hCO4GxY0k8e5wNJQggJUiKmjNSqdOjm0cqY0nywIghzVz+EhGk0tjGPe1wBUALaK5VKpfKY0LaGL/voWwjThEvwMy9vmaZADMXeOcM8QlYsnVMs3Q4Q5FgcxUJITFMo1tC+/DPERMoZ7wM3bx/NOXWVyi+e3/Xv/haC90zjxL07F2QyKWZSLu/D4CO2MfjJ4ybP5nRTbMxTmjdvmaYrOXN+nGgXDUprUizCmnGYWJ0ssVZDhpMnj2k6y2LdIQSknJiGMv6ojGa57ub8uQCpbFCPTtfo1iClxFqLVIrgPKvjRenSzZbsQz+SgcN2QCmBNQopJVJJptEXJ9jR4Z1nHEb6fc+w7+m3Pe/96Dv5nf/b34iQ9VmqvP748Aef44///q+hUxl/GNisLF2jWXSK5aLBSEnbGJaLtujiWstys0K3DUhBloKcYfKRcfLstwemKXDYD1zc3zFNAakUOeeyvqSEmxxxcuz2IyHPRR8CKSmFYMqEyTGNjpgS2ijk/H2CL89knN01Y0gsV91138bXhFrMVSqVymPE7/ptX8a73n6LyUfubh3b3TgLxcuJ6IMirPyzhLaGUAxRSoxrYRw9OZcFUYoyhplTLt0R53n7u566luurPNq87yPv4u3vfyuH/YAbPfvLPcEF+sPAOJbumZCUsUqtMG3D2Dv6vaNbtjRtGdkSuWxglFVMhxGty6bOu4ixBor5HYujjpyK5s1ag1KK1WZJ27VIBAJBt2gx1pQ/X7Qlw4pM8JHgIiFEbKPJCcbDyGqzZLFe4H1ESgk5E2MgxIhQshSJSuGcw48e5zzaltGznB8clni25zs+/tUf5vTG8TW/KpXKv0xjNV/zq96JkQK3G5ACRIhE5yGBkBKldMkklRCFQBqFtgalBE3X0DSWmCLESPIB7zxunIghkmJkGEYO29Kd7/c90+TwPjCOjs4qVE5srw5s90WH6nzEh0jx3xIIUXLnil62HFKmVNaxnEvIuJQCpR7/Uufxv8JKpVJ5g/D2tz/Be97zNH7y+JS52o7knJGqlGhSiLLo/azFba7xZldLivYglr+TMhijSDGWwi4WnV0mvyFGVyq/8nTLlsv7W4b9wO7yMB8iZMZxQhuNDwEBrI6WrNYdq6MFutE0C0t/mBj6iZgS222Pnzzb+3sQctarJbSSSCFpFhY/eZKPpJRpFg0pZqJPxJDoNi1iNmIQsoQOB1/GtfzkWW46tNasThasj5cAeB8hgx89KUYWq5bluoNcTBr86MnzsxNDQmpVTFKkYBgnxskTfGTsR1KKxBBZrDu+6td92fW+KJXK/4Lf97s/xZd/9M3EaUKSMUoXQ5IQEALC5Ak5MbnANAVSLqOU02HAu0BwDuccUsyjkDECGSlh6gdySoz7gd3ljhACMUWmyRcttpA0tnT6jDXIOYMupTJdghBARgiJ0vLzRVyGptGQy+emXNa8N0J3rmrmKpVK5TFACMGHP/AMi1ax3wcOg8NnyuhZFkV0nhKI0n3LUpQU1iKkI6cy6vagqEsJlEpMY8ToorNTSpJyJicQCrRWhBp+XPnXRGnFb/zd38DYTyyWhvuvXDL0EymDmwLKKNwQsAbkrJUbB08KCW0kISZ0a2cDhYCbHEorFpu2jF95aFctYfIQBM2iISPoli1TP+Jm1zuRwY8B2xiUVrjJY1uLEIKmNUgV2F8NQCn0/OTpFi1TP+H15w1ZpmHC+7IJDTGhrS77TClIObO73LM5XqKURsRMcJ7tYWB9smYaJsbGIJRksVlc90tTqTzkySeO+eDbbuEOE9NhQJRWF8pqlFCEEMhSI0VZW4b9iM8JocClkvkWEqSDA6UIKZNTRCkIMZYR55jmDrYgB0ghIrJA2vJ1FaBlOXxMsRz45Fw69s6FkjdHOVwUgNaCmMC5iDaqdMFnycBi2dL340Ony8eR2pmrVCqVxwCtJF/+8beyuxq4ujxw/2qinwI3bh/TWIM2CtMYhJBzNTcL0WEenyxaIqWK/iCFRI5lZMXH0rEIoeT7xARKqaJJqlR+EcQQ2F5cobVinBzT5NhdHhh6R3+YWBwt6Y6WhJgYekfwgW7VkjO0qwXZR4QC29iHmVZ+cOwuDiilGOfwbiHErLPLOO+JsbztbWOQSmKtQaty+i+knA1T5vezEBzd2KBVsUHXRuKdI4tMDIGUIs572kVDu2xoly1SfH6caxocUhbb9BQjKSeCC0yTBylJKeJ9IPhIvx/48Cffz40nTq/vRalUZpaLhv/Dn/w3OF41D070EFKgrS5OlAuLNQajJUpKUoakBShJ30/zBEeJEFCtwc9du5iLKcoweKa5W64aA1rOUQOSJMA7jzUSYxQhlckQIUU5bMyloLNWk2IqB4lzVy5lZu1p6b4rXda5NEclbI6W131rv6jUYq5SqVQeA971jtsk54qIfNYVHK4Gogs89+wp69UCUummxVC0byVnjocxBeSyMCLKwhjnBTTNmiMhmbV3mWlyhPj4nnRWfuV530feiZvKGOXhak8KkeAj++2eZ9/65MNT9vXxiuVmQbts6ZYt42Gi6RqmwwBSMOwmvAuQM5sbR9hlVzRpKSOkYHnU0e8HshBE7zlcHhj2A0gwrUFpSUoJoRQxJcaDgwxSq/LzCZj2Y9lkAjmVHMbVpmN9tGTsHdZajNV0ixLVIedDkDw/P9FHBOKhCUoilzD0uQO+vdgxDhNNo7hx+4TFqr3mV6dSgS//+Nu4tVmUZ3N0c1cOlNbEkEsURxalaMsQQiSHRPABiWAaXHE9zpngAyFFgivZkTGVcUljFEopBJmcS7QAqeQ9KikhJ4QSSDJKy4efL4Uk5xJJYJoSYxJDyZETAPOoJYKHfw7FGOVxN+yqxVylUqk84lir+bKPvLlsGn1ESM04hZK75QNSZN71nqc4PV0hhCinnVC0B7mMtM1GgiWYlbmAA0CglCr/OjuGpViMURaL5nouuPJI8qEvey8IgfeRw3566DxZdDKJ4D3LoyWb4yXNom
V7Pof+zif2wQW89xirGPYDKWUW6wWunzi6saHtLMF7dpcHvA+4YcI0FqUUm7MVWutivjC/t73zTP2EthrdGJrZ2bLpLIvVAtu1QNlgCgQ5lWDk9fGq6IJiyamLMaOMxk++mAklCDkjlGIaXLFet5oQyzVe3r3k7kv3uPvi3flrw+nN4+t8aSoVvuT9z/IHf/tX4Iae6Dxh9CghaFpLDAnTWIhl3DIkcJPHhUCYowGElCiryjijKIcjKSREnjtvcwC41JokQBoNeTYFipkcE+TMNHj6wSFkyY2Ts65VWQ2UEeboSwC5VJKc86wYeCAAnw2+Zo14kQLkOSbh8aQWc5VKpfKIc3a65OlnzghJoo2hP0zs9hPLdYtdtYwRpuB55rlTVqu25POEVDoJ88InZantHnQYoCyOcg5xhjI+kym6ByHgHe965jovu/IIcXbrhPd8yVsJbpqz2wLRB1LMGKtRSpGzYHO8olstiDFxfHMDOaONxk0ebUyxIaf8nZQSh+0BbRXDruf+3QuG7VBy4KymbQzT4NBGsb8qbnk5JZQ1dOuOZmFZHS8JztFYTZxHt67u7Rn6kcWmw7aGRAIJbnK0i4YsZg2QVqTSjsNNAdvY0vlTRWendDFW8c4TptKdGPuJcSydxcvzHRf3trTLhm/+1q+57peo8gZGa8nXf+rdqJxIPuLHMnmhjQKtiVKQYypFQ549SABSmnNKS5B38AGhVTHLAoQSCKWJsbjC5nmhEUriXSQLWf5MKTyUHDmjsLYYbwHkecw/+dKNz/Hz0QM55TlqB/LscJkeODeXH7V0/ma93ePK43tllUql8gbBucj5Vc+iNQgp2Y1lo9lZzenJEqsVl/d2XF0eWC5alsu2bIZjmgu18r/PF3GfX6xjiPM4TJFQ5FxGY5QsXZVK5V+Ho5MVT77pNsujFX70jIMjz46QWUjc6Gkag9YKYyR+cmzvbxl7R4yR9ckK2xlyyOzP99i26D9LcVcyrWxjWR4tOH3imHbdFLMTyuhj8AHImMaSfYBY3tN6jirYX/Vcne/oDyPdqsGNE4fLPcFH3OgxRmMaTb89MB1GpFIwj1UKUYpQbUoHO8yByVorvAslzmOOB4mhFLAhRNwwcfelc/rDSNvVLnfl+vid3/pJPvnBZ8ne4ydPdGGe8HD0u0Mp4JQkzkYnpTjzxFAOZdw04WIgAd77h/mkKQmmqRieSCUhhLKepDIBIo0uxVouz1GkHBaOo8dajZaCxcLSWI0Uxb1LGY2Y16jy/JcVTM1rWcmdK+6aeXaaLeYp6lrv8ReTWsxVKpXKI8798z1/6S9/DyAIQjANpTPx3LHhhgGFQErFfjdCTixWDd2yKdlcSiJk0ciJXITmDzLnyiHqXOSJ2clybufZxvDpn3z5mq+88qhgGsv9O5cM+5Fu1TKNnpQztjW4YSLHxOpogdKSy3tbovM0jaZbtwgluLrYc7jck3NiuVlweX+LXRhCiBwu9wz7CZGhbVt2VwP93rHfFd2clJJu0bJcL/CDRylNcAFJxsVYugfAYtnOAeJw2I5c3d3hRk+7bAk+YFuLm8p4l9aCw+4wmy8kvPNlUylksXCPDzrfpZCcxmIOkQXk2a4dIfE+PMzaq1SuizR6ckxMh7HY+0tJt2rouobleoFRkkhinCa22z27Q0/fj0zOFzfXUJ4B74vRj9CSEIr5j1ACpECpUoy5yZeWGUBKxBRJlEiPBwHgy2WD0ZLGlkOUBx1ApRSzhJsHFl5aq4dxBUp9Xhs3m3CWLmHOrDZdyYV8DKlWZJUvyOZ4hZt8GUlRAqU1SitEznNIY0Tp8vZRWmEby9Xlnml01/yTVypvTE6Pl8QQmZShXbeonFFiYiE90fUs10sOh4nLXbGaziFjrcZNobh+xUQWGZEFiEyOReAuABeKOF1KgVYCbRRnT5xyfjnQ97U7V/lX83v/vf8V+13PcBiZhon97oAEBucwjUVrjURgdDlFnw4e0zVIMn4MhCkghCCFiFKS5XrJsO1BwGKzwB0mUKU4EinNhgolD0towWLV4Z3HdJZE5uLlS06eOIKYQEq0lkyDIwGkzHKzIKeEXVi00YyHkRgixzc39PuBpjGsjlou710hpCrfV0jazjDseiJlrEuZEn2wXC9IMUGCOH9uzpmxH9lve45ubHjmLU/ywk/XA5LKa08MkX7f43pXskVDxGhJCECIxBi489J9lDXEXMK7i55a4JxHSklGElMiMeeWSoBETBklJZISLp4pC0vKmThORbctBSklYsjzQUfmaGkYpsB2N6F1eUbl7GiJEsVdOWdiiLNplyTGVExUZt23kAIhMlJp/BTK934MqcVc5SE3bh2zWra4EDBKMU0eLSnjH/PMVZxd8op+QdMum3mTJ+laS4oBjOHVF++RM/gQirtXpVL5ovKed9wmp0SODh09yQVeuHvFSibGq540RTZHC/bbgZQzQgFZoLQkpggJlBCEB66WOSFz0dKRKTolM2fVITi/v2d7dbjmq648Cnzyaz/CYtlx94X7KF2CfkUqo1Q+li5VFrA66hgPE+vTDZAZ9iNu9KTZAa/bLAje0K063OiITUOzsIQY6dYdyiheeeEeWklMa2bL87JuTb2jXTY0raU/jJhG0za2xHFIiTxaMLkSBt62DTknLs93CK0IY8B5j1TFVGG56UghcHHvci7QIraxRXPnE+2yZexHuq7h7uQRUhBCCQnXRhNiRObZgGX0bC/3vPXdz/LuD7+9FnOV15wnbh3xiS95M24KpBi5d76jW3blAANwo+dwGPEx4wYHOeFj0ZGWYkkQUinaYkiILIghIKQiz6ORmUxMCak1IQRIn2/OKSORQjK6CKkUaN5HfJa0neXJp4+JL2TGKeCniLbq4fi/mbNOS6eujGqmWestijEmiYwiE2PGWvNYNh1qMVdBa0Xb2uJMlwKnxy3brUOlyPpoxRQS/VUPEqxtkAqM1UxjmYNWWiKEwE8B2xqkgCefugFSMg4j3gcuL/bl9KRSqfyKc3qy5C3P3qDvJ3zMGDehG82tN53wykvnWAmT94hWszluubgYSlGWEymW0TAhyimmzEUPUfKCmLUGIHLJ/HG+uJQhVX2mK/9abI7XTKMrOjcliLOW5t6dy4eZck1raRctUgq2lzvuvnQfNzqU1mxO1yijySnTrRpSTlxdHbBN+TOpJPvLAzFEDtsDZ0+e0O8HlFJlRCsIgo/kBOPg8C7QLi1IQZwStlP4UPKw+v2I0ppx8nTrlnbR4ZwvHTtRApKjDyirEFLSNBY/TUzjiDYWPzp0o6EHqSVNa0kpIZVCKIkQRac39hPGaJzzXN3fMRymh5rVSuW1pLGarrVMux2uH7m66kkpse8HpBDEnNn3jikkjHqQ61bGG3NMZFn2hCVbURFcGbtsujLGH3woguvZ0VJIgQ9FhC2lJLiIMGXU2adMCokpJJJWTD5gmobTmxsQgqv9yH43kUMZU84PNHSAc+Gh1jvNMSdilgqUmBHB7afP+NynH78Dk1rMvYERQvCWt90muIBRkuVSc35/4rB3JSA4Ji4ue
U/IFf+CJ//+9+WRrID7hOo9EXVKtNx8/9qZ+kqiVYMXlZQ8/TLAGEQN2IUdMsIamlFKLPxBgllLu2xJRwi79utV3J4VjZ+8mBBsyyTdMLqMQaMbPmLBk8Wmvq1i15PEvjqAQDnRHZSdU6KiPUoeEwYCuBLVw8PqNuKhRwuBmYpoDWdxcyTIeZfj+w3XYY+QUYp5nxMDFPQlEKk2wA59EzDjPttrtH0948u2V1tuLT3/EtL/T9OtWpPujSWqGNXEfGaHyITD6Sl8gPyY2UbC20+GWjcaDNPfQEuCdGouRhtO4WmfSquf+aQrk/QGViG7nLCUgxMRwG+v2wUMsEHa2Mot8PpCRT2barMVruHXVboa3Bzwk/RQl1jWEJHleLX7dic7lic7Fhc75itWlYbzq6TUfwgd//x3/sRb30p/oGKb1kxu1v9uyudhxuDlhnOHtwRrteQdacPdiy2nYYK/45o+X6uAP+WKOl2RISFzlK7IC2doE/yNlZUqaqjMB5FiBYSuJTbTcNVVNTlEgxUQptDdrIllkosJqSZMNXNzIHv5N2zqMXhHmS3MaSM8Nyjp/qVB9maaM5u9wSQ2JztuLiwZacE8Zomq5CGyG4aqNQGdKcqDrHsB/wPmBqw+Ziw3AYKQr8HCGL9aZZNaSUsEZy56bBM+6P916zZlVjq0rkzUbk921bo61EFuyuD+QC8xSAQogRpWH24f45tq4d5xcrtNUYp1mta6FCJlGA3VlzrJFrrtu2Es2V5XnWVRXeL5mm+j16MmWJADKK/jhy/eyW474XujtiZ5JIEwhzkgFmzKQY8dMkQ55hvifkdusWaxTDcSKEgHFGzlAl96ScC/2uJy2xPm3X0LQNj19+SJwDYz9xc7XnR376e3HVBz/oOTVzL6Css/y3/6d/ge/78S8wDyPH255cMpuzDuscOUYJMkxRJB1eHtT21wdiynJwaEUqQr3U1uKspm4sVsP++kBRapGqKMmcc5Z5DEKpW0KDK+tQ2tCsagGQLPkjrq1p1w0hyrr6+t09Ux+wtePhk/MFJQ3GWcIccVVN3dW4RnLuUIo4B4b9xOHQ8/ytK17++BPqdcO8EIOsMxx2R6bjRN97NmcbNmcr8R0spL0wBm6e79lfH/jMFz4pWPNTnep3UxXZkIeYJHJkjkQvm4GqcfIwag1VZbGVIyKAB5GGqHupYgyBEAJ+kXutNh1VXeGqhXZpDKutkC2FtJeYx0CYJW5AQlA18+Q53vaytViyJEuWyWrTNWwvNtRdDRmM1bTrCrQg5qMPSwhzFE9RzERfaFoJha2aBmUMXefoNivOH2xP0SMf8SpF8p66dctqu+b84TlN1+JqK2CSzgkcJSbUsomLQWAoeYmzySDb7UVilVO6l0qCPOzeZSmmkJYYDclo1ApSLKQEi56YtIQpN52TgcuyBW87JxJkqyXnaoH4NJ0TsIq1rLetDEFzoSQBrJzqVB9mrdYtX/jBb0NbTbft2F5uiD5SdxXDYSTFiK4M4zBjakdIkbqVLVrlDN26FYgdci24xmGbSmAix0lgWJWhXTWMo8TjWKvFZ11ErtitW3QtzZGfA/ub47LZluDtqnYLwE+hjcUZS9PUbM7XrM5XAt+LiRgzV8/2mMrSrlus0lhlaDcd9aqmbivCHAVYUglJuWqcSB2nwDx6yb2boqhUKksIieuntxx2R7knKCg545xlvV0JbZMiXvLZ40fP/vpIiBFnReUWfOD2as8wzKilcVNKEebIcByXzdyyrOgngo+M/cQ8zazONzx+7RFhFrnlpz//cR6/8uAD/xycTtIXUP/Uf+Pn+eIf+D2gNaaqcI2skPt+RBnF9nLD6qyVCaMy9MeJcQzYyrHaNjStI8ZITIkYAgAxRMZh4ngcKbmIaTNn2m1DTnJoxpQEF95U94dfipHhODHsR6rGLkGq4gnYnK/o1g3bh1tcU3F2uaFZtzx4fMFms0InMah7HyhKyJl3F5BtK/wcmQ4Tn/y2NwAYjxOrTSu/XxaMdNVVrDc1lML2Yo2isL85sj1fUbVOpjij55WPvSRBl6c61Td5PXq4pl6uFfGVFUJMZO4WDHeHnsY48ZUqa4hKo1er+wbPWCHo5UVKHb1MPZWCaZzJWfIpt+cb2lUjX5sL0+gxRlO3FXVXS/5cVaGdwVaO7eUGFBz3A8ooVpsOpTTz4PFzwI+eefb0tz3DfmA4DEL2iiKbMVa8f66y1LUhhiQbRiWZPNoYSJnVquEn/sAPvOi341QvsJTWQkf1kRwDJQfCPDP2M/PkmccZP3lKls83FHJCBgUpiy+8IP8+JnKI915NKPe+0Jun14tf3GKcXuIMonjLlSKMS6Dy8tBJEX+dnzwpRGJIjKMQNO+ysZrG0a0qunXFal0DAhZKi4qz2zS88anXX+Cre6qPQpVSCD5ALrQrCbVv23ohrMLhtsdqw+Z8Q9vVVJVFoznuekoGVwnJ9cFL56yWiJyyeEfNsu2eJs/YT9KcpUxYvNPzcWZ91tCtalZdzcWjM2xtoYhX+vxyS20sdVNx3PfMx5nD9R4fvMj8tUIVqOsKrTTT6HHOoVG065bzJ+cCPpokF3U4jiK/TJn+OJFTYTgOeC/3h7gEd1sn1FnQHPc9+5sD8+gFrmSMbCtjWjx9srS4U6zEkDHGMPfiW68bWSLEINEodVvhpyD/ra2om0qy+85XtJ1IVWOIjP2IUppu3fDaJ1/BVRXzGNhdHfixn/0+Hjw5/0A/B6dm7utcn/jMG/z+P/HjEgw+y4dzGiZ5eEPRrBrmKXDYjWijCTmitCbHKJlvRgNyyEzHCessfg6kGClKycPU4rWZp8DxpqduHN2mpV03+DkSpiCZUcuHb3O2ol01WCsXXU6ZYRjZ3x6IUWQlYz8xDjPOWrYPNjx69QEPXr2kW7U0lWO1qslDZO5n+psBWxSrTcvLn3iCNlZuGtZStRXr8xW5ZMIsF3NJheNhoBRF1dZMw8z+5ihLRa053Oyx1vDDP/W9L/rtO9Wp3nd9+2dfoasdIbwHYRBogsgbq8ZKjIiWrCxtDBhDyixZWEpyIq1ZiF4OVzuJNVmuU63VkickiHaFwllDt5asHoVCKwXL1LRqnXxNZZcH2ISxSr5fPxJnjzLgJy8Y5lIWH5OAVSgCdbFOMn6M1ct2JEgGWBJQizWLiT3lZYNy4v191EspiFF8PdK4RSTtRi2bYSEvp+Xz4hfggTEG65R4gRYqnphPFbaWwaTSmnZVEUbxxJWCZM2VgnUiyQTxu1AkZLiUAkXJZ10r2TCAoPmU+ELdkv0YYxJZVS5QYJ5kM51iws+BL//KV17kS3uqj0DlLDnDSim6bcvu+sDYzxKzkRLWWlKSTdHt1R5XV5TFQ1qvanKBdlUz9DPPvvaMm+cHxmEmUbC1+K+briKXTJoDzlnKkumYdblvpKrFDx1mTwhJPKQ5060aUio8eu2BLAg2Hc45kUFqTUyJVAoxZ8mMQ9GdrSQPUksj5ypLipnN5ZbN2RqFotu0uMqQyx3MJFJyoVvCwauqIqbA7ubAPHlSjLhKbD1aKRliOs
vYT9LsTV4aWyPfW4amFldJ6PpdFI9fnp/vzm+t71QoCT/fWRkEwJJyoj9OgOITn32Nuq2YZ482hs9+4VOyzfuA6gRA+TrWt3zmdf4X/4d/mW/57OvsbwfGaRbcqRPtcQqZm6e3vPu156A0L73xCB0i43FAtMiNSKJSWrItDIfbnpJEIpVTRjvDPMd7OlAqGUrGTxltF68LhaxgGGasUSgKOSmKEtKk0hpXKbquIYSEnwJt02BrQxhnMaKeW4L3uMrQrmv86Hn42gOME4M4GeY5ME8zzVlNt2nJUbJH7oJVc8o8f7Zj1TWQEu35CsqWr/79N6VJXLXsb44LCTPz2re89KLfwlOd6n1XiolcBDaSsshQJJyY+42c0kr+Wy6SN1kKUVsJUzUib7nLtwGD0hrjls16Et2/OIcgBnUPmogxMY0eHwThXqmKojIkOTS11tRNjavd/aTz8tEZfg4Mx4lmVTEeJ6rG3U8zSwFbO+rGMY2enIXw52pLu2roj9N9jIL8HSW8tlnVy+bkVB/VkqGjkODuwAHWWqrWEqNQ+qwzBJ8IJlA19j42I0xCe71rwmwtsRjaiD9Ohh7w5m+8haIwHEdcVS3npCKlu2GHXD8xRIzTMhg1BmcMBTl7chIvkXN2obZCnOVhzljJYDWtuZdzlsJCez3VqT7cUkrODFc7rNHYyjEd5/t7c7OqCXOkXTfEOXDcHUhxiQk4jmhryD4w9BOHfsJajWtrwhRou5qYRD3i+5GqcVy9c8PlyxfUbU3dNEQfwGjGIVC1DqO1WMmMob8dWV90xBAZ9j39MKH1EqVjDNYavA8oDTlGCTE/X6GWAUsIifX5Cj95XCNxBzEl2nXDsB8JUbbmd5LqzVmH0Rq3qNdun+3xs8gvcwaUolt3MrhsBA4zjTNKiQJsve1kuJqL/N1jxicPyzCUSjZ0q7YWAKCVaAKLZNOFENBGrAm7a3mdjbWstx3nD86EEeEDx93A5777U/ydv/GLHHb9B/I5ODVzX6fSRvMTf+RHOL/csLs6cPX0luNxpKoMLz96wuHmKG9+FsmVtbLyPexGxn6S7VxOhGlmd3Ngte2w1hJ9QmvFumu4fb5DGSXgFB9xrcMqIw9lIbJe1cy7EWMUh91xCfh1rNcNVmu8D9iqou4Qb03MpJJRRgLBg49sLjfYuqKQKQlyzJhG/ALTGHBYrDFsHqxJT/e0l4387qUwe2kynRWCn/cBY82CLs88/do1L73xkO3lmt3tUTTNZx3DfkBby3Q8GcpP9c1fd5O7EDPtElKcFgmHXpq5XISel8pCuIwZu15TFcU8eZRSxBjvJcsli+ZfW01VV1SN5OTcNXwxxHswkUjFCtGLv+jOR1u3lZBqjVnoZDUxBA674X6DjwKtNCVnqJdmLsqWLcx32XeFFCNVs6ZuG1JieXgo2KrCOEO36dhd7fiP/19/50W/Had6gVVykYeyxQd3t5XWStO0mnlODPsJpQrJJ5QFP0pEgNJLM2YsMQr0wFYyJb8beEvYcGEcRijiozNmyZpTMizxc6RuHNpqok80q2rx5hWKLxLpUSsqtXiCfMBGK1P4VGhWAlQpBera0TSOsbbEeNo6n+rDrx/6qe/BLDAgV1lyEo/z3ZkyDTPJi490s+1QZsX1O9doZwijZ3O5ZZo8w+ipasdwGIipSEzB5GVbvUggtVVsLlfkkFFFsbnohOZqJBrhna+8SwwZWzuqSlRjw2FkGmbQCqvNsokXknpeMht3V3soYtWprGEcJsbjTN05xizZyn700py2FfMwE2JawC763l+ulaJZ+A23V3t2t0f64wgIDEVpRQiRdr1GaVGRiB9XXruUxSYQQyBEQ16WJndn7OZijVniC2zlZCPoA92mJcxBQGJH+fs2bUW7ask503Q1Crh4sOUrv3ZE6cTuas8P/MR38e/9W//RB/I5ODVzX6faXmz4mT/5RQnXTTPXz27Ynq9Yn294+2vPSDHz/N1rckysL9asNit2N0dZky++E+9noVsuZC20xg89zVqiCrQR+dXYT1hj0EEzR0/V1oQhM/Yz5ExWAkaZhklydJSS1TeF4iVDYxpn6qYSqYvRKCMZcfOYaJoKKKy3LegOlKJpM2rBnaeYmQ6z5Iw4S11LuDAl4+pGvDUxoHAYpcgU1udrvA+kkHjy+mPmX3uL9bZl7Ecm53FWUzWSeVVOZ+SpvomrZGnkSimL3FCaMBXlEI5JdmpL/qhs65oafxdgsExibZbw1Kp2aLMEfivZeM+jyGG0VQz9yDyK7KYU8dfmUnBOwr69l21IjEkmq9YyHi1Km3tv3wgSytpYmq4m+IApZqFiiipgjh5JkVWkmMRzpzR1VxG95ODlVAgxkELkP/i3/z8ipz7VR7Yk500eGGOMMnzQBnvmmMa4QBHugGCJMIO2ihTEcuBqGWiCWOdSEvmXtYoUgbLIJpefZR1icQginTRGpJYhCFnPVpKH5RaYg3WG6BM5S9wBRTLkNAI+SbFQLedzKfkei26M4sQ+OdXXo7bna/SivEheKJQ5F9pNQ//unvW6IZjAPEemKYDylFKoWgHfxSmgrZbnuSRRA9pZnDUoLbL7MHmqrlribOTzPk8z41dHLh6dEYMoN26vDnTrDrRi7Gc2l2sOVzuOuwFjZejRLhmnt+/cYmvH/vZILmIfqLXQ2DuzIsySaxxDwihFcQLGy6ksOaZlgZNE4TQsW3xXW1LKHHcDYy85eEopzi83KKXvc+9yFN6D+NsmoYGeryQjLkOOiWqxHbmqlvuKM8v9RP630pLRfHO1ExjM4rHrHrVQCkoLVMYYzXgcMc6SYsLVBj97Hj6+4NHLlzx7+/p9fw5OzdzXoc4fnPHf/Z//RdpOTNJf/bW3uXjpgrpxvP2159w+38kauXacXa65eLjh5vmBUmDoJ7p1Q13Ze1IcOVOvKvrbHuMsx/1ASom6a0g+ihQlBKbZo3LBaE3dVJIvVwolJSHkNQ5lFQVBxTZNjXaJ4TihlUw7/CQyzBA8SlmU02AMfpzRRnJ8bGWIKVNipkSoKkfRiqpxuIXs5X1g1TVEH2UrMUXJyQMohfE4ytdNgcevPWKz6RiOE5uLFced4GTf+JaX+bbv/jR/9z/91Rf7hp7qVO+n1HtNXPhNHtdcCioXmawubmallHyt0ShjmI9L5tbiJxIcukjKckooNMbIgRNCvA/4LiXTH6bl+iuChXbm7odAEfWAPZqF2GeW728kdNVVku1TO1abFmNle5JTZr1ycmCKyRVtDCGJBG6eZkD8A8fbnqqtGfcDz9+54r/4W7/8gt6AU32j1MNXHpJLkgw3Lzj1umkWwmRmPAq0QC10ZmM1yUfscm4JnCQuUR7iGZcSm4GfZ+ZRFB1f+qVf55Of/6zIl1PGNVayEX9TtLefI3FOktuolGS2moKrZeOXl4xFY7QMYLRs2bUxyzVYiCFQ7oykpzrVh1wpygbuwcvntJuW/W2Pc465n6kqyzTM2NrRruXefXN1i3KG4TDQtA3TNBN8vJdibh+fM/UTu6s9dVfLkGUK1JuGtquZ9hOby7UA7
NqK7qxjHibm53vcIrf3PrB9uObq3R0pJNq2RSZaGwABAABJREFUQjlLSZGURHr44LWHPH/rOSllmrbGTwE/Byj94gWXzV4piVQcCqjrJXfZy/dQSRYYqshgJqXMNPn7geU0zksouSWmzLptWJ93tF29hJ0H/DSLJLrAcBhpVo3YkbT4de82eOKHE7m2UrLRyynT7wcuHm7pNqtFRhmpG6E89/uRuqvvfeNKQ7tq8F6kn03XcHaxOTVz3yz1hR/6Nr7nRz5PipGbZzvOHkqmx5d/5U2RGi4JAqtNQ7fu6A8jfvISXqgVYfKUmJbcG02/H7l9ptEo8Z8ZcEtjFb3IpvwUKMt693Dbs3mwhSjIcO/9vWl8niLdpmV/vcfXNajC+eWGsXbSEA4TRmuatiIVGI4TZ5drmq4ihEBZMM5hlnyd7cWamCR3yjoJkKxqQ5cFebvdtIz9JChXI6SkEKLkCAG76yPX71xz9vCM42HCLB5AP0c256vTVu5U3/QlsBPx2hgtcpi7APB8F04cZMOQlg3d65//FG9+9Rm726M8NAJugTzkUjAUwaWHRElFTO5FJpjtSjP0I1XtxHC9SDgpkHK+hz5YqyWfcg4ih64dzB7nLbOecc4CiqkfaNctbdugFPg50LS1eJ8WL58xZjGwizxHAH8ytEkx8n/9N/89rp/tXuj7cKoXX9/5I9+1QE4yCom76NY185TQSlG3Fj9J82WM5DkVLZ9/P4rHM/i4bMWU5J8u8B+supdbAkQvD1S63G33smzitDR0KS6/w+IHFYgJi6yq3EfZRZ+YSsFWAvaq1g5Q5CTDFWPk90i5nNq5U32o9canXuXjn36VnMTnFWKiqh2rTUecRSLvakfT1miKZDle9TRthXGyyfJzYHu5YTpOhJSEUnwYCSGxebAhHxP1qgLeG2SUXGhXDWn2DMeR6AOuttRtLRsro5mGsOS9FUxT0TYV0+RRSxZpDPE+57HbtDSrGmcth12PrSuGo8ivjbVUC2l2noKcJ0WaqGb5eVqLV/AudiCl9Fvykl3l0EqhrcI6kVtKbnPAz37JaK2gwNhPEuvTVOLpHWe5jpVazlTNu199F5Rmvek4e7Cl3XTYWiSu3aZBKY2fgwAFc6I/jlw+Ol/uVXfAJQg+8h3f/1l+41e+RorpfX0WTs3ch1zf+QOf4y/8i3+GFCI3z3eknEnDxO72yPF2T0qZ1XbF9nzNarvird94h+3l+p7MVYog/S8enzMeZ0IMdGdr5sHfm1itlol41ThYPnxq0QIXFMY5DjeHBRXu8EveXN1WrLaGafD4MRBj5vxivXzQ8r2eue1qQkhoBVUlWzkKdGsxm46Tp1k12NqhK01dalB6yQBKaFuLhKtYptETfaJu5AA87Abq2jIUePzqA1IuHG57Hr76kAdPzpmGic35in7fo5CbwKlO9c1cZYGf5JglkiBldF68aBTxDBk55FIQ0tf6Yot56+aeflUW/HmMEVc5oVyGCAWKLoA0UO26pV01NF1NLlm2BhnqpkIZI6CUlBcEfGCavNx7isjP9BLQTEKM6kDOAn4Ic6RqHMYYXONwzjLPAZUyRRdM0hwWeQ1kbq4OPH/nhtplgTyd6iNfcY7YSiipwD02XJtCCIEcRbp4t30AJc1XKiK5KsvQIywe05gwzSK9XDZwd1UWUEpGSVbcQoeVuKxCSQiNtUDyiTB7oIZaoVVBK5ZNeLnPpDNWo5Q0cOUue0ty0NELgOjU0J3qw6pHL1/yysef8PTNK4yxDPuRpqvxs2c8zFRtxeZ8vciNJWB7fd4JxKrt6I+TNDrGUFWONBeGw4hSsD1fiQIE6PcDKWbqrpYw8lVL2zWUynLzbIf3kW7dkHIGDJuzFc/fvhHJY4wQ5TlVLdP44APXz2+pKsflk3OImYJingNt1ywRV46zR2eMtwN1uzRjTjaNwSecE7VaXTmM0QQfqDtpyHZXB/Y3B/wcRHHirNxXtEIrkTjGJD5dFlAMKOrGYYwMLe/IzbmIL30aZg67I3GOQn+vLNouOX33GZaJasl2dRRR041Btvq5cPnogre+/C4x5vuM2ctH5x8I1fLUzH2IVdWOL/7hH6FpHP1xAAp9P/L0a88JXkAB24sNj195SEFx/fSW4+5A3TmqtmUaPdZaVOMYjyPTFLG6LFN8RSl6adIEVnBH0OrWLa6y9PuB4djjKkd/GLl8fA5KMLMpFdpVLaS5mCRkXAkpKKRC0zX4+YhzhmkKEkJZOQqZ9cNumaiASY52wY0rZBKPUnRW1vdGa5l4FtE+pzmKPycJtrztaqq6wjjL/vqItQZjDfMUWJ+viT6gl4w8H+MHinI91aleROWiFr9ckgfKuJhwjCKmgtYFbWXjlnKhaMvu+ohxZtH6Z7TWItsAXC1ysDDH5YARamUBdld7xjGw242yhaOggc26pqkd1+/ulgnsitW245WPPea46+83+8ZqnBF59R2dL+VMjBltEyZppsHTFsm6y6ksHgbD4eaInyOzDzx9Z4+tLLvne77zu1/7zcq2U32EKwS/NDvic6tbtyDBZaiRUiDfeUymQIrlnhgJBaMVSYF14vlUSCMmGXHzfbA3yIatquXryEVarSLXn1AztWzhUsJVGqWqJVhcvq4oiDFT1SJDLkDb2eUBURGLhIQrpbDO4ip9auRO9aGVUvDk1UfYpUlxlSbOHmpHRtGua4nwsJroC3UtjVAuRf6bNqy3tTxjaYWtHS5nmidn7J8fGCdPTIn98z3dpsU4MM6w2q6oKsd603L97g3aGdZ1xfE4UDUV2hrG40jwnpt3b1mfdYRZMt26TUtVW/rDBAmmwd9fh+uLtVxXSWw5hUKcPM26Zh68eLXjsoVPGa1Eds2ibFPL9qPAEotztwUTD3eYAznWVLVld32g34/0h2G5Vi3OWVbbFTkVplEy9WxlCN5z9Y4MUtdnK9RWyTmrJaLHz4FaCwyFgsDAcmaeZoy1lCye3H7X89JrD/lP/nqQGASr8ZNkt37xD/8w/+7/5T98X5+HUzP3Idbr3/IKP/sLX6Q/9PTHgeEw8fZXnzEeB1xlObvc8ODxBcfDyPG2h5JZX2you1Yyd8aAbsE1juNuYOo9Fw83YkgNgegz3bYTip2WaYdSajGTIp44hQBMlObdLz/j8uUzuq7GLRLHFCLWyVavXYun7bgfWK1bzh9tePb2DdELREVbS87iqTPWEAfx7xlrqDctGMtw28tKXiuij2StaVYNxorxs66N0MSCNIBNW6G0ou2qe1N6mAPP377ipdcf8+DJBf1h4PbmyHic7s3spzrVN2spBfFuY6BY8tpEyiVo9AxZkwqElNm++hLDMN1j20OSaaNZIgq0VtIcaZFdh5DY3QiRzPvIMHhKUcvPUaSU6A+e7abhpU88IflAzgVVMnGeefLqA4LPHPeCTNZGY60glY0197l4JRcJPA+RfFiauZzZ70YKAl55/s6eqm2YpsB2yRj61V9++7dsTE710S0/zhLSTUZbe++NKyUuMR2AllWXsRLmnUKiUMgxE1MUKZWTh9a7AYdGHpSu3n1v
A6zUAjwpsgGXc1NhjAwQ893mz6rl95AtYFpUMq6yhJiICtrGSrYcRWRjRmPyXSMnntWqqU6buVN9aGWd44/9uZ/l+ukNaE0IibZzKGswTlgFdVuLVWfwHKbA8fbI2aMz5sEzjzPjMFK3FSHOuMpSUmIcZqZ+xlSWOSZs7ZjGmUcvX1IKrFYNly+ds7vZ887XntNtWs5e3uJWNWmWbfru5iBRNl3DatNR1nIN123FNEyM04SrjBAsm2WzNntS1IQpgFK4yjANAR8Guk1LfxyYhpm6rRbrT30P25q9yDmVUcQo+c3z7PGTp2rEcqC0YrXtRIEyzPjZ4yq551hraNpavOZRAsGPu6OwLJxle7mWWJMs5GnrpHWap5mULGGWK905t/jhMyUrUeE4i6tkq7k9X3N+ueE3fvUtmsZiKsPYTzIwer+fh/f9HU71D60/+9/6r/DszecM/UA/TNw+u2V/vQcKD195wMe+9VWefuWacZhwjfhZNued5GKEhK0M9bomjNMyJWfxw3nmUUg/eS5opXCrBj8HTOWIIVJaWYdnKw+JdeeIIbC76dmeryjA2M8oY/CDZxrGJRRxZr1dSTwCjnbVoFAcbnqUKlw8PMdPgWYlXpjN2Yr1tuO4l7y7zYPN/VSk6zpKyVSVI5RIVTvCHMlF5DXGaPRyUYyD6JYLhbMHW8ZhYtgP1Kua88stbVvTL5CUU53qm7lkUphFHv3ev8RYRYripyuqkAqMc+StX/kaD155TIrxPheLArnItT8Ocj8IMfHs6UE2GjGTEvcPlsZZXGXwc5Tr0QeOg+dX/r9v0q4cj55cUDnDOHrS01vaVcN62zLPkt+jtaYoRYiZcZjwk4AnqsYQfOb5sz3720nM7l3Fq596gg+K7aMzQNOdr3GV4/qtK26ePTtdx6cChD4ZY4YlsL5u6gXOJR5wVxlyThLFkaPIgnNZJvBFPt+53MuYQOivapFO/ubaXe949tZTLp48lEDlKLEDxgltqIRMRLLsbGPvg4qVFgiCKoW6dfJ7LwHiJRdsY2DZtscQiUExDhPTOH9dX8tTfbTqjU++InATa7DLdlju2x3zFKgrkf+Ow4wyiuPNKJL8mJn6idW2Yzj07J4JuOR2muWZMhfajUgHcyrU5yuG/UhKMrhXSjHcjtw8O7C53FDV8gxXcoZcGEahrtvK0q1avA+0m5YSIsYIAyEFGf5tLtaQIWdRkdjK0q07mrYixMTmwZr91YHhMKCU4njbo4CqskvEDuSSIYOrNDkmbp7tub06SAZzznLtLl5xV8l9ZZ49IUScc6w2K1xdCQhmfySFIFDBmNlebKgayawbDgMxFFbbVgibVqGVFjCTMff5cqUUqtqRYmQcvHjjFxlo1TheeeMJv/ZLXwHlyLEwHCeUgqat39c949TMfUj1nT/wbbzyxhN2N3uevXvD7dVeUKvA5mLDy2884d2vXLG/OZJTIqV4nw+XUmG97ZiOI0ZrjnMSlPjyEDbPnrqrmPuJ1Uby5sZ+oK7F1DocRmJMdF2N1ppxnAg+EEJg3geuVg2PXz5fLtbM5nxFt67xMYlxdfJ0q4aw0HaGwyTxCD4BhfXlmhwTq01LTpmYM0obVEkSErw7SnCkVsQoa+gUM7ZA1Tmmfl4iBsTEaq2VMEgvF/dq06Cs5uqdG87SGY9fe8SDrzwVOufJM3eqb/IquSBjCyUoc6XQiISroEBrGXikzO4w8h/+v3+Vb/3Cp3nlYy8xHwfmUTZfxlq6dcs4zNw+P9KsV1RtI0h0LSb1qq0YjpJ5UzeOuhWZdp3rexP6cBhJxvDlX3/3HvMOUHcN1up7imXVScD44Xbk2Vt7jtd7Xv7EY+YhMPvM+qxFa4kQ2e9mtDVC17SGdtNStzU3z25/i/TtVB/tEpQ/KG0w1tKuW3JhwX4j03alyEue031GXJYsuao2HPf+HgNeCoRZ5FX/5aOiLE2g0aJUsZW6fzgtBWylsZWRRWCWjVvVaFIsuMqIciQn4QcZycMT0h2klFjoC3JGKYX36bSVO9WHVj/18z9C01RMw0jT1qy3wlpIi8/UNhX9fkBrQ38YicFz8fID5kGatv3NYYGDWMIstpYQImEUQN40BarK8uwrz3FtRd1WrM+E7WCNIodAs3jUpkEy4FCK4/VxyXJryEjszrgfJKdUSySWXsB3IUSMNmhk06iXaJ0UE8ebI66W/EddNHGOVE0l0TzWULc18xQosdCsK3nWHSYB7MVIigLlM8ZgnZyVJReGfhJ1m4/UTYNx0uAdbg/0h142b5VbMuIK0yi5rrauaDqhwJelMXTuzvak74dOQvsqWGuWKJNEuWNBvBR549OvcPY3N9xe7elWLQAvvf6Iy8fnvPXld3/Hnwf9gXyqTvVbytWOH/5930sMkXffvuLtrzzl9vmO6AOvfPwxH/vM6+yuDtw8v8WPMymlxfBp6VYN8zCTU5aDLWVBMWtNSYXhMEg3vx/JqWC0YTiO5CwBw7urI9EHpuPAPHkxiKtC1dSAxs+eq3euGRfttHVW1sDOkUMkhUgYZ/a7I9FH/BSw1rBad6zWHcfjhEKxWrdEn/DTTBhmzs5XAkMZJoEnLGHjxkkY8WrdiDlVweZijVvIenEKpJzRxnL2cMv2wUb0zYMHRAbq6orv+b1foF23/OQf+ZH3kOqnOtU3Y6ll0wUL7GGBJujFbJ0KcdlYvPvsgNaa1z/1BspY3LrFtg0xFWaf0M6yebBBG808eI43PcNxFiLZfmD3/MB4mJjGgPci9ejOWpq1AIuMMygjlK+YYLeb2B8Chz5yu5vYHTy7g+d2P3P9vGccshD+rENXFTkrqq7FVA5tLe2qpWpqtJE8uqoRP6xzVvwUpXC82b/od+BU3zCllqFGWXIMa9mIBUGFp1SWRi7ef61SLJ7xwjSIzFIw4UJkNVYkl8/efioN3G+q425P4b1Q8Ts/HIvssqoNaDm/WDbfIKAT6wxVU1E3shUwWnzjdxQ6yauTB7d5kg34afR4qg+jzh9seen1R4zDJP4xa9HaULU1OWfqrrnfCLnGorRivV0x9fPi5TJ0qwbtjHg9ncEaTZz8feRHjolUwLWO1VlHu6rZXqyI3vNr/8VvMHuR+9dtw9zPqJzp+0nk0Yu/WyEyyqqtSCny1peeEsZAznmRP4LSsqVbbVsKmnbdMk9Bzg6jCXNAF/GprZaNoTbmftPWrmqMFpJsjJl58syjZJ7qJUhdMikd0zAvpM4odgYFfvbsr3dcP72m38v2crVd8uZiut+8lSQe8lLK4skN3IWOT8MsYCQQynuRUHHjDEprjDPEmLh+vmN7vuH1T77MPHn6Q4/RywImxPf1mTg1cx9CvfTaI77/i1/g6uqGZ28+Z+oHVCm8+vGXqNuG66d7bp7uUCjOHmxYrSW/Y7Vp8T5J8KEzrLctw37JhkqSuyO4b4VzlrarGccJtJLDLSX8OOPngPcBP8/STDVuMXUbbOUYjjPT4Jl7T5gljy6WLFS6yuJqCYcMQaaharl4lFIYZRawykjdOFxTkVAMw0RKYiYvWjTIy9HLNMykAvM
UCHMix0zd1Asq/T2ULAgWVjtFu2lZbRpM4wghYY3hyasPF/z5C36DT3Wq91GlQMoCCklJrpGcF3llEYR/SoWYC1998xoUQou1hhQyxjnOL9fkUtjf9ASfiSnjGsf5kwvOHp+zfXTB+UsP2D4858HLl5w92LI+60ix4KdE8IUUFTlpclI8e/OGs0fnXLz0gM2DLWcPz1ifrWlXHe1atmpFaUKQ2INu21G1IonbnG9YbVe0mxaMpmodzjm0vqN9CZ1s2PeUZ+/w/O0TyfJUUne0uLqpqLqKGKJIhpccphQTGWmmnDMoJfLiFEVaqRVYe3f9SPNXsoT3/oPOiadvPiUn8deUlPBzWDKmlGwQDn7x1okFICMevjBLFp4xWrbnyFkX57j809L0Cf+LnArH3cApnOBUH0Y9fOmST3/bx+XMyBL07Vq3eEel8ciArWRoPk+BGCU/0VojkQFGiK7kzDxOHA8DabHkHHe9+MyQSABrDQ9fusQazbGfGOeAbWpySuQC54+20kwtuY9nlxux51wfuX16YL3tGA4TMSXasw7rhKaeYibkhRbZTxijOO56qkYIm/MUaLoabbX8nXLBWCFTppSXaBx/n6mac2IaJqZhWoAkhZQzdqGy+ylI9twsth9FYff8lt3VDq0Nm4s1Zw/PqGtHVbv7RlAvuckpRcIsAJMYIiEkeT62MpQtWaTb0+jpDwP14te7a9QOt0eqxvGpz32M9aaTSAmkmf3eH/uO9/WZODVzH0L9/D/9szz92nPe+cpTbp7v0MrwmS98krppuHr3lpITuSSBfxiRjNStQymNUnKhOmd460vv8vzt50yj5H8oLQQ8Wxms04uOd6KQiTHiQ+LsgWy97g7J8TiTQpKQw5wgZ7RTXD29lUljW0lgdyyyPnfiUYghcntzEMrRYqZdn69YrVtSyviQadqK6BPDYeJwM1A1jroWn91qu6LpGpquIoZE9IFu20r+ho+kZXpZjCLMXky6ITEP0/3WMYye529fc7iVtf35xRatFd/+fd/6ot/iU53qd1xpoUTaBUiS7wlckmWlrQAa3nl2YPaR1z/1BtZKiKqAhAoxZNab9j5CpGlrtLNUjaPpHHVX4ZaDHOTBc38zEiOg9P0WsKApRTHsJlbnK5p1y/ZyRbNqWZ2tcHWNcY66a1htVjRdi6sd3aZjdbam6ixWFzZnHU1bs71cU7c1VStGcJG4GGqnSPsb9n14oa/9qb6xShuhReYswABtRI2R0xKLoYV/UnKhIHmId8AfpVjgJEuguNGUJX5j6ieGY///9/Nyynz5F79EjAk/R/zyUIYSCaY0eVlAQnO4J+cBQs6MGWP0snWQzKqUZdAKInGLIaFQVHXN49eefJ1f0VN9FKrkQr9kdpYlp0bgVIVp8DSrlt3VgZQyw2FYsuEy1+/ecDyMYMRPF0PEWpHGH297YkgcFnLyndzRVUJvNc7yzlee8e6XnrI6W9F1NavzNdF7KJn97ZGUCq6pGfuZoZ9kKL+q6Xc9bnk+nPqJbtNSEnSbhnAM+CkwDl6G+iVz9fSWqrU4Z+6plKutbAfr2omndpaoAJSS4WiI7K8OjP2E95JBqRYLw/psxTx6/CwDoruV+e5qz3HfUzUV3bqlW7WYJeZLaclyvTujx35i7Ge01XTrVnzuRraf1op3V2BKIgUPszR+0Usjl1Oh3w+4yvHqJ17is1/4FlmQaEMuUNXVb8nF/CetUzP3Addnv+tTvPHp13n29Jqnb13Rdi3f/aOfRzvH8TDQrRqSj0sAL8TJ061b2lUrQYYLISvnuwNFyJVKy0RScuS8oJW1oukczmq0EdBBjELkygmZhAQJ764WlOrqbEXOheN+wPvAeJxQiFfAWE3XtWil2VysKVGyNZRCMntypl7V1KuW4CP7/YCtLJuLFe1ZxzQHyaNDzLjT5Bn2IxePzqi7hhwTxmjqWnCtAMlHtpdb2nVzD3Y53PRsH6x5+MpD6sreH9iXjy/45Gfe4BOfee2Fvb+nOtX7rRQz0SdKkuvrjrRVllDSnAtzSHzlq88JIfHaJ1+9P6xKFnnHzbNbqtrSbVakZVAyHnrcAnYYDwN+mkRmUjtSSDRdJQeNVigl9xnbiB+uKLh6ay9nXNZ0a8mmO3vUUbcNYS5yH6kdVVOzPut45eNP6NYrrp7e0m0aKmchLbSvyrLadtSNIgVPfxy5ePUJX/iZH3rBr/6pvpGqa2uarpKHGiNyprRszAqFkmRwYSuDspaSlfjrjFmGG4acyuJXkS2YTO0jYz/+A3+mn2b8OAsojCKROSnLZi+DnxPzJJRnGYRK0xhCfq95NAa0YZ4kv0tpdS+dzku6uHWWBy89+Pq9mKf6yNQv/LN/kJxE1ptS5uzBGc5a2ULVlsPNnjh5Drc9N+/uKLnQLAowbcRus7s+YCqDD4kc5XOuVGFzvqbuGhn4pcTmcs2DJ5ei1oqyhNherNk+2NBtOpyzi0Ik4Jxls22paslas7UMFmNI3Dw7UJI0Usd9j7aauZ9QVkLD67YiJ/G0OWc53BwlGmDyjEsw+TzOmMowjzPHwyTXW8q4yi6B4jPTOItsu3LUqwa90KKF3jktGc6K4+7IOEwyUDJiB9qcr8klcfX0hsOuZxoE5lKyDHFc5Ra/nQx6/CRKOJG6mmXIJLl2tpZnV2vN4qMFP0cOuwGlNJ//wc+wOV+hjaKqLJcPz/m+H//C7/gzcWrmPsDSWvMTf/hHuHr3mre+9A5tW/M9P/p5+cCFSLuqaVcdrpIJuqySLevtCq00Yz9z3I/EkLh+d0ecxXjZrRrqxhEnaZaM1UxTQCtou4qhnxgPI0vIBeRMt6pZbTpSLvT9TEx5mc4IEWwePbOPMvY0LNMMWRt3WzGK1m2FdRZlLFXtKElw5VfvXHE8DORcaNqKeZwJk6c/TMSw5Hnkwtn5im7dkqNMkoKPRB/RVjaLq23HeruSQMtciDEuBvjC1dvXVI3j0asPuXl2S0yJZlXzPT/+nfcehVOd6putFIJHVwh1r2nFNyD5cjLRS6kwjIGvvX0LsGwt8j0kJSaYZoWrLevLFX4YKDGxfbDleL1nPIzcvLuTKI8sW4eSI/31Nccr8enWjXgN9PLQqbQjLlSxhCL4tGTNabRhCVOVTeJq21J3FVVdcfFwy+VLZ1hjQClCEADE4frIu19+xttfvmYaAhePz+nOz3nt06/y8c+8/gLfgVN9I1VeHpJKKdjakJb81TtfnK3lrChZNmZ6CR4uiyT57tqQrUQiB7ETaPMPH3HfXu0Yj0fZaGSgFOIsgb/KCBTFaI33WSTMUewNJYufdR4C8xiY+nmRfGaUUihr7h8Mtb7bMp7OqlN9sOUqy/pstfhIpZnJGerWsVl39+j8btugtaC2Yoocdz39MNHveqbjxDjMEgZeCrvrgyhCcsGPsiFTToPSlJAxRnH19jXdWUt9f2445mEGpWk3Hd12Q7OqUcbA4oFNUxBiuZGFQUoJow3BJ8Z+wjh7v5lXSpqabtWSfCR6uSZDEP9bLpkYIldvX3PYD2ijGfYDQy9KrhQT8zQzjR
5tDK5ylIyATFJiGmW5kXLGz4GYEq6+iyawuMox9iOHWwkGv4snuYMw6aW5LRmmYWYe5sX6VBY5dbnPnpPIOxnMKq2XQPFAyYXj/ki7aths1nzmOz6JsULOLSVLXMPvsE40yw+wPv0dn+D1T73M3/kbv0Tb1XzhBz8nEsMoshHXVtSdY+gHtNb0x5mmq8ko/OjxkyBcr57eSPCgjzTrmrptiHOgauvFUGoxVlNXNeM0cbzp2V6uqRrH7dUBZw22sUAh+oy2oIqhri39YSD6iKsc1moePT5nd3ukZJGTKCO5HXfGcT/OlJw57EeBlpTM2eWWfjfIwasNTVfjx0DlxGjrc6Z1MvHPSnO4PlLVlqrraJHJvTaGw+1xQdmCmwKrdY2tzBIqOdJ2LSUlVuuG63du7lf2q/Xqhb7PpzrV77Q2m4Yf/N5P4PvxPuC0IJP9GJJsBmLm2fXxvT9U7giYLJN/OH9yBtqyvx65ePURKUnkwTRFFJkHrz/C9yPPv/yUqqs5f3RGs7lgPMzkMHO48VjneOfLO6H6uUIYei4fPmZ73nI8ePw4EqeZdtuhjRwVWmuOtz2u0VjnKAW2ZyumwdOPnrkfgQS6YrV2dJuaulvj2oaqq3nnS+/g/fszep/qd0/FEGF5EKWA95LxJFN7T5jloa5QFloeBB8lLgDZ2uWUpREMmUK636b9dvWlX/oSn/nez3PHP1Ea1F32ahYSZp4T1mrmIVC1Fj8LhKVql+Dxsnj1lkFMLpKlFXzEVpamqe+hCKc61QdVP/snv8jDJxccdgN+8qScaFc1pSgyBWs0/WEkjJ6hH2k2DSVlhv2IcZbsLEWJ18tYzfG6x9WOsRcsvqksm8u1bLxjZn3WYStDjJF3vrTnY59+haZrSVEy6fwwC7GxKOrakbWmunQcr4+gYDyM3F4doGQ2D87QGpJPVI0jBHmGLEUosdpoVpuOunHMc2A8SERALoV4fRBCstZYaxl6kVHnVIgxcfP09t6S07QNxojMcr3tSCEQQ2JYIkNiEPlo9GJxqpoKPwuPQcAlokRJMaMryaDzs5dsV5CbhrUiRU0CQFmtDcrKDcVPsrkz1oi/LgrtVluJe3CLHen1T73CYd8v/31R3f0O69TMfUBlreEHv/hdPHvrOZttx+e++5OEOTJPErRbdzUpi6wqhYRyUNWOhy9d4Kcg27XjSCmZw02PMYp23ZBCwo8e4wyFgjNW9MX9jHcJbWFzvsJYzTx6mrbGWH2v1y1KYZOi6oz46JLQLcd+4PqdG84vt4SQICVKEvpPWDLg2lVDiJFpmqlqyzROnJ2viSGiDfiYGPppya2KHPdHcum4eHTOcBADqp8CXdcQl797GGdc5bh8+RKAefIoDfMYGPYz8yD4WecMyhpyjGwuNwz9jDOGeQi88vGXX/C7fapT/c6rJDm4YMGsp4LW0iipUph95EtfEUjI+cMLnrzxCpTIYRwpubDe1iJHi/Ig6+ckE9QsBEBtLbVWGK1pN2uMli07WpMx+GHi+HxP11m2lzW2rkAZDle3PP/K24z7jnq95fr5kbZpKCWjtCOETJ4Hms5yfB4wVUv90jm2aVnZSCYz7RPznHn4qnjujLW4pmIaArdPv8bx+RVv/cbbL/T1P9U3Tv3yf/5L/NDv/yHUAhgoWc6gcZjvASYi85dJt7WaFJVEBCzxAahC8kkIysv3ffs33vxtf27JGYV43VSBSMIpUAsNs2SZrrPEH8xjxDiDDEgTVWOJMWOcRRlNjOV+GxhCAF2oGiME51Od6gOqs4sNP/3zPyoD9yLWlzAHXOWAQpi9DMSjYZ4Q6AmKrDWrizVTP3L7bMf24Za6dhwWuaMxEILi4cvnhCHgR0+OGV3AVhVf+dW3uLnaU9WOGNP98NEajdcwHWcZtEyeVArNugYl27jjYaBqKsbjwNiPNF1D0QL1cjFzvDkw+0S7boCCj5GiNM5ZUlWhKyvN6DDR1JU0sCHKPcMYuu1KXg9gGgNaKapaIClVKz40P8mQKKckfxa9gAYlssDP4T6b8g4yWHKmXbW0q5oUk9AvEbtD1VRLxIncpuq2FhJ1KYvJV6GKEsDgMC/3DghT5LDr8bOAXbbna177xBPefeuKMPn3lb96auY+oHry2iM+9fmPo5XijU++wrOvXZETFCWSkWYtXrRcCq62XD4+Y7XuuH265+Z6z+3VHusMtjI0q/p+vZtywRiZALpapFHH/UC7kosl3JF85oCfI21XEWaZFhpniSHgJ4k+WK1bbGUpRVb1Siv6w0DTOMZetNbGGDYP1vQ3R1KWFXKYA9paVquWaZxpu4buYsP1u7ekLrJ6fCapWRqU0fT7kXbTiJdAafphFBpfNiRj0NZwvOnFkDpOoAzNtqG2MqlJiyQAo1h3DeN+ZDiONE1Ft6r53Hd9mkcvP+DZ21cv+m0/1al+ByX5jtpoQKajJRdsZZiGwDvP9tzcDgDYSoiQ42FcsOeZsZ+5eLwlhrIQtLgPNjaLvMPVAkzJy4AmZfEfGa3EvL7uMEZCwMMsVM3N5SWuguk4cf645vHrj7l555qnX93RdQ3d5YagNP2QQdVY5/CjRxXx/V6+dEa/O3D28gNcVbHaCj0zp8z1O9e88uoFq/bxi33pT/UNVfvrPdpWQsyrHNkLEh1EZqkUIt/1AW3BeyHYZQAFdz1XjEKBJmes00zD9Nv+3HnyPP3q2zz52GuyEdDyVLa7GujWFc45qtqKTFIpXG0A8bQWZIhinJHw8CJbc2c0MSaM00L2SwljTzE6p/rgyjrLatNx3PeE2S+DdcPZ+RbjxEs67I6EkDh/uCGnyDzN5JglciMVmlWzDAEDOUnIdVHSgww3Il8kF0qItOdrXG2EnaCLEMwry/HmgKsr9jeHhaRc4UdPSJFcCs/f6nn45JJcMtbZe4L5qqsZ+pGYMsfrI21XU7TCVQZjYNiPNNsOaxQhg3EaYsFUTqI/poB1luAjxlqBlFQGtCXOgeOuv5dJKwVn5yvZBKZEXuSVeRkCyXUvpEpXVQJeypliwFizEJkV8ygLB62VeGuTSKvRBW0NBpFdCh1TBkXGmYXCKxR4rbRYlZalxjTMtKsa54QSOg4zb3/lKW995XeeM3dq5j6g+iN/9qfFcFlJeOH6bMU0BlKMMnEHQXPnQtW1Ig/Jhapz5GeJi4sV+5uDJMobja00Uz9J1o3WKA1hCowgOFlE+29ETSkGSw39cUaR6dYdVWWAzNyP7Ccxp8bZoxCKmJ883aoSg7mTC7Yg4JIwzlgnORndukUtuNs0ZdAaV1dcvnSBH0V3XVWG4ZBJMbLdrnGVJYdEt2lI1xGFwo8zFJGhmMoRY6Retdw836MrQ0Req6IUTVdzuOmJPtK2DVVTEX0g1pbXPv4Sj16+PDVzp/qmLKWWrCwtAeFaKbJSTKNQXkPM91/78seeLFlXhRQC1inOH64l3mBpCFMuaIrIOpzBGrNs5bUMdZLkYck9B9YXHX4MKCckSqWFaNltKlICbWuKtlQ1PHr9CZAJU6LqHKZO9xv3EiO/+J9+mctHGy6eKEwVWD+8l
AOxZObRy6ZFFUyJTFOgbk5HzqneqxgTaEXTNeRU8D6AkmFHXh7AlGZBrstQ089RwrpLwmhFRmOdNGOllPvt3D+qSk6IvafcY8y7lZVpe5Gfq8oSOeD0vZ/VLF6+HCSAvBShbCqjcFoDWqANw8Qv/Se/+KG9dqf66NWf+Us/T8niHQs+LBs6kSvHY15ipyZWm4Znbz6naiXrUymYek9KM3VTEVPC+0AMcUHvm6WpKyQKtTOsH2x48PicL/3imwyjZ3u2xhmDHyI5RUKMnD3eEqaInyNGSzTBNAVEBC3PemH2NK1l1mIp6DYt42Fi9BFTG6beUyhMw4hSiuGmF/mlkUaqXdfSEFkBDPWHUYaVVjZrJRfJQ65kS26dAEfCHGSTv1id/CQkS2mANTk67PLnS8n3/leh44oPLuUs0SRTWHxwcm4LE7CgC8SQqDtLVVdAkTN0WbAZZ2i7Br8AU6wTaFNBNoBVV3NmNNebHSkl3v3a7zy25wRA+QDK2sUfFgWretgNYq5sDK62FKW4fXbLsO9JIdN0FdNx5LjrefbWc5FXKs35o3OsghQCu2d7kSXOgc3lGuscTV3B/RRe8nViFOBALpmSCn6cSFkCTFNMEndwd5EtEk/vl9WvNRQUYz8TvORu1LWVSaOWIOO6qWg3LW6JJ6jamugD1893HI8jyhj8YgRHK1SWg/H26S37myP9YaRuaygZWzuRgwH7q/09ArZdNQyHATQ064ZHT85pVpJDl+aANoKAPe5HjrcDVVfzc3/qJ1/0236qU/2OSiklm3Gl70EOpUiunPeRr3ztvSHF9//4dzIcB8Z+4rjrF5+QGMNt9d7t+7fk3CwHjFr8qSllQR93glv2U8BYjUIIggpFt6rupSfNqgatqLsaWzmquqFetdSNoJs35yte/thDNpdrXFOhtEFpkbjkKPeh5CPH3cDbX3qH7CfmAO1mhQ/5H/SSnOqjWqWg7uSRSTwjd1CTsOSrKqCo++QAjFnABABK3fu9tZUYHZlu/qN/9NtfeVeodAupOS/eVJGRsZDsFuryQrJUZfHuobG1pRR5kEwx3W/J7zzyJYGfT1Ec/7BqKkvlTpvLf9wy1rDZroRy7qPklAaRDdrKLtRyjclFsomVpr/tl69PpJwZjhNu8aeGOSzqr8ThdsA5AwWayuGcqLRKEZmzNYowBVbnK4oq2MrKNssnwizb9BAS4zChSqZuKqwzDLue529fc/V0t8SBSGyINGOW400vodvHkZJkMzd5UZsN+5Gmc0yHiXma75slY+S58Q5MknMh+CA5eSktNqNAt+kosASECxwlLrEnxkiTVbcig45etnLaLGczMhwyRtO0lWz7rOQwV7VbtnkOpYUxEebA2I+yxVvuPUbre9m4WgZUd83kHRippEJVOx6//ADn3Pv6fJzGpB9A/djP/QDr7YpSCsfjwHCcOLtYEaZADMuhhOLiyQWlQOVkDby/3rO/6bl4sGWeA9Y5MIrNuhMMcohcPDkT46pVuKYSI+huYA6Juq3QMTIcZllHF5nqFwKsW2SUIJP0fjfSthUxiUyrXUle1OH2iDWasZdVcl1X5CwPjDlnpkG2CFVT065abGXYXR2F1JXkYurOV+SYqCqLL3KYTb3n/HKLUZrhMAipaFWjtRhpxTckP0cpxeZyC6kwHEZKKfS7gbmXVfRh13P24Iztww2H654UM9/9e95fwOKpTvXCSpLB7/X5KAkJp2SeXh243Q33X/o3/v2/zcXjB9KEzYGca5QGKEv+lUg1S5acoeATThf8YceUE8o0qLamRLCmwdaOlDXjYcJZhdbgfcTP4u21zjAeZ6zTWKPoti22rbFWUVeaGaEPyoOrwlQV4zhyrrdMvaeqZYuRUyKEgh9H+r3j/OEFdVMzj7+9/O1UH61SWh5ySk4UjUCB7rwnSqGtPPA4J9jvNAkcxY9iK8hZNmKqiIxYmbI8jP3j1e56x4MnDwXCgFyHBbkmzBKXUHImL371wjKdV4CG7MXHZ6wm+kiMAvc53o4n6vJvU48vOv6lf+ZHeXbd86/91b/BMJ2gSP+o+sHf+1187rs/xX53IKe8wPVEXlw1jmn05GXYn3ygshpVCymx3x9JQST1w3Gkbhx+ki3dggBiOHo2lx3ttsNozcWjc3ZXt4v8sNBtOypnub3eSxTObY9tK0hyQRgtJNiUyyLN9GhrOLvc4qwQZrU1xCx2HGMU43GmqizOWXJONNuOuZf4AFNZpiFw2B9xkzQ6fvKSczzKokOdsZyNXrxooyd1NXVdsTlbkZf4n3n0xJhlg7jE9VSNW+4T5V7lwvLPOWe8F2ruPMxL9IFHa7PQnxU5JZReMumWzLm775FjxjZuobfrhVQtijuKZM3GEIV1MXnqxvE3/tp/9r4+H6fN3PusbtXwHd//WfpDT4pR1rFZLpoQMjfPd8yT5/HLF7z8sSe89PpDmrZmnmau3npO09ZkZEqiKBxuDhx3o/jmjOL62S2H3VFWvbPHz4GUZZIQfWDcDxgDTSO+g6ZzEhBeZBKjtCbngrait44hoY1aaJnSUKVUMFZjtGZ3fSSXQtXIB5ZSqNsaVQp+mjnseowToEuJkf44ctyLTrluK6rGkVPmwZMzzh+dYdsKZTT14qHTWtGuapqmXqY+irqtiLPEFlilGG4k5PLs0Ra1bBuij/h+ouTEcd+zPV/zxT/0wy/67T/Vqf7JanlYRalFbqlIWSRlk0/8xleufosJ+mu/9lWmo+Td+MnTtAIHcrWlpLJs0RXKiCG95Ey/G3nw0iWP33iJzXlHmAO3zw74KWKZUfEAqSdMB/w4ohADuypZPHXrCoqAkvbPd7zztSuefekdDlc3+HFm6kfJJdJyfBx3M2/+/edAwVaiAvBzEE/e5MkIUGJeTPWnOtVdKaXuc5qMFU95mKNQ5JalTc55ARAkUhJaZSll2S6X5Wu1+LYVvPPVd5nnfzwq3POvPUUp5HvdSaiWvFe1YN1zKQKRWK7LsviNpsOMdRJFkJbhp142BalkgS98WC/cN3l97KUtdcm8dtHwz/3J7+OLP/At7ysw+Xd9KTh/uGVasnjjEnxdsmTN7a579jcH/DAz9rJRvr2RQO3D7ZF+NzANE0M/ogpYo1FGCW/BB0I/UdeC8lcKulVLv+/ZXR/RlZE4gZg4HkeqrqbkwvbBhjB4ptnLoCVnIaVbiebYXe0JIQqdtpJGJ0wzw25gOA5Yq5hnj+sqAYQow3QcZdhfCnH2zONEXnzX8+jvibMU2YxprZgGz3Cc8KPINcmF1VmHqwwxBKZhYpr8fZ6kxCFosSosr2NVyzZSLfCSZtXSrRpApOD72yNFsWwDuVfIpJiFQVGL6mVecueUZgGwLD46CtM4SZxJSvjJyzJGa7QS29L7rdNm7n2W0oqUEiUBaFBycQ3DxLAf6VYNH/v0aygjckejFbubHdc3B1gMlofdkcpZycgYAikPxJzQaJquIsbIr/+9L7O52LA9X6Gdwu88rray0UvygDR7T/QZ5xzRZxT5PeN2LkyzUHjCnCAXUpQcu6ar2N/KRtFYWacb
LQGH3aMWPwW01iIz0eCsZh4mVmcSEeB7zwG4fHB2r3CZRk8KNzTrVlbb+8zmwUZ+7nIDcU3F4fpIt26EMhQL3bpBLzl68TjRdDUohbGKdtMxTwljDFXlePLaoxf0rp/qVL+zUkpRO8PgAymWJfS0gNY8v+l5+93db/l6bQwXTy6Z+oEwzdxej1w+2iwmcHm4jHOEbChFgknHPuB9omhDnCYBnzjNcLunfnJB1ayIuSJ4GewYqzElMOwHjJaMImMK7bphfzswH2/pth1P37rh0euPONwcCEu8SUmJZtWgSlkkmywT1yh5SOdrum3H+ryTwz6eJvCneq+mYWIYjjyuH+KnIHLHBWISQxTq5CKxvIvokJiCsmzDFvLc8gVhFkR5+U0Dkd+uCpKxuj53aCPfK6ci/rxSUCmj0FAyzspg1C4hxK4W6ZW1emkIjcioJrEGdE3zYb5039Q1+8jVbQ+5cN4a/uDv+SSX5ytiSvw7f+0XRbVwSnW4r6py/PE/93PMw4yfBXWfU5ZNcZYt1vFpv3zuCyEEupVEWhUKpqnIKbFdXuPDfmB/faBZNZRSmH2QZ8MpUIpic7ni2ZtXsk2ylm6rMPdSQcH191NPt23v84NTTNRNJfl3CjaXa+ZxJuVMvx+wtZMmUkGYI+Nhomkb9ldHWRhQiLN4Zm1tmY4e5yzVeXXvUUtRrntUwWiDUpoUF2k2hapyGKdpVzUxRMZ+Yp7D4jMUf27d1rSr5v6+4aewbDcrrLY4V2ErS7qjts+RO2xlTBmll39eIoVAVDFis2KhYsq9TWstvl/k58prI9LMOAfxuNey9Xu/dWrm3mf9yO//Pq6e3vLSa4843B4lqFcVxmNhe77m/PE5KWeSDwQvEQTP37qiKNherCVAWIk80hpLt2mkqTIa1zohYMYsgYaHAQWszzraVcU8BVxdSUiq1pigSSSCD0yTp123WNuitVwI8zBhqyXbQ1mGo38PONJW+ClQN25ZORu2l1uUgrjoptfbjmZVc/N0R/SJupaJSd3VXL97i9GG1VnLcBhRRmGbinm5OLWGMM5QFHVX0WkY9hOucfgQWG07jDFMveTZRR/wk2d13hGDTEbqpubswQptDGT4xKdfp6rdyZdwqm+aUlqyeOYpUopkz8SSOYye6R8QMKyNplm13Dy9luvjODKuG6pUsM4QvBzkBXVv0G7XDc/fumZ91lG3DX4SP0+zrvFJ4Q+TTDgppCBxBnmBKPW9J4dJfK27GT9F5rlA73n4ygVTL/KxcX9kdo66qdCqMI0zdVcxLxk+VWWYjhObi7UAj4yhZN7L6TnVqYB5nHnnN97hU9/+KWIOlCTNW7p7WE3vBfCy5FClEIWynARacN/MLd63umvuH6L+UTUce6Z+oNusRClihMocQ8ZZszR2CEDIp4UELf5xZ9VCI9NC1lMa4xQhJWxlqav354H53Vy/9OVrfvmrN7zxeIMpMM5HfvBbH3P+YMPP/NhnoWT+3b/+K/zf/tovkt4Hrv13Sz1+5SH7mwM5S1RVWOAlwUfadcN4HCgpC9hk8ALRayuSF1JxmD0PXrnEK4UPmf3VjuAj2gSaTvJCZx948NIl24s1h12/bO08RRU2ZyvZOqdEt1mRUqJ/vpAsa8fYT9jaoZXkIOeixCozTtJwpcT6cs3tW9fY2mGMZvIT3XlHg0TtjMcR19b0xwFFQSmJFiAXiaaqZHhypyZrW4nLCiHh55mSRHLatjWVs4z9xGHXCxwmJqZxplu3dOuGqnFYKxt1bbWQogmstyshvi8Qr5TykjeXSCFTNZUoBbLYirTiPmjdVkIjTD5DJXl1JYt6xliN1kK0VFozT4EQF1Ju4xZAyvv7nJ9O1vdZ/XHAWkNVVxx3PVM/0a1bPvbp13jpYy+JnDCXe83t3VTbKE3X1eyuDoBsxw67gVwgzjO2dktOnafEjHOG84dnVLXjnV9/m6mf0VZjF69c04qnraortBY5Y1VJrx4WWUrK8lAnQadyaHofqWuh+oy9F5pekOnBNM0MxxHXuGVzlrh9tmc8TJJcLDNSrNM8fOWCeZrRSuFqK+TNw4gfJrRRtNuWZtNSryppUH2i7Wqau4v1MPDOl54yHWfmcca1Fe26Ye4n/DBhneN4nNjf9txe7dFW8wf/1E9y+ej8Rb79pzrVP1EJaV2hlVoeABUxJfoxiE/ov1TGaJrG0a47zl9+RMyF4BctjNLkwpI3V5bBkPz7qq2JWS1DmRVnD9d0Zy3aGFzjsJVls21ZbVtAfLzaGM4fndFuWkxd0XQi9d5ernj46iWlwOHmsJjJBWt99nBNu6mpWjnw8r2MtBBjZHVW0a0bMX3HuAQvn+pU71VRGldZjBH1R87l3psm4eCgUPfKkLwQkSnlHhOeYsYYhZ9mPv65j1M1/3iypbuHrboRmEnyAg4q979HISeRLwuLD1JIC5yFe7m0NnrZ6iXGw0QuhV/7u7/+vh/QfreWj5m/+YvvMoXE7COzj+xue54/3WFLYt1Yfu5HP80Xf+CTC833o1taK37/H/tR6sWDFXxYeAyJMHs+/ulX8eNITEnAHSDRUDGy3nbCMphmds93hBDpb49oZ3FWnkmfvXWDMpp23VI3jqZ1HG4O7K8PqCIWl2mYJCd5Dhx2Rw43B4zVuNpyuD7IQGWJ69BWM40T/W7ALc+jde0YbntiyotH22NrS3/bc9wd2d3sQUP0AWsM3gf8PHPc9dxcH0Ar9jdHxmHCT/4+TsFYQ/SR4TAucu1E09aADIpSiCgF+92ArSzduhHPm0KIuVnOzrqrWW1a/n/s/dmPbml23on93mkP3xBxppxqHsiaOIhFStTElqiW2NBgdrfkltQNGX1htAzYvrDdV/4T/B/42oBhwIBh2YJhoCW0DU9SqyVSzaYkimSRrMqqyvEMEd+wh3f0xXr3jpNVRbHy1JBVyW8Rycw650SciC/2/va71nqe3wOF28e3TMNUQ8KleRTCvGzfUsxopTHGyJYt5QqZCeKdc+a9Q8vKhgDx/I1nidpa/myOCa01f+k//HPf03Vy2cx9D3X/kWTFXd3bcvv0SC6Flz/+Etf3t+RSuH33hn7XyySvKMLsUVqx2XfsrnZMo/zwF/Nq21r6XUtOO6FChkDjWpqNQEnGQRrF7f2rmkunuXqwl8ZOe6Dg50TOSWAqRWRNKQlMJXqD2hZyjCSnCVFoRtM4cz6MWCdBjU3j2O63uEZzPo2QMkUJ9dI52QK2fSMABCuXUM6FpnP4SW6AtnNC/2kdKUViSByfPmV3f0/bG2xjGU8TKUlY5DwG2k1D0ze0m0ZofVYC0K92V9y8e0u36TCqrqtDpGstP/Xzn+Otb7z7wV4Il7rUd10KayVP0hbFMMzMPhNT+Y6ekVQffkppmtbyysceopTGzxnbaLQGs1CwloeO1Wy3PbsHG/EkTZGSIUcxZutF6hIz8xBqjpbDUvBjwDhNg2xD7j3YMhwHsk9LJ0qYA6axgITKPnvnlt31njgnwuixncVP4m+wuxYTnfyd6Dq9vNSl7irNAT+KxDIGkWxRCiVnwZGXu6Y
rJ5GRLUNJpeUeMVqBUWz2G17/na/9kTlz7/n7U+Z8nDBOcqBKDSgPIWKtwTRCjdZa5FQxZTlwToGubySWQEkuHrNsA/fXGx6/eXku/bvqa28fePvxmZfu9eRcMMbIMCiL2kcZzV//5S/QbRr+7/+Pf8Mf17b4M1/4BH/+V36Bp49vJU/YB3IW/2iMieE8C0hjmHFdA1pzuhmk+UiFaZjRjZNGKgryP3mJjVLOcvPuDSjQzrC/3nJ8dq6RAkHAd5sW1xi2Vz1+8Bil0W3DeBqJKTKNM0prrDUMh4Grh3uOT47sH2zrZl0RcxGwSSk4qzk8OdJWNZi2oI3lfBxpGsllQxuUBaUN+6uOQiE2jhwLrmbdKZREVj09ivc8JoxRNL1jmgJ+lriCFDPGWhqnhbJpdG3GhDIJirZtaNqG4TzK9r8SoImZbtNW2qXc/5WXK6AXJ+AkrVVt4FR9b8pYa1C6EEvBtW7NxovjXBUs0mi6tqFpLR/79Kvf03Vy2cx9D/Xo1fu88toDru5fEXyk7VqRIm56FNDUCQFK4+dYsaSJpmtpNy1NZ8Vfcp5oGsfVg33NgFNoq8SLojWlanONUsQpsNn1vPTafYIPEtydCoebE6fDSC6SJZVzZp6CoJytruS8Qr/rubq/W7HkMnFwuFb+0UaatdNpYByDxBbUaYSrDdrmqkNpxdWDHQDDMJFT4vbpkaePD7Kvq9PK4TyTkmwOrx5cYa3mWLXau6teJjbHUaYkdboRp4gfZvn44ySbgPPMeBwlLLZOSw+3A3/971wiCi7141ObTUMukpu1PDCE3AebruXll6/f8+dLEQO2NoquszR1g1CyGMIF/EDdyKm6pUjiozMKbdSKsfZTwBiNMdRDa6HtLW1n1gNxQZNr37acnoyz1SwuFFrvRebWbSznoxyam85RyFTbHsFHcsxsVyhSWWFHl7rU89X0bVWO5LUxImfxogFVQSxbMKOxzsqBqdJgtZZr31rxtqXw/ra/d7FC8owUMqz423Natt56fY6mKMHIFJF1LpmRKUQONyMhRHKQaful/vCafOIf/pPf56tv3HI4zYxzYBg9h+PE0ydHTueZnDP/wS99jr/6y1/4YyvR3l9vGU5jpTJWvH6WjXSMmfE0cLo9i8UnJrz39Ncd/bZlOJ5J9To93Z4JY0A7Q4yJefbMw0TTNuQ5c3W9QxvNu2884d2vP0EbgdrlnJlqZuj1S3tcZzndnsTvZS1t2xAGz6lKGk+3R0opa+TVHBLDzWmNxEo5c+/RlTx/Ood1jnmo0v6bM6fjQAySi2yd+OOmaj3aXnXYxgoBs7HknAmzfF5foUcSzRNI6U5m3bSWbtPhnMQJqAps0VavqpJpmCk5V3WAnEcFViI2nqZz6/MWqo2uvg+scUAKkV9aTY4JP8siQ4LHBagkMRKWECMx5upzFG/i91J/PO+O71MFHzHOcvv0SNs1PHzlPtcPrrDOcP3wmv29La4TgMnh2ZHzSUg3MWTa1tVpu1AdQxL6pcg2tGh8zxMpSUBqyaJN9rNECOyvd7zysZfRRjEOAkwpSmRZbdfgmkYkjEVypcbBC852DnKATJkYCvMcKfVBpoDDsxPDONG0ltvHB8naqNSw2ycnhslLZlXfcPvkUOEohjhHmsaxu7eVm6ZxTIMX3Oym43hzFvmn0jRdS5gFLW2c4frhFQXE3zMGCmCM4fjshHOWJ288k4drfZjmTM3Nm/jIZ1/j05//+Ad8JVzqUt9d/Qe//CU5/C1Uh5wxSqSSrVa89tLVXYYWsPjaYkpVPi33gXamBofXhwngOpFtut7R79uKihYMs7bSsIVK4FpyJK01GGdoah6mtppYs+CUko377nrD9UsbkZKEWCEnhhwiz959xma/IfjIdBa6n6/U3ZwLccqV1CsP2H/7a//2A3jVL/WjXLvrDX4WWX5KldKH/JNLqYeqXL3nqQb3Cn15AfjUy5mSM2989Y339fffPL6R8PCUamrIXaO2TDTCHCv9GclcTfWgtiLNZXh5Pg6YxnF8euJ4e/o+v1IfvjqOgf/LP/0D/p+/8U2GKXAcPTfHkeNx5ng7iK8L+Jt/9Wf5z/7DL/PZTz36oL/kH3r9p/+TX+V8GJjOE9MwMVfPXAjCYRjPs0A+KJyOAzlnjs/OHG7kXOjnmTB7yS0OMmQLUQb9pYI47r18xWbXrVR1bTTdpmV/b8tm2zCdZmL1pAGUkjndnERaqAGr6Pc9McrgcCHQamfo9600SCnRVFnm4dmR83kUfH+IKCO0dmuW+1qvJMowR5SSRuh8HIkh0nROIrCGCXJBVTKlsabGEXiil+HlOMwYo+RzVyuCtqbGA8l7iWtspdgCCCnTWos2Rs6eTjaGS06d1gpnrbwPOVOzMYs882ImhYQyalWnqfq8t8YIOKyUlYQp3x/s7+341Oc+9sLXyaWZe8EyRvPX/u5f4qXXHhBD5N5LV7JZ02oNPZ2nwHCUIEFjJHCw7VtcI9kUc8WlGmcgK+ZJ8jDGKhHZXG9IWTGNst0C0Nby9K1bTseRXDdg0Qvq1I8iW9RW1wm8IGKbTmSLYpoN1b8iAec5iTE0Iw2asRqVC+NpIheZ/s9TBGswRrHbC6FrPM/kktlsW4zREkPg5EZ9962n6BpHYE3NG3GG2UdigVzpP8YZUpB4A20UymqUs2yuerbXPRqhXvkQabYdwXtQCl9DJZXRvPqJV/jlX/1zlSp0qUv9aFeqgcI5iqSMUlA50zuNypkH+/49HpHrh9cE7+umLK8odoFBqCohMcSQ8FOSLbuzYBTj2TOPskUwWoJfm9aK+bsxaCWBqnEOItnxkTDOaFVEHWAUOQTmwXN8eha5ZfVFGANv/MG72KZdKX7Gib8g+EQO0jC++/VbpnMgA66x/PavX5q5S723/ByIUSAFWikKAjpZMptMzZijHpZyTNWXKfLLFDJGa8bTzDS+fyrckzcfM5yDSIG1+D0zWf69TOorLbMUyaqyVtfpe5ZIAyUb9BgkQPnxG084PTt+X1+nD3N97Z0j/+D/+3t85evPZDhVJd3Ri1wuxcQv/9mf5H/1X/xF/mf/+Z/ns598+EF/yT+0WlH9Ma7Zct7Lf8vwLBB9kCgqq6XhyoUSE9bIe3Pw4iejIEHiU+J0GJhnsfr0246mcRSlaPoG1zmKgnnyzHPk4av3UXXCYmtslTKa4TgSY8I0hnmYSCExjR5TMf/jYWA8jNjGik+8Dg5TTLjG1uZRYXQlw7aO6wd72satS4hnT255+u5NjSYRT/ZwmhgHIVXmkiu7wWKdWTeYqg5ZSpVFW2fRVrzhgFDSjVAsjZUhpMh95bmqrV7jH1KI0kTX14sC3st1GaN4avVy5jbiVV+knFpreX1bJ1mWUeIa5nEWBV8rktFHrz7gS1/+yRe+Ti7N3IuWUuyuenKWw9T9R9e0rSXW6ffpOMrko0ggY67yjbZvSQkev/lM5BuNdOopJ1KI6xRCKVUn3bo+7GTS3W1aNvuOkhLOaq6ud2yvNuSUCTGS8/J3Sk7O8kAsNelUaZlQlJzZbDu6TUv2hVSDg5
WWqYVMRQGj8JPndCP5ct7HVSOslCblzHgYuXl8W6cN4sHzc2Q6jBhn5QHsxbs3DROqIHknSqO1BKj3uw2n2xPUqUamkHIh+kCOmfk8YVrH4cmhTj/iKk/723//V9nuNx/gxXCpS/3RpbVazdelFJrGYpSisQq9bANK5tGD/foxv/AXvyz5WcDjbz7j5sn5Pfe0rjmMJQtYL8UkcSPbFm3EoK51zc+qWVxhCoQ5MY9xRbjnmMkxyqAn59ooFuYpyGCnIuMXb8Sx5lF2VSIHsm3UGnJOKAN+mAkxUlRBa83V/f23bB0vdSkBipSUmYeZENIqbYxzqpNyXa9doAZ75yKNXVUXS4B3znz1t/7g/X8BwgxaPXk5ZVIQqIqqTVpOueLyZZgSQqoeVQWau61BzPJc/GPr8HrxOo6Bf/pbb/L62wcaqzBI5uA8TMzDTJw9ndH8yZ/6KP/r/+lf5pf+5Kc/9Nl0xmjZPilW+EmszW0Mie1+Q9s7pklkfGEKnG7OUKDdtLR9S9O4leJorGyahE7e41pHf9XXRgOON6fq7dL4wUsId8zkIs+EeZh48/ffYjiM6wBvOI7ohaZcJZGhNj/jearLiIQymjhLXIDbNPKsqNs+11qmYQalOB/PHG/OjMdJgEMxEX3g+OxE44QIqbVCZWisrXJTAfTlLHCkGIJEiBhpbnXd9glZU86YbYV8UYFK2piaVanq80+aYF+JoADG2nVJYp1s5koGVT9/qbCTJaok51KXNpGxSmRTyEQvsk5lJIbBtU7yoL8HKfGlmfseSpClmXsP9+zv7ZlnOQzN09LBl7qWlSmEcYYUI/ce7DjcnGvQuEzErTUV350qIMFSVMFPHusc/a5HG5hOA/2uFZPr7SiHwrZht99IVt15ZBxmQgiUkquS606Xm1JBaTF++iph0FU2Mo1eNM6TbO+axlYfm1CRrBUstJ9CJdp5rDV89DOvyvq+QlCaxjKeRzZ7CZ4czyNooX2VILQvayw5RZpNy/beVgKRNy2ubzgfB05Ph7vJrFV4H7h994gG+RrOM8PhxDxLps/f+5//rQ/yUrjUpf7I+qkvfIRPfPSeyC3cMh0UqWROdxIN9dwJZTzPKAohJmKGza7HNiKbhIWgl2t2T8BYwbRrKx4iY2UiuWw0csxQszGNAduYNUNHvHHixxsOM2GaaXvH1cMNp8MgDxstE9jj7UmCTuvkUlFIKYq8JcgEtekd8xwYThNxls3L5Yh7qW8t2ximyeODDCLKQls2WoA7U2AeJYR+QZOXGtAt+a2KeQycDvI8fZFyjZUDYGNJi+VuAa8gfr4YEtEntC7VppBWLyhQFTgyMNleXTLmXqRiKvzjX3ud3/rq43owjngvEtzhPDFPM8Pg8ZPn7/7qz/HnfuFTH/SX/AOtz3z+Y1hdmE4DSkkGYggid1/8XCVmiLIs8HPAWE3wnpIzt89E7nvz+EgIWTKRlap5Z5qma3jptQc8eHSFn2ZKFu/XvYd7uk0jwBQNp5szySemSXxerhUMv598hWGBdZbtVc9m39N1jt2+F6tR6zDOMJ0nMgXXOObzzHCaGM4z03ni8PTAdJ6qwszXSJ62Ro20bK43tL1kDmujZMCCfL+qnhNTyvJ9sQxiFDHGOtR0tL2oyLRSKK2JSWBLfpIYLD9JoHcIsnjQWtF2AkZxztJtWrZXPU1nsY2tYeB8SwRKqQPL+kytAerBR9lAWottLVorUb2N83qebruWT3z2I7TfJYn3W+vSzH2PpbVmf70jBtHTW2trlsSyfRMaXazQgMWD1vbNepGVUqpu1+EaRy4wDR6rdb2IHDkrSlGEGDk+O5GiSCX95GUNj0AQSpHgResqNjVKo2idwTbSxDWtGDnH88RwnoBC07d0244UEq5xXN/fsd13dF1D01nZqGmNqtvGVA9mIUW896gM43HAWk237arBUwKFrTYMp4lN3wIFZcSbgzaingmZrpdohzB50Rl3cvP0+47Ndc88enngpsx4HAlRPDq37z7jfHtGV//PpS71o1qbvlk3YWXNdpPDqI+ZkMQP+s6Tw/ox8+TlfaW+V4Qo7yW5yisXGlf9Fzln/CwHWtnYLYfQKIHdqpK36kpDBiYF11razq2I9pwLapW6ycNIcnkCp5sT/bbHOkPJWabFUQ4Wi4TbWE2377h6tKfftoKTv8QSXOo7lB89OYp0UdQkqmZJZZEha5EUK6XfK6+MiVKn2cYaDjdn5ml+oa+hoAg+1aGGHItyEWLdorCRyblIsHJM1fsq2xM/i1fItTLMVH90xN2l/pBKufC7bxwwWhQLKSZCkqFy8oEcI/Msw+O/+Vd/lj/xxdc+6C/5B1ZmGphvbvF1u6XNArpaNlGJYZwIITAcBpIXme90nuj3PcNpEAie0cTZSyPko+SlxYxGsdl0orCIkmFnrSFMkZyKZMsVUXmlkAmjx4+B82HifBw53pwkhuBw5nw4c3h6ot91uNbiZ888e6ZhYjyNtJtGnhchQJVFhnFmHGUj1/UN1CZqGqp/thTxlIdI00s+XfSRVJ9p4zAxnkU5QpZnzPkk4BWlhBeRc2VRACkliThRyAC1yFYPQGuDbWp2KqJQWxYf2mo5b49eyNBJaLcxhBVaKJRdUdrJc1metU3r1pw5kYJKM1iAafRrA06BX/zln2N39WIqswsn+nuoVHX+tnX4cULUjWqVVi7EOsHwjyitufdwj58jTWMoycqpDpEypiA6aNc15JJR6PWHPM+LvlZiCkqBtm/x8yzrWq3IGdq+IcbMPEZcW2QSUx+Iqo7Fw+xl1a5hGmeu7u+xCm7eOdSLO3A6QLdpME6TJi9Bx4OnANt7PUZpbp+e8GNgYOLq4Z7TceTw7MzLH7kPVjyA+/t7ur4wHGZOp1GonkG8PZtdx3geUZXY9/JHHwmVyEd217IJfPr2TUW4aobTKJOevhH6ntU8e/uG/fWWP/crv8B/9X/+f/H7v/X6B3Y9XOpSf1g1jeWXf+nzQptMNUdLaVLMTHPCp0wqmac3sgFbapFk5lTwPnG6GdlcabRz8uAzmpQLtm3QLqORyWrTC7WrLFtAJ2AT5yzzLB9Xosg9jKmbuioLWeh+JRdunww8fOVKHqrOMh7OGCuUs+gDUYHWFtuANH0ii3aN4+r+ht2+xY9gjKLfte/ZOl7qUgDzHIhJpE5Ky6ZrQYqjkDw3qBtfuS6Dj2ueaskFazVN82Kz6Zwy3/jdr/HJz39KDmZKowwrglyhKs21kJ2g1guyNUpJZNPjaWKaxHenncK/T6Lmh722veNv/pUvrf87Z3kdUy5koChR36SQuL0582d+7uOQE7pkTNHkkIgmMmtoWrPKy53R/Nk/+Rl+6/fewfsX28r+qJXWij/10x/BWs1HX9vz+Hd+n+0nPokvSiwoMVKSbKXavpGNHVBSXv3W/X5D8IHzcSRMfiXAhjnSb8XnrFA8fPkeu+sdh9szp8PIooK3rRGWgbO0m47OwdXDLYfDSSKt6vXvGsuCMF4yGG8fH+h3/UpDXoLO5VMXyYQbPdvrjdAkG8c0e7JPq2x2f2+Hn
zyu65jOE23Xrt9bmANN64ixEGq2XIwJNIQYOTw90rYSxTBVKnpZqbMVrhIT1loh1y7SxpRR9dzO80PSoiocSZq1xSsnViNVfXCZru+q1BJSEgiMAmJKpJozZ52RQWwrf+eihEsh4dqGftMKnOwF6tLMvWClmPiv/6//hL/+d3+Z/dWWWFfYkn2jJaupc4SQVtnR/rpHacWzd26wztBtWowzjGePUULh6faCSQ1zxG7darA01hK93Ly5AFk2fPMkjZmfReecUq7EnsxwONNf9TIVn2R1XFQRw6fRaG05HwbUa9I4bu9t8FNgPs1EW4MdG0cqis5ZfN0++sHjWsvDl66JKeHngGtkatJtOpQxlAyukRyRefB0u07ALM7R73tc3aI1XcM0TpyenWj6luAD3aYljAGza3B9y7vffMrVwy3zGJkHL0bzOcoa/8Ge6APK1hDHS13qR7FKIYdI0opUqDk4kRAT45yYJvGrPrsd3hM0HOYgUIdSBLgA9d5VpCxyEWer4KNOUI1RGF0YKiVLHrMabRUxAjUU2WiwjUw6jVGEOdVDa15lIC997Jqr+1tun97iKz7aNJZpnMip0FSFQUEIZ+Krq4ffOXPyA812gzKaf/n/+e/EF3upSz1XMqxI0iDN0rDl6gfN9fdVlfOKd1OULErXpqAUtALXvPj7v9xxCu0UaZZhqVZUP4xAWCQzy2CtXN9KKfHSKMU8SZDz/v6WMCaxFlxqrU3n+Dv//hflbKIVIQpYZo6ZmAtZKW5uzjx++4aS7hGDNOvbbQcKQlbMcyT5RAzQbxqR7s2Bn/zkI/7e3/xT/B//4a8xjOGD/la/62qc4aUHW0op9J3lL/3pTwvkSsH9fYcxiqwUh+PE+a23sA8fiSesSk+VUrz02iPOhzM5Z5quoVRIcrdphUlQQ61T8hUqZAhGGoe2adhcbVCqMJ5GeZ1DYJoEiNduWqw1lJQwfcfxdmQeZ8IUhKquBC7S7/s1Xso4KxJkH0k5kgs8fuOpgI2U3MdN5+RZVjMdVSvPRtcK2RIFh2fiAUwVLGKdFTtSVYpMk0htY8irLFtrXf22Wc7JdbMvYBJdz80ZpUUSao3BdU48sjV/NacsTfKmRau7hksbjbOGokCnskamrCoWpVZfefACden6VnIyQxRIWMqUOsjNMYGhKlki3keavsWuW733X5dm7nuo482JqwfXmMaIKdqL5LBkmUS7rqGUQNuK4XQ4jrStZGOMZ9Eeb/a9ZG+EINpibdBOw0ZV6aRmd9UxHIfqtbGEOZJLZhwCSmtB9c8e27pqvpSHDRRu371lHOZ1kkAuGOcIc4AQODwLHG9PdH1HyrLtmgaJDAiTZziNbHc943nEWDFqzuOMbRtCxdAGH+TmsIbjzcBmL1kgwWfCaaTbdJQs/p/tvif6JCAIrfCHeZV+judJwA3WsN31spIfZ1zjqpm3kw3mHNjd2wnRz2r82WOL5k//8s/yu//qBQzwl7rUD7i+9IWP4pzFDzNTSBX0kJh85HSeSHPEl8Lx/N6w4yXHZtmc7a42coCtGToKmfZFXyMHnLzXjOcoUo6iVlCEfEBZP68yitPtRNeL9xckc8sfZmxnaDcNw9FDUfhJfLhKSZ6W5HpZlNKVpikApZILuUrVnrx5i9GGlz/dYa3h7a+/Kw/YS13qubJWtizRy2BQKbl+Si5LTr3Ik7UiefEGKVNPrci1nYIE8r5wlbvNgm1sBYmBtvL3KKUoWkEGZQ22EYkxWeSgIcowtaSC1vCV//53v0+vzoejci4cbo41HxBCKhhrmXxknGRA+87bB9rWYbXh6mrDPAesFjqNMgqrZEAeU5GYlJTxKaOy4nOfesR/8j/4ef4P/+CfE+OP1nuMtXXTUuCVRzu++NmXgMJ+2/KTn3oolMnqzVzAUwWIlaJqlOL4zrvk80z78BF6yV0sQl7VVhO9bI6azgmwI2aG04htLV1qSYCmYBorw/waW2OdZajk8jBHCop200IuxCTExd29HdPkKUNiOAm9sukabKMZz544B3meAORMu22r9NAy3pzY7DdE7yV6oHWEyVewkVwX55Q4nyd29yTkPPhAQWGdJsaZftOujZP3kc2uYxpnobCnWBVy9WuoTZWtcQG5emuNMavCBcA0RhqtLE0gdpFgiretpIzPCa3E6hRTIuVSm0FdP7ZUa5UmV7+ic5WaWRcLueTV5yufP0mTmmTouXhvhdQZsbbSqF/kOnvRC/RS0LSOpjFC4ZojpWSMMSgngdznw0DTOU43Z8mnsYYwBfptJxeV1pK/ZjSbbSfUGyu4fqO1TM2DTAq01kRkmlWKUOasVWAVrnWkIOGOaCVa6Kp1Pp9GmbpouRnaTcf5MGCsfI0+RB6/dcMnP/dR4pTx48x2v0EpRfCF1omBs9t28oZTSqVhJs5jFIy50rStA6XYbAOnm4HNVcfVvS1PfaQU6Lct43nm9PRE0zcM54l7j/bY1hLrYTPHxHAzcP3KPTG1lkK7abj/yo7TzcDh2ZmreztyjJjzRNs1DMcB5zRFwWd/6tMf9CVxqUt9x/r0Jx/JvVvlKdPoCXPi5jBxrIGoKRTG+Vsny6VOGEFVH9HuqmE4hSoXKaRcvUOLRkYpjk9PMl3U8nAJIcsBOWbK8kCJCV29A8YJ9SunQrdtSEGmqtYonr3zjNPtmX7biTG8TkdtIwMcpQXZjhLan2sdWmuGmzPGOrKPPH3zKX58MT/TpT7clWJe8fN+DLQbuzZXMWViqFEeSyQBIo2SPETZRGMc5gUPQSDPxhATRStUktgbYw1GVVlWypJFhapQIUPwGdtZ5tETfZLYoc5BTigucuLnq+TM7c0Z5wzayvtRMUFUCiGRc2a760gh0zRGyLgUJp8oKqOMQRuBzNRkFtCakDLFiCLq0x99wP/wr/0c//Af/ybj9MFt6Da94zMff0Ap8t+/9AufqPh6kQPbCvYB1vdgtJL83yIbrBCrSiLJQd9qhT8dGaYZdXUPMBQy3gdiylgng/aiVAXybXny7g3TMNM0DpUz8+Bp+0zTWNqu5dGr92mc5Xg6MY8BDKhYJYPXHclH8a/mwv1HO56+c8A6x/aKGkkgHAjvI+2mYT7Pa8SNsZrzYRTfqyqMg6ffOEqUzVlBoEOlSqddY5nPM23f0GjZ3PkpUHJmOs8CQNKabqtXn9nKiKgecuus+AOtka2Y1jXmRBpDo2XQKVRLtQKOXCtgwBQKxhjaXuwA1onSbQF3KZnlrAqB5Rd1zbuE6oVPBaUKRRXSnASe5CWSaAkyFyhKqXmsNfpg8Lgrx3/+v/xb/G/+y//t+77uLs3c91DaGGzTkGOq04KaKzfNKG2IITBPknujtGJ7tWEePBIwypqdk1IgKYdtm/pDnSvcoCGFTFZKAoG9fD7XOLmgMPixZtU1ljjMknljBbF6++TANHnG83PJ9ilJALExYAxp9JQiXpu25n7M9UBpjGazbXFtK9lRWd5oCwo/emYfoWQ2m5bbZycevHxPpg0xcz4M7K43tH3D+Xag3Yi22zhbiV/y5oICrQpXD/ckFGH2DMcR
azTbXUffdwzHkX7bUWLGNBrnWpq2kU2isyQfGMJ8ASxc6ke2/OQJ1S9KKZRUCCnz+68/pjWarnWQv32iXBXVAFVCqZnO8pBbokZSymvQaqkPqGlIhDjT9h0pxTpBVJRK8lK6oFHMMVbtfyEWedC5xqwH536/4enbj8UwXwNV5UHs5O2rylJyliGTsXfZOlrDvYcbrMmEuXB1fYkPudR7a39/zye/+En8IPRlZQTtDTJcKEGeVxKwWzBaEWMmzHdez2IEPd427oW+Bm00n/rCp0hZQEAp1i1DEVpmRp71qh7clFH4KaCNBIhPw0yMAes0OSZSCO+RSl9KDrkhJmLMGJtJuTD7SCqKZA0+ZlS1iJRSGMZZzjVGkTLoAlYBSvxy0xxJJZMAlQvTHBlGz8deveJv/JWf4h/+o9/8oXjojNHsNg6F4i/84qe43rdsNy0feXkv0UpVsaVKgQXko++iqkqN2hBvpqD9S932priE2MvH6ZRI84x/5x1S01KanpISOSZMa1FaNk0hJg7PjoynkTBHkRjmQtM4dvd2KKXYXUuc1Tx5TGPkz6hCKYqSCjkk2k1DmAKbXcc8BtmGDUIUtdYxnSZMK/6y4TCuSP9lm95W20ye5fnkx4Btqwu1yvmDl6zVZXuljICIdEoSl1BfC6WQDL0K/dJKQ4nihTNqlVeWAkYL1CvUHFetlDRysKrArLNA9dxWyI74/iQY3Fpbfa+lesRbAM6HgeBDDQsXGav84Ko0W+uVYClgFfkBLgqWUkRaqa3FKJGaxpSY50Dbt+SUXzhm69LMfQ+1AAZMzWVbZB9N2zKcRlJOKCWrbGMNzhmilSlA04qf7urhnjh7xtETB49WsNl1KKMlaNEWxvOEToZm0xHGmfPtGddYxrPAT4zV5JAx1lLqJCf4yNN3nmKsIfiw5oyUJZhVriqsFq9ZTIl5mElB3mCvH+xwreN8O5BRFZQA4yDyrXGY2d3bMhxGxiGwvXKEIM3ZZt9x8+6RJ2/eYIzGtQ3T2XPvpSvOh5EQEtZpkWlW2lAu4uHxMTOOE7urLTlnzocRIriN4+Fr9zneDmyuZbNpimK4PdE0lukkWNtLXepHsYYhcD5P6Ep19HPgG2/e8tmPP+Drb9wCfEet/Eoiqw+b2Qf6TSdyL2Ogxo7Izk9wzDlWjVgRH1GKBdsYmfzWQ4K2ilin4MaKV0DIl3JwzTGhtPiErHOiBHC6HhjEs5SBHERqbRu7Es90farce/UeWinuv3yfog1vfP2bP8RX/FI/DmWtZbvZcr55TIwicVJGCVFyIbRGGUIuw8hF1qu1WX07vl7yL1IKhWtbYs25UnVIskDCtJZNila60gAThUwJcm/e3p5E4oxsWi5S4u9QCoqWkPXkZfNZtCZEyRfMStM7TUmZYQprhIn3UWStWmNKAS0xEas/WClCKhSlmUOCOfLJjzzgP/kbP8f/6f/23+HD97+he+XRjs99+hHGaB5e9/zZn/9EHWaJvypnyexMscrbF89nkcTQXIOvlzD6ZVMlgGFp9HIupEQ1bQrJ3BjZxqlcmMYRfxw4PnuIslbkpz7QdA1NY4n1ntnd65mOM8Zorh5d0XQNV9fbNS8tl8LpMKKsxh9Elmm1JqfEzbsD/VYy6EQaKVLokgpTmFBG0ThLKhKwnVJiOnjKrtDbjnlRYmTFZt+TUly/LtuYOpTxuLb6xGq8TvQR17gVdAIa24iq7Xwa6TcdMQRiCPIa5toU1tdPG3kNNdJgKa0lk5hCU5u4pfnTRkNgVZyUXOT9RmexPVm7xpacDxIPsTRrJQv7RWwQBV0geCqR18nmMYvEwNgMyM99HCIpJTbbXjL3Gsd221W/49LWv/+6NHMvWMZo/vrf/WWssZSSOB9HpnFe8cRoMMrUg1ioDwaRJuVZAq9VlVDNXqbbu/0GreUC8WOo6+OC0oWcIqBXXXCKQqnrd51kbgQxr1otunJlRKJiGyea4pQXoTWuMSLvtNKU9aeBHBNN26CtxXvJylsm7tSJ2bLypyQUhfPtIMjYrqHtHMnX0HMU1hiuH+zwk/zd8xw4PTng58juwU5uCGvY3W8YjiPDaSRnmMeAbRyn2zPWFB68fM154wghkEKmFIl6cNbiWss8ThK70IoG+lKX+lGsX/vNr/Gxl/f0jakUvMw3373lq28kHl31ouf/DqfRMAfxgJSCrRNCrRSuFV+PNga1QEeyHHaoeUTMEWNV9RwlplGkPq6z9fOBsY6CQuVKJ+ssRoHpLeMQ8GHmfBzIKZGNxjUNptUYo4gh1VyfZdKc0das03XjLJv9hpAyrWsuwrNLfVuJZWCi1Ak26q55ShWSoYyiRDmoicxRU4oMUHXdTPtpJqUXk9YVRM7WOIOzjlBSbejq7yfZGBRV8HNCAc4J8VKCnKN4kIyu+V/xRc9jH9paaKDWWYJPpDlRamj07CPKWca5DpqrYinEREgFpw2kTPZgjHhyM5CLIaZSScB3CoZhnHnl0Y7/xX/xF3hwv0dX0Wsphf/9P/h1zmdpMtZ32wJtY/jP/qOfw1hLCKJkOA0zv/lbb/HzP/ORGpMh1E2nFQ/ubUSaG+TrFzic+NtCED6BkMHzGnKfUr6Ldlp+rcr1lmiZxZ8pPsFCTiK/RCkyBaU0Ssnge7PtRKGUcs04lHvi0av3efrGM8bTRNO3aKO59/I9IaH7hNIaP86UkEk5MR0HxsmLzH6G7DRGK/b3dqSY6+dxlCJgEe00YQ6M50myGTsHmTup9Bzod8JNIAtwL8bI8VmsPrPCPApApd/0xBBrdE7CtY7NVS+ySWvqfShNpK5bOTkryrZeIhrkz86jF9aDT2uDnOqQUtXcN2nUEgpzl1lZZDiplVrpoMKiKFXmOTKepwpJkftcG7EvCMAk12EPNQLhjmhPtfaW2rQv8LGC+O4MAg5bXpO2bb4lt+67r0sz96KlFI9efYjrLNNZaDc51skFijCHShRSKxxkPM2cjyObbc88eKEVZSF5WWsktBG1Giohk5XICFzrGI8T8xhoOlk/51xks1UKGmgaR5gTkLHaoJHcqW7bsb3aopUSDLRPpFCIwUvTaS0p5/WwiDIiu4qJ3fVGbiSnefLWDTlL89ZvWkzNiSulcLo9s9l1GKuJKbF/sGM4TZwPI6axtH0DztI5K28OL10RRo9Ols1OpJSHZ0faXVsbQjGrHm/O9JuWU20cbWMZbkc2+06a1BDxJjIeJzb7S1DrpX40axwDMcEcZAL49GZgDkkmz9VQrf+QE6A0TiKTtHrJpwOl5B4xVkFRpJywWjJIur4llsTN289o+g6lNU0npDHrFrR3IoWIMhbXOabqedhuG9KQyKNnjLpOwAVtjZIGcvVzJAk7TYvR3Gra3olcp9QIhirNfNFA50t9eGs4Drz9jXe4enBNMLFO2lnBIpSyggaEeCWH24UoKQcfeYZO3wIP+u6rkGPC2pqpVQ9mqgjFEqXIscrDtKqEwETjKq06JfpNJddFhZ/Tmid5qbuKRQlboEpmc2LdkKTkCRRsI74qciYjsvClqTMmSxahUmSlSTlLEDa
FkAqxkkfjLD6y3cahMzx80NfzUua//B//e+Ta9IvaQcjAORdiqr5j3ZFT5sFVy8df2de8wUyuMjpVyrqBjT5WQo80DqluX1KQA3kMmVIVFyv8pconlwZvqVXGu1qf5bmQ61AjZRnQUxuDl37yo5jWoWKqCgoj8sUgHq9u36MLUArz5On7Dq0182nmpdfuoZ3lzdffIdUmURuNtRbjNF3fkmOSf+fCeJoZh0mQ+zES5oDrXA3hBoyiFEuuWXgpJDQKHxL+PKGdnAtLoUJsJKtRyLW53uPyWqR6TQQfVhKlqEXkurHOUIqqHnQp21hQCF12YUo0cq4tNR/SOHl4llwohhpzAsZZur6l6RzGGuZRFG8hiER1yXK1ja1xKGIjSJWgucQyLF68nDO2NouLEmaBNhmEYj/PYd0MSrh5zYINkY99+jV+8qc//b5hfpdm7gVLAf2mEw3/HARbrKHbtjXtXbS7zaaVC79xHJ8dZRI1Tav/JKdcQSauqjTlzUA2YeIhA0g3wxplkILQJjNi2BRDba7B5Rm0WaS65Fzou4bttkMjq+FpnFFasnpi1U2fbgf213u8n5lHz72He0mon2aCTwzvnjkfRx68fI3RMqEXel0mepnYlPrwa+s06Nm7NxQKu32PcQY/e4xWtI2TVbrVmNaRc5bvJ6ZKH5Kv/XwaaZyEGHd9i1eKcfA0rRWzuTMcngbRbQNxvhwWL/WjWSEm/uW/+Tp/5sufhqzYX20oBW5OE31j2W1bvv724ds+TrIqS5Umu5USa60mFYUfwko200bL4SYkXv74npt3blHNljDNpKA413DS7VUnlEslm7PzzRFrnBjVz56mdbS7DrdteeP331wfSlBlZDnjmqYePsRzIMhxjVaGaQgVuiJTyzAnuo3FNs0P90W/1I98+dlzOhy5fnAtWzdnKEky5ZZJOsgGjiKDClWpdgIekEOZ1opu04s8y7//DV2Mic1+WwmWIveT1ZxM2rWV61zyWuUQbq1IxYS6WeXNMa/+8Es9VwpQhRzugBJFK1CKprWcR9nwJ3/nXVJKJLclywYrK11J4dTPIdLGXKOflJbMTYUcjueoOQ4zpWSaGl0UdPUxcYecl2ZOrB5a6cp5K9Jw5CJnk1wkGiGL/yunjKok1YJsERVqiVxbz14o+T5SKbUBqWCTuqECJU1OLlVlIdeyqZCUZSiQUiamTIgZHxKaeo5TI9pqlJLmYrPfMI+BEBLTMEGB64dXbDYdrjH0+y27XY9tHMNZcuW6vsO5VPH+ivE0QSo0raXpG+ZhxvuAnyKnw1kiQWJGl8J4GHCto9+1Eh1gDSnkVT44TV5iR0LEOct4nqAscuWI0otsWUicyUfJLjUCQGm6huAD/VbRbpxktDlLmcra/BpjsFbCuOfJ03ay3VroykprcgKt63m5sRgjVHllNV3fVJBK4HRzWiGDpn7upYFc5K+l5BrgbtaNnl6y6qh/BtYmrizXf1kW9rLxm8eZTfXHhSmw2cgQ4eHL9/nop1+9NHM/zEoxcToI1WccatMVc23eLCFG1FyxyyEJoVEeCcQQaNqenAPBR9rWSYaTMQynadVa95ueGAW/mrPEDmx2G7yPmFxQvUyVgk+UJJ6ZEBKqFE63A/2m4ZVX79O2hrZrOJ8mQixV/igTycXXlxEdeltDy7XWnA4D8+gZzwO7q61841p0yRRF07eUYUIpMYVjNOH2VGmYCWctKWWa3mGiZFrFOXK96QDFdBrZ7nu2u16CHmcxh1qtOB9Hmkd7Hr9zUycgMnVxnROv4NVG6GEFMbvqyzT0Uj+6NVc/hUbhnKV1suF648kJnnzn/LWcCzHk99LxlCL4XP0AS+YVawSBNorz2WP7TrIptx3TaaSkxDxnDk8V9x7tGE8Ba6pPg1wHS4Zu31NSYTgeiTGilUgqjbX1oFsDk+PynqYq0l3gEFrrFcedU+R8nOg2PTH8+GRAXeqHV/MgHinxTS+QAPGgSRiyHHyVVqI2WT1GRSAlStFtOjb7DW997ZuEp+//Omt7h1KCeFfWYK1eD2g5CY48zlHuMQNWi4dpPA9Ya9BGEVLCOMs3/+B1ycu61F3Jil6a4gzBJ7DiwbWNxWZRMKWU8SGgtfxvpcDPkURZGxqRQiZcY2rSSsFqJSomRKGQSmGaAnPncFY8U1YuMhlA1aF5ybEO0auHqubfipUlieduDsINiGk9Z6EWuV2lUmahHCrNKiUUT1y9ZusWuVQZaV42t0r+n3wJRSSmAMg1npPAYnJtAFMq5Aw+pXrdt8w+EGs+rzWGeZzqVstUn6fGdA7XOqzRuK6BIiwGPwskJIREuxOiuqmxUTkXjjcnNruOECJXD/Y1RxTGw4AyIksUmnqu59xI0TLkazcNp8NQn0mG28cHQKNayVklZ4wyKKPlZxTzGllgnGTAKa2wWqSTC31SV0aF+FoXKaZaN3qpbvUWumXbN3LdFYneabsGpTR+DsQYOZ+igGQqiMZYs4aqLz8k4zQ6I7EJVZa5sDAW2JHSup6lzcqlUEajc6ZkuV4SpfoVBQbkp1CBgBCjLFY2+/6FDMCXZu4F67Nf+iTWGc5HIQY1XUtOieE8yRtCEWQ/uVQqj0x0NtsNmQo7qL6AefDyxkUhKSUTvyCBiE0nFJy2awR2ABQlmHGlHfPkVyRszplCRmsxtpZUSLGgGwEYKCUa9RyDGMqjTPFLnepba8gJii2cDwMpCoBkPA24Vsycfo701hBmL9EGkydnJNhYaYkNcHIjXF31gEaRGY4jKSTuv3TNzZMj7bZhPk1M5xnbSbg4Shpk11ranYSCgqyhl3W5q6vwdPZYYwhIcKtSivF0QZ9f6ke3/uDrT/jST7zK/W2DNfClz77CG4+P75HafGvFKtcRiU5hOM70+56SKxygQkeECiuT7OoER2tF03WgClePrnj2jXdRxjIdz8ydQ2slMSLXPZQkE9u+RVUZ59O3n5JjQjeu6kjka1J6mRpnUAbTiGFe6UKaA6pzlCL37TwFXBt5+tZjfvOf/MYP/kW+1I9dNZ1sAGJKtXGTSbcEglcwhNHkkBBQnVAt5cAdxaep5Rn077qX/vAS5nhMGdMYCvJ3a8P6zFueaUploSpvW3Rj5Jm1ZLtmsE4xHs4XmuW3VCkwR4kQyAViKZhcfcKq4Jwhg/jvNw3DaZIg5c6hrcFpGSjFlEmkKr/MtI3FaE2cgjTaz2W6hZgZ6zatawxNY4UcmTNVHbc27KVKJ1XIFfVvqtKpVNx8WiWTspWDENJKFF7yy0Dkk2XZylGI5Q50glIrvTLnjFICkkKJpLRUqkauW8FchIgZo1Aq5ZYohFiYY8ZWuaeuG6du3zEchrUpubreYZyhaxsa62gb2UgdnxzXXMZSwSHj7YhpDF3fEOeAba3g/RNCLz8NzONMTNWTFqJQxnNhOk2025bjOEN9PoQ50m5aSi6M55Gmd2hjSCHRNA3WKBnCI9EjaCUqNeTv00bLebARWIvWQtxcvHnLa5SiQFDkvJuYJ4/WLVlnOTcj9/EC6RpPIqFcmm9VqZbamHXbZpvFSiDnzh
Tyc/CTOylsTnJNGyMkUYxwLXIWMCJF+BoKid4y1mKtXF8hRPw007SWGOMKcvGTf87Q+d3XpZl7wfpTf/FPiCzEGHwJ5CxBwCUXuq6VyXVOQsMy8sYj5EsxhWIK0zCRYqLbNvJDd4aURabBkImp4pqVaIbbXcs8iVnUz56mc7LCzokUI6XSe8iZWDK2MbhG14YtM89yAPSTrOHlvUVW/9N5ot90aKsl12MW6eU0TLI53HYCabmq+GdjcVqTAxxvz5jWQgz0u54cBbmLtSJJQW6m/f0twzCxvdpw884NzjnmYWIeHH6ScPAQMsEnDs9Grh7s0AhZczgJkWk8jcyjx20axmHE9Q3BR7kh/GUaeqkf7Xpyc6argaOP7m/58uc/wm9/9R2Ow3cmsbrGkMTagZFAojqIKatPw1i9ktGsNmglpvpcSVq6ApPuvfYSMRd0DpwOZ9LkcdsNcdTYtsN0SiBCRSTY02nCuCr/Rq2+CnnOiNqgdXaNMSgZqJQvMYunSgSrtMzLAfdS31LXD6/5mT/zU0yjeEen83wXskyVVzaG7OOieiSnjLE1cDdmTMnEDLaRrfD7LaWg21iy0uRUaDaaUEEn2ipa7YghoR2orElJMrL8GCi5RgEVGUT6lCR+5FLvrQqHyEWaOWMNIWbZcgI6CVzCajkEq23L+TzjQ0Qt72XL+0jM9Rqp5yrqHqsSdlOR4W/bGGmkvAy7YvWp5VwwizS8UkiVYiUelgLey5lqwdkrRIqZauOUsviVyyKf0/JN5lzEI6fuJMJKVWhLocIvKutHa4mKKaoCXe4owdQ/H1Mh5kKsnj2BcshLmpfXDLtK3qfjJFl8IdK5FmU114+uaJyj3/fMp3kFbQUfmKZZspGVqv68iB+9+J5rlJp1hlwSscj327SOpnX4yTPPQXyKKVHGuTZDkn28PCfm0bO92qCOEyEKyZkCqdJic/UzllTQrRO1R5ZFwxJJ0m+NUCUr7l/8c9QMZlXjAyzTqJjOc40ngK5rVg+srj8TpSRSpJQ7v6Rk1i1Llirzrj9zqM/e2szVZeratAG1oRPY2AJp0lpXb7v8vIyzNTScal0Qr694LBOlxvuEOfK3//7f4J/8178mS47vsi7N3ItUfaBMkxdTaM7M51AnL4ASw2VO0PSNgEcmWc/GMLOEHcqESAxiMhkqUDJz9BQUXSdaXu8TKs+i3W0MJUtInR+DIJE1NQQ4r1MGPyes0ShjsVYzT4ngZ8nCO08obQgx0TRCrwwhMpxH0RfHRA6RHBN+8jhn2O87ufitlYswF5IqzMNM20oopZ8j22sNWSZn4+EMJWMaR9s2HJ+diSHy2qdfYTyOqJJ56SMPaLcCLkkUDjcnMcm2lnmcaRtHv2kZTkJOykW07eNh4MHL99AUYilc3dvz7jeffKCXxaUu9UfVr/3m69gvf5pH1yKV/uInH/Lxl6/4Z//6G7zx7nfwzFmDa40Y4INM72zrUEYCaHUqK7FrpVgiU9+cM6567koqmNZijUY3W/bdhs5BCLl6CkQSkpKEyj5967GEIzeLqV3Ldr5+fqXANqZu+/NKFey6ttLA5EFYamDqGmh+qUs9V9poNvueabilaRr8XIeYWabXSitUliatZCHBKb2AMyRjaziK90Skku//Osu5cPt04vqlayHL5TqBtxqBOdcNoZZnm9Watm8kaJkqdUY2JjdPzkIovNR7SgHOGow2kmOrRS4X8p1cTkvXhsqK1mnaB1vmKXA8jPJJCpWUK4duyU/LJBSlokdDbX6skXDumDKmQHbVd5dkY+ar/FFTZNtbzzTL5xeK9x2UAxaqpCYt4JKqrlKwwj1yZj3IoyqwhHqNKDnjlArhMLUhXSSDpcZrrPEE1IYuCvxkaYRzXVdbJ+fAFCNaabpNXzH5maa1bPcbdtfbutWSbXdbIXF58MScRO3x7EzTN3SbFm3sCv1QWoTO4zAxz1GyUefIlAScFWKi27T40WOskfy2Xct09qQkAw3nLKlJa5SAsYZl3Z6z0D3DHKA2QLkUwuRXv+BCj8wxM5dZiKCwvma6DjhLFvlt24tfLsZIHgv9pq1yR1M3ZTUL1TlqR7Z64eQcjgB41B3QZMlNLdTogAq8oVTJbrU3LM2+sdIcLoCy5yWTukoxl6Zx2WB6K/nOkudaePjK/fc9mLqw3F+gPvap1/jzv/ILcoAqqpotZRpCgeE8iz63c/TbnhgSrm2IQdD+8+w53JywjaXpGrQxdVJQdd5zIFY/iiBQDcrAPEwcnx05H87rhFwpmYKYSv1RSqaZzslBLIXIcJo53Y6Mg+d8PMvksL4JGaPkTc8n5tETpplpnDncnAX3XNfc0xRpN269jhUwTZIr1246XNOyu94Sa6xByULqMY3FT57jjTRy1y9dcz6cZZpf3+ymYSZnGI8TcY60mwbbW0rM68So6xuUFq8Rqwwnc/VoD8iN3fQvFhp7qUv9sCrEzH/z63/A20+OFBTnWUi2v/jFj7Lpvv36tY0EnC702t11Xx8iIueIdfprrKn5b6zTRG20HCBiRll5cKlq3hfLq0FpA0qjncSSOGtRwHgaREJUDfwKSDXLR7wLUIr4ZGOIq+RlOUwv/gVf38tivGzNL/XtlVOWw7oSKVLjLG3f1aFhxtVAY23kus0lr5KlWGVnKCGuPnvnKcdn3z4Q+aNKKWg2Mtd2zsmGwGg52OVMCaluLYS+53pR1+Rqe9BaMuiskee9e8Hw8g9zlUoYpAj1FhRGaRorrzNGo9sG7SzKVH+iDzRG8eD+lrax6CyNmQZUpZjGCiKxBlKW7YZRIpud5kiYhDbpQ2KeI/O4WEwSk8/MPjGHzBgyPmV8kJiDxZ+H0mSkKSwKYs7Eun1btjKpFDJKmn5FbfbKej7TlX+wKBxKbZREwrnkyN35QheAhsTplaVvqBJLoQMr5D3YT57zYcCHwDxORJ+4urdjf2/Pw4885MFL92QgvusrWMVAElXF+WaAVOj6Ow9dqptOP3mm8yigkhCJU6DdOPqtw3WWzXXPdt+hipAk+31HKTANM4XCNMxiH7KWpnUSOzV7pmGi6Zp1EDjX/LrghRxJlmfa83RIqi8yV4ms94Ewe8Ls14bOOsmDc42tTamRgWYSCecKKVEij51HT5jD2qwJxKWj2zQr7XmRb+a8NHfycwohyvNMEBgVcCKvrakecqXU3c+/SnmfH2guv65quLioVyTqJ6UoGX3vU8hyaeZeoHZXm9qcCZhEa03T2PqGNZN8IMxBUuSHibHmmnTbrmqmM9FLHoegS2P1usgPecndkK2bTAG1qVhUJc3XMtWxjRF5QbmbDOQkN8fxcJaGLsaaZq+Zzl5kWSEKbbNtVgOmn2b8HBjO4pMrClzb0HSd3Ayq0n2UYjgMTKdJDMUKUo7yxqNk4pZiZhxnpvMspJ59S7/rCJPHj16aM2A4ngmTxzV29UbMo6dEkY1pBcNhwDiLc5Z+19HUB30pRV7b+r1re1k0X+pHv2LK/Nq/+jpvPL4llyxglJT5zEcefNsGSxmN0TJV1GaZ+C4PJlXfB1hhKCnJg0v8G
tXXn50VomcloVLjf+et/zSqoCoFSec30yl8+3EvJdMq6xImVREn2ijSF6T0mZnKVDa4u9DhKnqFn23aAbTRwn0lWMwJXe8o/AsB94/9vvc+u1W2K0YiBnhdEKkuzYirIgQ3G4rsG2mvPHG3J6eTublUql8irxwXc+5PGHjzm+eUOyPecAbqM1424CpJlnnSWlzPFpz24z0q96tBVnSGMsZNhvPXYuZkyvmUZxxJSiUIrGEJI4ZY6yYwbSdIxRgsVP7hxzWPW0ncE2jqI83UICzIfdAeuMBIzPk7YcZaLm5kBvpRTWiQFezly7L6MUrjHXTuv6t2WjOmewbjYgjFlWEVIGpJlpNLjG0C5k51wZJ7vnU0JpzTgGYiwoo+jmsHUQKWeOicvnl2itOFxuyfqP/rz846KefD8lvv/+M3Iu/MLPvCO/cJVSbwtayRtNe8np0FFx2BxYrRqaxtA0Gmst7kQzDh7QpFAIU6JdWE5urslZc9iOXDzbEnykX4qkyTV67oxrQghzdwbMopPuvoJnT7Y8f7rj+KTn7JZ0T3IBsgR9u8aRUmJ7eRBDg1KuAyJLzujZfUhrMXKxzorEs1zZoofZARMZ4895ekp3LFfiROl9IIVE2zmmwzTblFsKhRQyMUSmYUIZ6TalDDFJkdc0jkcfPuFX//6/4OTmEV/66pfnvcC5CE35Wj52Fc5eqbzM3Dhbcuf2mpQSN04cz58O8x7A7BCp5B7KyN5DmX/TOUvwct+WkiVcFZFZxhhBXT2I1PwAT6AyWku30u8HWUTQs0THfDJZ8vZiS4yJ196+J11MU0AVtFVopeYdVmRPb5yYDgOrtaOqLCuVSuXT4bDdM+wGTm7K5Mw6i2uuChWPsVqyhZ3k84I0vrUSWWS77GkbQ7deYJw07aWpL/J5TYFcCEFMS3IsFKUkAkArmlY+72FzoF+vMNZy583bKJV5+P0npAjKdBII7hxaafQ8EQRFSZIVp8xszmfk74xR8o0V4Do3uycryGneAWQu7kQlJuYo0kAsUVRfaY4p0FrWlcYhMOw8r71za5ZLKtCQ41zsRnmuXjU/UwycPzqnFFguGqxrefbRhy/um/0HUIu5T5EPHpzzlS/e4+S4J2eZzuX0O6dzykvGRtsZul6y4qzTEqhtFW1nydbQrVua1qKVYrcJ9MsGbRTj3rM6XtL0jSylpoQfRSqVs5q75IWSC808btbWEn3k/NmBzeWIa5yYquSEjYWyi7OzpezARR/wPjP5Of5AKyiFbtnR9S1d7+h6h9EaZw2u0UzjbCOrFFpp+nXPqETDHEOi6R3TbAzTLjq0DbRACmnOsMvknJmGwO7yKSEVzp9uSAU++u4DDvtJMqualm//qw84OjtBKUPb98SYsM7KZDDWzn/l5SfngvcJVTKNVZyeOZ498bJfOn9MoaDSx86zeQ5gda3FuIxWmpgn4rxHkFMRiYhCHniIu5kuIhuZDgMpRbQxJB9olx3WGc4fPv1EQd7D7sCD73/EjTtn2CaivCHqhsycg5cy/bojhULbKL77L77/hzZbqVQqlcofjDJmLtoi1krhkkLGaIMfAwrYbgYAMaUrmcW6wQ8j1mj6lRRyOSWwivEQiTFhZud0kT8mbGPxYwQyOUEYPcrKbvR0mMhYFusV58+f8frnbvLs4QUZzW4zzLb+spNnnMVYGWA0raWEgs5alCQ5U7SW1aScSLmAlyZn8Elc2js3NxClM5hjxvuMQp6NpcBhO86Tx0LIYrp32I64vuH5kz2ntxZYpynI+kEMoigrSWSqu83A+aNzWu25/da92dcCfuuf/daL+0b/AdRi7lOl0ABdY5iiSCaVVvTLhpQzIWSmKeIBfJAMAwU5gzVG9l/m8MSyG9BmwcnxUjoO8eNTXUpFHCiNxVjL8hg0iqIL095TFExjENfJlGh7Ry7I3lku7LcDu81+3ueTmAMoGKuZBpFfxZABKeTyvCd32Hu0UfgpooB+3dN1DVoVluuO1apnsWzFlfMicbCWw3aQG4yWlDNd34oWWmlySaL59vOEQSmmyeOnwPZiz+Zy4snDc84fPeP09pqm78g4xv3A2Z0ixTEy+aTIuL1b1FDiyqtBSpkcI9YZTlYNm/OAD/naJCTH/PGkeZYuj/sJ1za0jZNpd4ykUFC64JpZXlIkh1JrjTJAKeQQGIeJ6IM8mFvDjTu3WR8vefbBI3YXnywy4LA9EKbA2d0b9KsVIcnXpw24piHuPa0LjM92nD+7+N1dOiuVSqXyiVBAiGKAlbM8V1wj50mbNbZt2D7bouePiyGRrKHtG5brBe3CobQhBMU0ijJLIeepdpZIei9rP03nGPcjxhmObq7FtyEkTu+cMoWC6wxtfwQp0fQ9i+MVxjmMMaSY0UZMs4zRslstmi9CENll1zvi7I5p9O/MODZKY3pZ5RFzPjF40VfTRJ9QSlzhXSvTyavInKuGaL+U86ffB5rWkPLcJM2KbtlitML7yPOHz3HGcPvN2+y2E/16gdL2407rS0gt5j5FlFLcOVuSvWR84CD4QpnNQ5wzMj0qMBwmxr2nW4r1ayoir9KzfrhZtChVmA4Tt+6eMOwGCezNElBOKfhRiqtcMn6Q/Zp+2aB1YXnksFYKL20U1jnCENBWsupQMOzG2ZQgglIEnyUjrrVoIxM1lHRMUpT9PTdLI0subM+37LQmzWYnrm1ou0aMTBpH2xqWy4Z+0dBcyqSx7TtKkUJXq3nBVMnNuNuM5Jy5ON9zeT4QUiFlaPqeu2/fxTQ9Mci1K5XpF+KAGbwYtOQiGutK5WXn/PmB998/59bZQnZrG8Nr9xd8+OAgXUbJUv3YjlnJ/urF46ec3rnFeBhJFFLKoGRhvSByY1SRnJ3IvLieOewOjLsBbRWr0yNWx8c4Z/kdo8BPSPCBiyfPiOMelKZtrcSyKHkwPt6NbLcTOb/ET8JKpVJ5BdFGzkQxZgoGKLIbNjDvrESazhDIZJUx2YjqQxtizKy7hnEIs9Iro7U020NI+Iv4sQ+ElbPg4qhn2E3cuHvEh9/8Frsd5GLpOsVq1dGtlrz/ze+hrSLlhMHhWot1zO7GQdaOjIYkX2sBlFY0jUVNUZRqztA5I0MRFLY1v2Ofre0cMcgEUWmFbpkNv6SRaVShKFlH6BaOxarBjxFdDN5Hmk6kmcYocFrkmq1ld7njsD/w9hfukVLD8mTJ4RBYtZqXeU+gFnOfIqUUPvrwGW++cwdrDMMYyDrjY0JryZtro+R/KKXoTk7ksOW3aMk+JMZMu7bcvH9Ddsx6SwqBFIsUX0Ec5IxW9Ectxmq0lSDhHAshyJJq25s580mRQmYaEk1rOOwnms6iiuS3GSchi8ZoSkmYpSb6MJujOIaDxzlLt2jxU5A/i57zRETeZRuHHwMxFKZxx+Zyjx8muq7hxp0T+kUjGVlKsz5dsjrqKRmsM1w+33DxdEeIif6oxQ8F7RqKDjROdNVag3KOjCzF3nztFso40NLt6RYtKWbazmGNebFvgkrlD0EphYIEfpdScAX6VnP/bs8HHx2IsVx3SMWpMlNS4dlHT1
ndOMY2luyRSJHMbFyUSTGhNOQY8YdAjH6OMMm0i4b1jSPafoFWGj96hm34RBLL/3/GITIOO7RROGeu4+S8T7WIq1QqlR8WinnHWmz0rdMYN4+jyuxl4CO5SLN+OkzQaFzX4hr922IGEmiF0XM8wPxnAZQuxClLMLc12May3wWWp7dYn8E4TCzWC5brHqPl8/kx4rq5ngyJbtERQpqlljKZa5eOOIkzujWSSaq1DB/KvDqQYqbpZd+6FK7jCPwQsHMGXMkF56wYujhN9GIkFqaIM5aUM7vLA0ZDu1iJTHRKIvk0Bo0UqofdiJ8i4z4QkqJdiHTOtQ5tFD/68z/Or/+DX3uh3+7fi1rMfYqUAn/3n36Hv3zzhJunCxkNG43JhTQ706n5xgspo5uGe2/dZfP0gotHj4iHiZQKPu44uuspSnOy7CizFDMnMLZh2B3YX+7oFi3Lo57V6RqVCzF4SooMu4kSI2peJO1XDSl6VNfRdvKmVErjFBhtWZ9aSirEJPa2i3XHxbMdfd/O+Xea49MFwxBpGk0KGT8l+nXPsJ/QxrA87ok+Mh4UCshRXDpRimFITOPI9mLkaDPQuEIYEhhLt3Q463Cu4bCPGKOlMGsdRSuU7gjTwPOPLjm5dYyfFK0PGN1gSGjnCP5jfXesO3OVV4R3v/ME3jnj+LiVzMls6DrN3Ts9Hz7YS/THLA+hQIwRNcJhs+f01g0O00jJmeAlUy76QPCRMAa8n0CJFLlbdbSLBf1qiTXy8ItjYrfZ8vSDh5/qHltOhSnFT+3zVSqVSuX3xmhxeVRKzOxyivgx0vYOZWRvWu0nig9iDBILh3HgxnpJjIXh4BkPAa3BKPPxUKGzjIdwbcrVLVtAiq1+2WLntZZpmDhZrekXjhg9h11gue6JyaC0RRlNAcbRo7Uml0zO4riZgoRyj0OQHGOjaRctMURSzDgnajOlETOTDEXLykEMmaIizknenB8j2iBRX342U9HSMM0xc3y64uSsYfQSk+V6x7AP87lcHOFTkH1AUKgcGEeNti3WyorS7dfvvNDv9e9HLeZ+ANrGcudsxS//9NtiIZ4y//U//x6GjEamZ/7KiU4j7yClmbxku9mm4TCCXSw4unuf7sTz7NElyjWkIkXRfjfSNJLXdHmxx4eE0gZtGhana7YXB5YnmWkY2V+OtAuHs5Z+0RF8pFsYUhHppLGKEDK78wHTWEqKTIfAamwlfy5Kd6YohSKzDxE/yCRvd5EpRaFVgzGGbgHRR0qMZBJhTJALaQpQMuP2wPLeGcFHXNtCBmcNcYpo7ehWjUgzu4bDZqRdtCxWLSnJomu7cKRUUEqzWC/Yb7Y4c8KkrvKrLPu9Z910jIeRkxsrUhTpZqXyKpBz4d3vPeP1N064cbogp4JNmlYrFr1lOAS536UDhNIyDX/64UOWxyumYSCnSPCRnOZdiBgpOrM46mj6lrbvsbaRh2iClCNhCuzP9zx/9PA6WLxSqVQqrxbdogOl0cags5jJWTe7h89ZbeEwUubIm5gSi3VLOMjvtV3DeJDCjyI5wMbK0MFPEdsYwiTPiBTzXBzNMk51Fczd0cwmfpvnA94Hbr1xxm77kKOTFcEnghcXSmOLxFT5BAbKrAhTRsF89mu6TJyi5KNirpUd3aLBT2HOS1WYRtw5ZTLHtZrsKuYHipgIWk1JYq6nrcWWQk7iC9G0FpSWM3qRgs51DevTFU8fHTi717Lo5PebxrL7FFQsPyxqMfcJUQr+e3/6i7xz74YYGQAxRP7iL38F3VnGJO46V6PuFDMxJmLKUGB5vGZxfITRBZQlkTGN4cb9BmPVtUlIv+yhZIaDZ7cZyBmWK8edN29y2I7cfv0MlMF2itvHR/PNLC4+Syv66RgSznliiHR9i7Gyu+caS5z33ZRWxCCdfT/OuVclo2MmZU2IEt497kf0nKEXpsBwGGkXPdMU0cBwCFijSQm6ZcM0Rrplj2kcC2dpO/l7UyhoKzbrzWyK4ieRe1qnQGmcVWhTKLlHqwP7ywNHN46IIcyaaK4z8XIuhBgpqkq6Kq8OORc+eP8CBRwdd8SYMUZx48gSF5rLQ2B7KTuhCkXKmf3Fjve/8T2Obt2S7qNRKG2wrUXbBdY52U2dl8fl50+kpMzoJ4bdgcsH56QiO7vTOMzdyEqlUqm8KrzzI+9w9837eB/wY6SgCTGRYqbkzPb5TvKAzVWMVKS70RJGze5ywDUW6zTWKIa92EbuLkax/TcKLZsupCC72DkpShK5ZrtoJBc5JexRR0mBOIrKo130OKeJ00TX93TLluAl1LvkMheMMgCxjSX6jLEalEjztTU0vRP5pRa11jQG4rxLl7NIPgugjMaPQSSluYgZjDVzAzOjikTlFBXm2IGCnxLWGhbLhgJMQ8RYjXEGbSXhx7pGzu1J4rlizFjz8nY+azH3CVFKcevuDTZDIAdZEo0xYY3GKoWa32gfd72L7L0UyZBaHK9xbYM14iRZLnYsjtc8e3xB2GUUI4tFQ/vmKdPoiVNAKzi9fczxzTXGaLrVkq5vefZog23tnFEHcwgVRRXGw4R1Fts02LYhDIG+beaF2UJRhpQgThEza5Wts+KGZzTWOVCSAWKd7KO51qEo7C52bM63LNcLhslz2O/pjo4JU6Bb9oSQscZgW00pDSFkcoZpTJi5kwNSuBqrETcUyElhrcJYWZq1VpPimocfPuHW6zdnh6SGdtkTg1itW2vYnB+YxvDH/l6oVH4Qci68994Fb711yvq4I47x2vFr3RrW9wznlxIXUiZZ1N6cX1Jw3HrnDoujFXESJ7GcJW+xxIIik1Ig+ImSE2EKHC627DeXKKOwytK0MrWrVCqVyqtFLldmV2n+2S+h4U3rSCHgO8uwG8ix0BlH00qeWvQJHxPrEzkjGqPnxntD2xuCl2iD6TA39pHCznWGGCRYXBtF5xwp6Nk4RaGbhsPlwMWTHTfuHPPetx6yOom4tqE/6mmzlBxKi3O6dobxIPJKidEBsmSgfmySJ0HgZPFFCCHTNCKhlGu5OldLPrGs+WRyyTIYKIU8F7cpRNAaRaHrrUQeBHntrJZs1mkIjIdA03eMPtEEKQjFNL0Wc585ci78nf/Xv+ILb97kpJfw75xFAlmGyOmtleQ/JZkaXS2SppRJpXD7ndcpiOwy5cz65g38OLE+6thc7AlDxLUrQsj40WOsIYWAUgprnSygbvf0945YHS9IsUh2yJTIWfbzSsr0i0aCvJUS23JVKDkRJ7GgTUnsz/3ocVeuQkrN+XKaftmRc2HKRaZfITGOgW7Z0iw71qdrtAaNp3MJYmC1XjCNIzkpxjHSH4O2Blvkh8D1EivQdE4KRWvl69PQtZYUmOUDClC0ixZtNM8+Omd5siInT/QaYx1N384h7YqmdSilrl2PKpVXgVIKH3xwyfJi5GTV0raaXMRpsgRY94bUauKisD3AtI9sz5/gOsNyfYTtGko2En4akji8psR4GAl+wO8mDvs90U/zQ0lyJXNO9V6pVCqVVxAFc1RVRFsltvzOEFMhhIJuHLkovA9oLY7lxmbQh
qYtkodsxSDkqsBqejc7joOiUICmlRy6FNK1cUkpSFROI86Uh60MALpe48fE7ddOeP9bH7Lb7Fmfiuv41X6f0XLGTFOiXzrClK4z8URtotFa5I25FEiZMEUWqxatYNwHTGMwRhHna8PMcsjOymsyRYy1GCORC34KxJBpFxavJbA85sQ0BIwxhCmBkj+PUtc5dc7JYKbMA5mXlVrM/QB88OiSDx9fcuNowRt3jrlztqIgeqWmc7IiNxcWaR7/pphI2qGUxjZOOvDaYJAx+HgwLI8MgxlwFvwY8T4SfcT2HQmFUpndxZb95Y4UT0kpUdDEWGj75jo0vJQ5WFEZSk5obdDaQsmkpGaHyUTJCddYcpYbRlFQWlFSAsocTZDISYq5pneEUbo5SsG4m3jtc/f48NsP0I2jKEUYvWSdTAl/mHCNlUVcozEo0Eo6OVpRrMW1BpQhZ4g+yQ+Kw0TTNZSccbbl5r0b7HYHjIXl0QrXGIbthO3stXz1p375p/nmr3+d3cXuhb43KpU/KjEmLi8OXF4cuLVq4aZlrRsUmog8uI1WHC8cLCypZNLmOcMY6c5WhMFgGllM3z2/ZNwdmIYD08FTihgSSQtl/qdSRB+r22SlUqm8iihAFVRR5KLn/WqNsUDxtG3Dct0SLyTPdHXSoygEr2hbh9Hgk6y8SJSVx1grTo+WuZiRdR1jFSkWtJXCRyIGNCFkUYVl2UVzXc+4H9ltRu69c5tv/8b74hipDf2yn+OwCtZqtJWiLhfJoGPefVNKHDidNbOxXZKpYpI1BN1ZQkioorCtSEVTSDI8SQplDO2iQVHYbwZZwSmFOAXa1rJYtzjr2G9HjDXzZDLIDp1WHJ0tuf/5Ey6fSo6esRo/ykrSy0ot5n5ASoFnlwfOtwPLD5o5U07z3/rqO5zdWEGM88dJF6SgOD47pulEdqjtxwueRlm6pSalQq8Ugw+kZ+dYRopqiFPgaG3QZFzb4f2Gw36UIO9RrMsPm4lu6TBakTI4ZzGW69BxpUXeaNs8T+qYLV/nrsM8gQNmFz0pCmMQvXOKee7sF3KShdP9+ZZSTrn/hdf56P0LpsOAHwMlFU7vHJNCxDZu1mDr63y5ksXgRKs05+FpwpTol638PqAVKGcp1rIoCw6bHX4/0faR/WZgfbIS18zZ9s/aWeRdqbzCPNlNME5c6AFzT3E6LNCdoUFTZFgNBZyDwobpfCAkmCa5Fw6HgTw3Y65yggC52dXVvyrWa8PuUhGrOrlSqVReKXLK5FgYxoBrDClGmlYcJJWS8xV6lh3mTMkSP6CNDBnU7Ex52E8oa0TqqBVt55gG2aHTs39DDNLQb5zkiForplxKaVKQAYIY62WK0hy2E/1yyWLZMB0OgOyuGeeuV5IU4pZekpahRJa8uWnwdItGCrmQ0BoZiAyiIDNWY7SsM/nBo3Uj++ZaonqUKmREhZZzoiSJLrCtw/UOpa2sAbVWpnZjmOO55LU6Ojvi8XuXrI6W6NnN0xiNT3Uy95kn58L28LHF93/zG+/xy7/wBemUlI+LI20tWRmiT2hjIBemMaBQ8oZxhn7VQmlJMWKNIsaOxijO7pySVcvF+Yh2TpY7tYaYME5JdgdSmKUrKaMCqxU+gWksORecc0xTuGrqiHGCUiILzZnso1i7Ki1BlOUq70MxTRnnlLhNRkVnLOsouSJ337xJf7Rkc37g6UeOlAt+GBj3EYxlse5m21vpysQsw2uFupZTLlYttpG4gX7VSVRBKkxDYHWy4tmDRwQvRWi3aGkWLTlmcdbMmZz0x693pfIqEyGQCd+Hj9iiV5ZVsrgzyzpbsrnKqwOtPQbFsoWYMwZFUYaQCmG6+vlzNZGDxdKw7B2rleHhC7zESqVSqXxSlEylYqCUQr9s5hUZRdM1xGki5zI7HWeCTzinmQ4Tpe9YOTNPnMR8RCtF9LOCLIo5iXViJpKSxs652WE+K0rmW5KvQRXGfcB1lrZRTIM4Rr7zY2/yzX/+bYbDAbMxLFZLSlb4IoVn8B8HkystRaZtzFxMNnMRKZlwlEIMERAlWRoKWiMGKEAG9GyukmLAj54Ccx4d6NbOxacMJVKQIUeeJ4VX+3VN53j6/sDqaHHtJM089HhZqcXcD4HjRYsthYunG269doZPYnqiEfmlWbdEHzCtvp6IyRTN4Ed/vf9lrGHYe5zTHHae9dkxwWf6dU+3aNleDsQgWXIlF8ZDYLmWlMboJWLA2ll/bDS7ywOH3XidI6K1Yrly7C6m65wOye9IBO/FYXL08qaPiegjfpxoWkNJmRBkF9A4jcoRpS3WZZSyvPb5+zRWkaYDTeNol41YrzdG8kyMjOxLFkOVgshMcyyzs5HcuMoarLlaO00Ya5nGAb8baBcdl0824lSUZf/HNrPUoFL5jJF3kQ0R9QAuUDRWsVw2WNfSdjKqKwoMCtdLl3GhQK+V7NoajZ7zelRR4pOU6n5ppVKpvKrYRtzJgxczEK0yfQPTGLl8fDFn8EoBo60mBjlfWZPJqYh7pJbpU9OLa2NKGes+boynKO7sKc4eEDnJPvfs2K6NIoVA2xm6pWUzWZpWTEm01vzIL3yRb33tXXbPtiSfxaVdacJsHmga8UxQ1pBjoSQ58/kxUHKZYw2UeCkAYZK1ATMr24yzTPsJ185nwZLw4ySKL61xzhLGgHFGCkiVuLyc8FPCNSLlTEGu2zUGFLRLw+psiT9Ehn2gaTIpvby2z7WY+xQwWs2ulPDjb9/mzo0ly87hU6JEcYHUIaK0xmrNvbfu8PC9c/pThW0crpG0e5CARwn1VqSQaPsGY+RN269aYki0nSUlsI1j8pKf0fYOKHJzGDmoKaTzsLsc0SpyuNzhQyR4aBpHv+pxrbhGBh9xTuGHLIGNKUORr6+kjF1Y+mWD1gvSvIzaFSVOSSmw38iemzYNi3UWx8sCrnO0a0Cp2Q7W4ieZStpG0a1a+aFQyhzemIlhlnC2jnHvsY293s87uXnEYTNw6x40jSENCddoUlL4nSdfhSxXKp9RSoZEYfCFwY9YE3GNpneGxdpJU2R29frtjY2UM7kotCpQFAZNLLkOsiuVSuUVROSGWqSKuRBDoO0kMy7HhHVKUo8VFDI5FBSJrtdY10r8lCrXRn0hBIzWuEbs//OcVWecQReFdRLYHcZ0fc7KOWMai3EWpeGwDzSdZRwy++2ebuEgF177wmucP75gc74np8jiaElJhpQV01ywNVqhG0sMafZu0EwHLzFWo5xLm96RcqKERJ4i3cJJVEEjzvC5QJi8eC60DcYZFkvHIWfCfsKeLolBVorIhekQ0MaQS5nD0MWd8/jmipIjXWul4amQ0d9LSi3mfgDWy5Y375xw63jBvRsrDoOn6+ycg1HIY+bi6Zbj28cYO6fUT4GH3/2I1a1jnjx4jHENx7fWoBu6haXp7RwCnEGLbDJnaLpmljpm9ltPTOJCpOYRc9NaspJOiDGK8SC5G+NQCJPHqsj5R48Zdge6o1N848g5cdg1aA3BT6QQ5fCXE1aDD4q2twzbhJ3DGK8cJ0OQSYCyLSkkpsmzWDXE
CDEV5LxYWK6WpCjuem1vSBmMFfv0RkvRquYwSxAZ6qI3+CFAAWM0OWZsZ0khY5zl4vElIWbaxZIhDEg1mLHO4VpHreYqf5KIKRIHGAbY+5HloqExlq4XMyLJSLnqcogEW6ybCx893DJN8cVeQKVSqVQ+EYWCswalIGeFtuIE6TrHo/cGihLVVdPOuWvz5s902GJbh9KFkman9ZBQjZQFtjUYK6YhSityKkwHT9O7WcoI1hpCKPghkELCTwHXWFyjMdbgmhY/THhf6HrF7ddvcXp74sH3HnPx+Jzl0RJjjJgBJtl3M86I4V8jcQRKI8ODRTOrtzJ5zmNNKTGNkaaI8q0oSCHy/KNnpJg4vXWCtY14L6jCMCT2W38dZaCNwnViznclpyyzgaDSlo++/ZQ7b98SF/ZcZm/Pl5NazH1CfvKdO3zp7dssOkfwgTy7/YSYmEIiFZhiZvCJ/HTL8mRB3zWiM7ZQsuLm/RukUNg9v0S5DmOOZ0tWhdaGkudDllIEL50QNYfXWadoWku/aMhxQds5MVgpkJOMtkUeGUXe2FvOP3qKdQZvRpyRiaB1ag5zhJwNOWVSkh1AioIikQYlRkpKYlIyenQJUDQZcbosScIgn3y0pT9aYOdpokQfGFxrGIeJEhLGiD5apJSSKUIUfbVzV3s9s1GLs/gxzJLMIll5IbI87nj8cMPxacs0enKSeL2ccpWNVf7EMo2FaZwwytP1hpsnhqwaUFdOZwCKbAqXzw9sN+OL/pIrlUql8glISfwCUEoKHCdu5ShZpdluRoxReO9RyrK/HDg6WzIeAtrK8b/kQkkJoxWmcygkqy56kVJGnzDNxwqOksVMxVhzfdYyRmGMxTgNBZE3Wo1rG1IupDTgJ8Xm+TnL4447b9xCUXj68JJpmHCto+laSkFUYo2hJCmeUkwYa8TTYfZwiCGSjEZbTZhkNck5g1YaP0U2zzYYZxjHiRwTF888qcD9t+8wDoFuIf4Ral4v0lYylUOIWCuu6/unO6YhM2xHicbSL/eYoBZzn4BV3/Cl129gtGI7RVnQjJHgIzFL5xs1T3FDZAwJBbTrnhgduutQ1rJ5tiGlwvGNI4ZhYNzu6VZL8qxBNsbMckcpeJSCGPh4WVXLv0smSKLtGkqRiAJjDNpK8OI0BuLkaddHKKVwyyUAKYh1bSKTpiAdnZLJZHQB6yCFjFu0Ml02hgxkFFMQu8ngJ3KC9Y01u42n7SzOGRRimiLdItFod60jGC0/LKxGa7GcnfVgYoqSMnE/Sacmyo6d0oqms6jS4A8SdTDuBnKwjLtEKmItu9tMtG1Xi7nKn3hSKewPkf0hAtMf+PGVSqVSebVIMeG9GIRoI415ye01hFEcIXeXB8aDR6GYfGQYAn7wrI4kQNyPEtqtDcSUr+WS1khhprQMCsIUJcMtSrEYpkhEoYwMD8LV7p2Ctrd4n8nIOlC3UOwu9yxWDpRh2A20reXeG2fEWBgPI7vdRDs3+eOcqRxDlF2/RUOYRHqpnaH4QpgCJmmMM5AKIQZsYxjGgfEwcufNW5zeWBKnie3jc9TyCFUghsh+J/tvypj5moqcU41EJaiS6ZYN69Oe88cHzu47yZJ+ic+WtZj7BBzGwJOYuL3oiSEzZsghM4VEiIVSMm62TqVkMorLiwNHiGzQWItd9Cwmz27nWa06bGt479tPeON4ic7iKGTnEG8927+iwEieoYyxtWJ3MZBDIis9dzGiqA6NQZcrdx/D9jJBu0JpiEWRp8Runxinif1mlKmf1jAHnAcfQcnyaL7KrNNaFkVnCahC4doGKNx94xYxZVJkdsJUuEYzHjzaaODKMlZulhIyqtHztck07irvI4Y0G6tIpofSMA6B5aolhEIuiuwjq5MjlNHEMSDB4g7rJPqgUqlUKpVK5bPK1VktzyZ7rtHknDnshvmc2LDfnDONHusspSjGfZib/J5hiHIuaxQhyKpL9EEOXUUUWlemIkorYsyzG2SUaVhrCSESfMJaNWclT2ilcVbj48dnwdPbR5Sc2F7sCVPAthYfMiVn+mXHnTduMA6Bpw83kDNt72Q6Z8GP42yEorFF4ywUUzBu3gEnsb0YuHF3zf237/HaW2fkVDhstnz07fcJPrFerHGdJkfwg0hHyTJYKDqTM/gQaXrxe+iXLc1Co6yTNaHOScTXS0ot5j4BuRR+4+sfsOwbFIqffuc2G+8JU5SuBjJ98jFxmBKrRUNrFdN+pF91tI3Fp0LSlqaH/WEihMSwPYhWeJ5szV6osn+XxbHS2Cv5ocI2lu3Fnt3FyPrGQjTMjZuDyjPDLpBzlr3NJIHiFDCNA5DU+5Dm/y6EUf5uozUpZkqRbc+cEnpesm3mzxFClL/PNOwvAusbay43HqsTKEXbt3NxZj7OtdIKVTQ5J5Q2tF0j7p1lnjaWgnUWlJ5H/wVlkNfE2lmzLF2fXDLTFOnXPdZJIWqvMvNewHuiUqlUKpVK5Y8DrTVf/uqPkkrGj15cLbVCzZnG0ygRA+uTHjbiHq6UGPWVDKUoUsxoY1BGU0KW7DctDXmAprMEn2h7g5nDwiHLoGGOEtBaoeYhw7gPUuBNCWKWuCyfZ+8HyWo7vnnM9tzhp0mmDcqQUuTR+88Yx0zfW/plxzQlVsc9YZKJ35UjZ0yFbtHIa2CM7O8V2clLIfHg3UsWCyhFCrHJF9xiTS5w2HlyViwWjhAzqsjhVDKNC6a110WjbhxPPngO2rE+WqBmJdzLSi3mPiHnlwfOLw84o7m8e0zTdqherFafnW85HCZGH3l6uedXvvo5rDUYKxasKCmk+kXP5fMdu10k+z39uqXkzDRIN0OleVzuk7gDHaIUMnneoSsFpQ39uidFGU+7VjLsrtx9wpSIIWDEI5aUJFwcJa4/SsnuXc7zYmwq5CJLp1maL/SrDm00KaQ5lqBgXUOKkeQDzkkw5e58S7dasmzmG6QUlFG4xjLuPNoqrBMbWFAM+4mmE+0ymfkHSybHREpi0JBylsmekX2fprGUDMN+wjiPYoEyFnK6LnbrYK5SqVQqlcpnFaUVt1+/zX47Ya3IB7vGkbWeI6QswxgpKE5vrRkPkzS6VZGCLSRiyCz7lhzTtcNj21uJ0rKy2qOVYjxMKBRtL5LLFEU9lWB2u9RistLZ6107MSPJ+FmeabQiTJGSIyc3F4BkEvtxIoWrpr5it5koKLyPoFpKUZRUGA4TTefwPqPN7CFBIuVC2zlSFAWYVomcHSUHNpeB9vQ2rrWkHElZnOdzlnxj6ySuJ8dEu2gIPs2SU02cEk8fbLj3xi1WJy0lvdyDglrM/YCElPknX/+AvpFQwlxgmLxIEZFuhc8ZZ6TbkICsJNupaSxt59jvRjYXkVv3z+RNpmTqVsqcndaK5SqoOcRbIgdyRm7SvcePkeAntIFpEHmk0rKsqqzCtYZUFO2imcPA5WYU+1kpmGKWKWC5+sK5koVKaOSVJNL1kgHnx8zh8sC9t++w20w0fUvTN/POmli56qsbxyia1s2/LtejtbkyopT9QLIUotdLtSJJTSnhx4gqIgFFKXaXB9Y
3bpCy5JEYZ65DH1/qO65SqVQqlUrlB8QPgXE/0C1abCtnOKUU1irClChZ9ulSAK0y5Iifg7KtVVhnZ9M4WKxaiZ2a8+j8GLBOz2swavZqULhWfBpKkelgjInsE1pdKbYUTe/wY0BpjXHy/5LL9eBADFsK/aKhaS0l98TZ9bxMnv12wpjCuA2UotDWSNadhpIifkiiHCuFpreMuyAxX11D21rGIeK6ntO250xrSkoM+5HWKhlI5IK2c5RBhmYuUpVWFAV+THRLx+ndI/qTht3lwHLdk9LLe7isxdyngPdRugi/C6XAP/z173Hv1hFHq5amsXzu+AyAaQgYo+n6lnIC7dKRo0YrkUWmJF0CVYrkbmhF9JK1UXLBNRqrpeBLKaJUIUxXWueMnm3/yYW2a0gx0yxaCRQv0l0hy05e9HMsAeIU1HYtOeU5483M42UjWXBadtz8cEAr6Jc90+BZrTvQEu5onWEawvVrIA6dEKOMsFPMaCfuk8YYxsOEbZ18cJyNU5JooVGKtpVduIxoKf0Y2V4c0G1P0zaAxjVS5FYDlEqlUqlUKp9pdMZaC0WxOx8xZ5Z24QjbgB8C63XH6ESRNewPbC8OWKtpGoNrNU3vMFrhx0CIEk2QU6ZfNpIVjJwlm86KE/qUrhvwSql5YCDn07Z3WK3EYTPG60DupnNMh0DbyXpMQdzaQ5S9tcY68WTwkbZv6BYt2hgomRTzHGslkVNafzxk8FOkpEy/bokhz/nLCR8K/aqlaeRcmbNkMRtnmcbAbjuxOu4wVtP1Dj8mxkOg6SWfL4yJtpN85aMbS7ojx5N3N7RdS3iJY3xqMffHxEdPNnz0ZDYvuX2P1750QtSKpjN0SyeSxRQwdoEqZXapFOlhDAFnjUgJtbj35JhwjZ07HQNaK8Ik4YeliFFJjJmubxi3B1QrW3gpJGJIhJDJc5xACjL5cs6ijcIWySzpFo1MCLOM0bXWKArJR/wwQCm882NvcflsTykJZRq6ZUvfOEpJGA1oTU5FAr7nnDyttcgrEzBbyV4VaG3vQBdKUYh6UjTdwYt1bikSYdCvF7hlhzJWukSp4PqGGGohV6lUKpVK5bONtQ5jI8Zp8r4QQ6YcPNMYCD5ijEzplNIMu4HFqheH8AJkhR8i1irQYrqnlUJrTYpFmvxzeHcpBT9Kcz4nANl/M1pjjRjXieLraiig6BaGnMQ4r1+KYisHUX+peULXtvY6sFxbQxjjtT9Et3BorXBOoq2ui7nZuMRY8abIBQkmB6y2skc3y0glo9mJp4JVnD/d0XStBKM7MfFTWl3n6F0r0qx8fPGK97/+iOXxkqYx18Xky0gt5v6YKQWG0ZNzIYRI2xqUkg5Byh5lCjml6/BvCsQohZhttBQrBXELaqToUlfj7iIFXbvoKTmjkembUjIRUxSmITDsPcZpog8iB1VzJl1JmCw3iDEKpWfpolJQ5GNKLoRxwh9GXvvi6xw2B6KfaPqOprfoWRF6lVOnSr7WIFMK2lyFFyMLqLNTZpmz81IS2WeMCa2k6LONJYzhejqptKKgGbaeHLesjnu6viEXdR3+WKlUKpVKpfJZ5aoQQkHTWsaDx7WGYR9QKoMq9MuGw2ZEKcvqaEFMiWE3sXIdrpEDm7FynkwpEyc5ZyFCLZEjFsgxg1a4BmJMxDCrrowiy29JVp0TJZcf/SzxTNhGJntKIeYoucgkzxlCiaic0Vph1y3RR9nDs5IbJ2Z8cqYTqaWi7RsUMhFk/jVrtcRbBTlbW2doWis5ezmj0DirOL6x+DhiQEGYwm97HdXs6VCIsTBNEVXg5Gwlk8CXWPX18lqzfIbRRpNTRqFEg5szWmkuH+8xWnLiQG4QlOSxFTQFRYoRrcpc7BmeP9uhNGgj1v7jfmQaJsr8v8unW05urQg+MBwC4ygOl9HH6705CYEsGK1nPTOUkmVK18iNaSxYqxn3A8Yqbt4/4/zxBcF7+lVPu+hwbUO/bLGtlesISexwcyFMcS7ExCwlxUz08rVoIxO/6KNEGWjNYtlJgRqS5ItoRPJpDe2yJ0YZ9ccQWaz6uWBU8rlqLVepVCqVSuUzSs6F97/1HtaI94A2+jq8O8ZMTtAvGkqS/bBu0eHmQqvpGrpVgzaKaYr4gyeHhDViCAJSpC2Pe3IWNVS3auZoKrgqHeTcqcRpPMsUTqt54qVEIWaMpuSMP3hKgbZ3GDMXXj5JnrDT9GuZmC1WLW3nMMbgGotrxVtCK3CNxVpDSSLXNEbT9u56mpgSeJ8kZ1mLwYl1RkLIC6xOF7S9pWnNrGorKGtE7Tb7LzTztND7jGsc1trrsHDXvLzzr5f3K/sMo7TCtQ7jRD4Ysyx+hkn2w7RW10YkVyn1AGR5M5Ui06xp8CyXLfudn21f1WxR26CNhDtuz7eAFG85JVzXUVLGT4EYxCjFNhZ3PUKWRdeSpWCMXtovMScOMbJeL1msHc+f7ikpQ9Ng2oamk0XWGDJZSYi5aywlZ4wVqWWMIuucvMQlWGfkc8ckXRVnaebXRylF24strjgvFVwjQZjyQ8CzOd9ydu8Gzx4+5/TmEWQZ3b/EzZNKpVKpVCqVH4iSM9/5V9/i7lv3yVHOim1nsVajtTTQw+xySVHcun8i6zlRsVg6XGOgiNt4jolUCnGMKC2N8ejjdRxUih8b15ETCiTWajY2KSIXo6QsAdxJ/l1fedIVObsao+fzmRR+be9IMYsRX5RsuThnugUfmcZIKZmU5Pynlai3tBHjFQDmIkxfFXBWk4tkGUPBzYVuNzunZx+vDfOmwYuHBEoiGpR8XakUSszY1rI+6yml4NoGZV7e+Vct5l4IIpssuaCVIqTMNEacM5S5GLkyDSnz9M5YNQdugzWz7NJqFuuG5CNFw6iyyDV9ZCIyDSOqFDbPLvFeZnvOyxTualx9ZWhSSiGnQkEMSZSSwimFidPbpxhlce2K/fbA+dMge31Ng20XuEY6LSmKi2WMmTLfdDlz7RJ0VaRaq68LUn0VjRAi2slCbUoyqUxzpyfHLBKAmKHA0ema/WZgddLhGgc5EILHey2h6ZVKpVKpVCqfYcq1w3eZFVQG4xSuEV+F3Xaib6Bdt5QMfixMw0i7WLJYOMYxXWcZJ5/QWrFY9+Qg8Vgp5llumRn2Xkzz5r+z6SzT7DKujCKGSJwi2hoprnSBrAghY2YDvKuwcDH/U4QoJn8pJIwTw5KSC2iHn9LsoKmv1V05l9mIJV47aaLUXOgVUohytk6ZlDPGKsYhog0sjpZEnxi2EyiZNvpJ3NNzKrjGopXsHabZ4fMqGD2mQqM+zt97GanF3AvAzTlrxhqMLTSmQ6N4uN9LvoeRoibGj3fnSinXi59KKVLOxJTZbwa+8S++JzazRrFYH0l4425HzgFs4fB8QhlDyJlx3EoMwDx5UwbaztL3Ha6xpFiI48Dq9JiTs2OSDwwHz2EM6DmDI/uIazts6+h6yX1LuWC0wlkJIrezlWwqUryF2UFTUiLFIbNpjeiz+W1TyIzIPOfXKsWEba9iGuQ1SFnhp8jJ7SO6VqOVI2dx4bxyYKpUKp
VKpVL5rKK1pl+1s8RQzljBJzGrU4WmdQzDQAvzyk7BNi0pBHKMaAXWKZS2DHsPRjNuRwkIn2MGlNbYxpBikiEAEgV1ZdDXODnfXcVq5ZwpKWOcosRCRmKshgkaZ0TWOUW6ZQO5MMYwB3VLIz6HxDR4UAql5KwaQ8LMRWIpheAzlATzJC0qiJl5WqcwRqZxV9FVJSumQyDHTAgFlTIZyb/LY4LZp0JbQ86ZFPLsSaFp2gZtLdMUpLh9SanF3Avg3W+8y523XqfpG3LIlBhxjaFftrPmWSZX19rl2cURQJl5uuXmfbKUWB615HHEACdnParp2JxvsbZHK7h15wytZZE1TIlcJGsjp8I0jnN2huXodIEi4ccJHwyP3n+K1qK19mOkQRE9tMsF1sgNXnKRAkoxO17KXVVyIfiIay05c23WMo2FHDNGazTqer/tWj6aryIJyjyiz4yHgDVcB1lqgJLpOgslXZvAUAp976oBSqVSqVQqlc80UmwZUvIoJe6UkMmpkAtYDSFkCgFjLEWmAZRU5lDuOT8OcRwvRRRfKResNRy2E/2yAT0b7gEUUU9dGeV1y4ay93NBl2V6pyBlkVqiuM5dDkqLO+UcRZxTkVUapEBbrDqCj/Okzs6RAeK7cHUIln3ALIYqWrLyFIXJh+vJnXg1iPlK2zfXr9Vh79lsPOuTnkVrxTimKPLs/p6TKMas09erS+N+IKPRWoYGLyu1mHsBXDx+Ll2FXOYxshVDkpCvOxwit5xlhrlcW/QbY66ddySLTXbKhs0WTeHiyYrT1+TNe5U3V4osmRYvb85wCOjiKFmmeX5KFCKH3ci4H4k+sD7tSAmCjygGlJPdOIMWl0otN76xc3TB1TKrEYelosXB8krHHIPY5Ha9I+dMt2iufxBlSS8nxYQ2mqa1DAfZA3ROw5w1YhtLCrJA66xm2Iwcnyw5DAO2VfSrlve++T5+9C/2G1ypVCqVSqXyQ8Q6g9F6FjxJV/tqQlaKuDoaZ7h8thFzEqNoOoe1mrZ1DKP4NRgrZiMhJEBjTKEUzfJInNFzzJjWYqz0ylXKxCmyOOnZ771INI2sBF2dW0HWa4w1wDz5UjAOAdfOpcdcHTatvY5TmP8gSkG7aLCtIc8TwZwyFDHmi/NOHEXRdPb6+oxR8nFIvFZBYTT4KZKLol+2aK1ISSSaYZaGGi2mLOLwLgMPgO35hJ8SxzfXL7WbZS3mXhDWWUDedEoV2tbOvyZdDGM0zLtt2iiaxoCWLI+cC0UpKInJJ9rFgo3pUBqUNcQxUEIE7VDKzoWQw0+B8WICCsM0iaOQU/hxwjot9WVKpKw47CPaWpRVKOdwRtMtWnJRGKUl90PJOLuUuQgtBW0M1pp5uRbpysSCsxI4jtbkKYhm2mrCFGWEn+bcEidjbsktSYQpEKMEWUo2giLFRLfoyClhWkvaFpoewuT5zte/wzSML/JbW6lUKpVKpfLDRalZTphQUQw9JFdOdruUhlwyPiS63pATPH14gdWFbtXhmo7DpSc3lqZ1tJ0UdHk+d7rWEoN4GzArqfTsHJ5TYtx7mZpRCLHMf6e+Pq+pIrEJ2mj5HAqsBU0hetlrc86Qi7izK62w2tK0iuADKWd0Eif3j80AFSlkyWx2Fm20FIFzsWid+XjipzUaZpdzRQ6B5bojo5jGiLUG0xhZXQpi5JKzZCEbJUWgbS1Ky+6ffnn9T1Avc6WplHp5v7gfkM/9+JekYNGKlCJaK4bdhG0cORVsI2/InKXDoBTXmuGr3THm3LgwBUKINK2jaRz90jGNsiw67CcoonUu8zJazkWCtnPBtQZtpOBzjbu+EY2Vj5WsD0XKYJ3cEKqAbcycL5Ips8GJ6J7FrcjNWR05yyj+ygzFteKcJH+HuZ46KqPJUfJI8rx0m1MiF9mb00o6PEopgp9onKZfLbDOst9NdL0DpXn0/kc8+eDxi/zWViqVSqVSqfxQOb55wptfeoeU0rzPdSUVzHPMkyLlhJ8CJWa6Zct4mEip0C1a+kU3T+bEPCVeTcBmkxNjDcxuklcRA8bKQCHGPP9akQGElfWWOPsjyIROHBGkkBMTvOjjnJtsST5du7WXUmYjvXQ1BxA5p56zj60YpaAUeg4MB2bDFCXZyldFI3LOdY2mZFkBUgo2z/dSzP22s60oJyW/mCLun92ykfNpykxDkCKxcYz7A+99890/xu/wv8U/L6X87O/2G7WYq1QqlUqlUqlUKpWXl9+zmHuJh4aVSqVSqVQqlUqlUvm9qMVcpVKpVCqVSqVSqbyC1GKuUqlUKpVKpVKpVF5BajFXqVQqlUqlUqlUKq8gtZirVCqVSqVSqVQqlVeQWsxVKpVKpVKpVCqVyitILeYqlUqlUqlUKpVK5RWkFnOVSqVSqVQqlUql8gpSi7lKpVKpVCqVSqVSeQWpxVylUqlUKpVKpVKpvILYF/0FVCqVSqVSqVQqlZcbrRUA9968TeMMpcD3v/sRzllu37/J9nzD5vJAoUB5wV/snyBqMVepVCqVSqVSqVR+V5yzHJ8d8dprt4CCay3EDFrRtw6lQDcOoxXL1YIQAtvLA+PoX/SX/ieCWsxVKpXKS45SCusMSilKKaSYyTm/6C+rUqlUKp9hrDWs1itWJwuWRwuiD7Sd5XTdY3Km7VqGVceDB8+YLveQEiVntDbcvHuDy/Mt28v9i76Mzzy1mKtUKpWXmOWy5+4btzi7eUK/bJn2E88vdhw2e5SGlDLjwfP82eWL/lIrlUql8hlhsep57Y3bWKsBhZ77h8fLjpvrnrs3V5zdWovU8r0bvP/hc77//hP8eCCFSAqJ1WoBpbDdHF7otXzWqcVcpVKpvGSsjxbcf+M2bd/irGYaA7vNnsl7Fn3LzZsrFnfPUEZxeblnu9tz+/4Z7377A8ahyloqlUql8slpu5Y3P3cfVTKL1tE1loVTLPuOW6cdZ8cdq5XlpFNorXH315ytGowufO/9Z2y2B2IqpBhZLHvG0RN8fNGX9ZmlFnOVSqXyErE+6vnSV95ktz0wHkYmIMeM0QqsYTxMdM5wdKPj7q1jdrvAt7/7EY+fPOf2nVMefvQcP4UXfRmVSqVSeUXRWrHqGlSJfOGNM167vWbhCioXoLBad/SLhq6xxJQ4Oe7oO0PRd9juR3b7kRwCMWZyDlhjCNRi7odFLeYqlUrlJWK9XtAZA31PKYXDMNK1lrOTFTdOeqzWHK2X3LlzzL27R9jG8vq9Y37t19/l2+9+yJ37N3n04Bl+qhO6SqVSqfzR+eLbt/jRd25TSuQnv3SHo87gh5FhCgyHgFKQUyKEgHUWP0wEH7lx1HN2vOD9D5+TU77e8277lqZr2FzsKKXaXH7a1GKuUqlUXhKcNfz4m6e8+c4p6+WC9emKb3/3CZeXe964veRs6ehazfJoxb23buK6hv3mwOt3O8KXbjIOBz54fM5rb97mg+8/Ivg6oatUKpXKH562sfz5X/oSd28sMFax6A2NVhRnGX3EGkWYPMFrym4k5QKloLTCNQ33bq/4zvs9+yEQY0QpRY6JbtFgnalyyx8CtZirV
CqVl4QvvXObP/tLX0blwo2bxyyWHXfWlvPzA1YlnDUUnyBE4jShjSLlRAmRo4XiS5+/ydPNnvPzDSc3Vjx5eP6iL6lSqVQqrxC/9HOf5807S/I4YXTDtB2htaSUSalQVCHGjLKQYiIrsNaScyZMgd5pTo8XbLYj210GMiiIMbM+WvD86eZFX+JnjlrMVSqVykvA0VHPf/fP/ySLhaNzltYphs2WMnmOFoYUFJQCjSakxPMnG4w7kGKiGEWMgVtHDbdOFuy2B5zRL/qSKpVKpfIKsVw0/Hf+7FfIMVJQtK0lFUUMkaJBaQUFpnEEZ1Eo0GAaCBmGccKoxOu3l2x2I94nfEzkFCmlSB6d0eRUo3U+TWox9xlFKQAFiDb59t1TxsGjlMY5g1YKreH8+Y6xmiVUKi+c1+6d8pUv3GZ3fklMiXHyhGEiTgFlDEppYilogALjfiBrhbaGaQq41mJbxeffOOXBsx3bzZ5+2THsxxd9aZVKpVJ5BXDWsOwdYRdQKLTRlARaaXzJ5BDxIZIV+MGjjcI5yzB4UkwoJbmofaO4fWtNQrE/BPb7A8FHjNUslh27GlXwqVKLuc8gWivefOcuTWPxU0ABw2Fk0be4xuKMoWktIWX65YKYEs+eXLLfDS/6S69U/sTycz/7DoftHmcdxhpyTOSiKFpRYsY6jTEaoxXFaApQUmQYJumWGlAFbt1csGoMg9G0ravFXKVSqVT+UPyP/tLP0jkH1jKNA9EnjDWcbzaMMTH6gNKaYgzaKkCyTmMMhAjkgmoMqYB1ipu3VrT7hB8DSSdyzDSNe7EX+RmkFnOfMW7fP2PRNxggDgGjC/v9REyFRadojRz6NruRtrGgFNZYbt0+4ezmEaUoUkk8eO/Ji76USuVPFF/+3C28DyyaBqc0+2kkpITfjSijmWKiaRvGYcA4S1EKPwXGyeP6FlUyGk3XaN5664zn2xE/1UXzSqVSqfzBOGfoWoPKGa00ylpySIzDxOVuYoqJDBgDThu6ztE0ljB/jNKFGBIEkWXqIsYnrS28+fnbfPD9Z+z3B9rGXrtcVj4dajH3GUIbzc2zFcNuIMSMcZZplC5Ka8BYxegTmkTbWAkXnuWYTdfg2gZKoe96+r7lO9/84EVfUqXyJ4L7d0/IOaKRXQSVIyolbClMStM6iwLCMLIfRkKGmBNN7ygoYsqUqWCdQWnN3ZsLbt86ZjhMDIexPjQrlUql8vvyUz/xBj/5I/cYnu0ZtgMhBB5fHsAa9lMmhYRupWxQPmG1IuqEsZqmdWgf0a0jAI2ztD7AsmE3JoJP3HvzjA/eTfjJ45zFV7flT41azH1G0EZzdnbEbjPQOg0K/OgJMck32Si8h6NVj1KF7XYkhkxRzDazgRCi1HY541x9a1Qqf1z8yFfuk0MhqMxhM2C1Jk+RHCPjFJlaw2Y7YpuGwSessyQK6Mz6ZIF1hhgzIWRiTByGiRt3jtjsJ549eU6t5SqVSqXy++GnyOZiYn+5Z9geGMbA5COxgF12JDSHmKEUbEjYMZBL5uh4gdUaYw3L1hBjZHKWrskc9hFVCt2qxbSOkm7w7rc+4uhkwdPHly/6kj8z1BP7Z4S+b1itOynKVMb7RIoZqxTaKGLMLBcW5xTPnu8ZxogxmqI1KRd8DBSkm9J1GpMjJ0c9F5u6R1ep/LDRCowq7A+eVDRpSlyeH1AkckhM3vBsM7FYg+tastKgNWMsuFBoc6RdOOIhk0umxIzTcPPOCd/9N4acq9yy8mpydLLk+HTNsB84f7YlZ3HBqw2KSuXTJeWMspbzywOH/cB+O5ILPDsEstoyZWibhvW6wSqIsQCFlDO3ztagNEpB2zq6NjGpDutHWqeYpkDXtXz+y2/y8INz9kPd5f40qcXcZwDXWI6Pl8SQRIoVMjkrnDM4Z0i5YJUixMTTZzvGMaCt/LqmkFAoBUYrtFYc9p7je8f8xOmCf/zffEcCISuVyg+FxaLhR7/yGqA5TIWcE8+eXTLtPb3V+DGILDpGLh4HduM52hpWR0uWRy2HkLBWcVYWLJoGlGLROzbTxM0bC7RWL/oSK5U/MsZo3njrFmiDtYbTkwV3752K2sQ2PHr4HHLm+fNttTmvVD4Fzm4d81vfe8DTpxuYIhrNce9oxsiQCzlFnj0fefwU1icNp4sGrRW7Q+Z43bBedOgihl2LxjKRaZ3FWUWXJt481pyoDaufeY3/4h9/50Vf7meKWsy94jhnee31WwQfiCGjKKSUyLnQOUfwQdIJlCblTAhpTiwoKK3R1uCngLWaUiCGhDaGx093/PLPv82/+sYDzi+qhWyl8sNiuWiZQuLDp3sePbzgcnOgpERvFAOFo66haSzTbmTvEyEnNk9HLjYH7t5ZoY6XDBicAXfDYJVi0VpKHkjThKl5c5VXBKUUr795i9PTJY8/OieGTNMoSiyUrBn2kVIypVHcu3eDfrXg+OkFXd/yra9/H19jdiqVT4TWii9/5TUefPCEYTtCyrx175TTVcPFFNjtJrabiSnAFDNN07CJkcXCkmKkZIhTJBf5XCVl4uA5vXnEuDtQnObiySWDgk4XbqwbHn70oq/6s0N9yn8GKDkTpkDJESiUkjFa4UNimjJKaXIueB8lsFHLt10rGA8TFJGsaGtIKVPI5JR4Pib+5/+TP8vt20cv9gIrlc8or9874W/+h79A2B7YXG4JYeKwO/D8cs/lYeJyTLi2oe0cGMPBZ7abQMrw5Dzx3e8fePe954zjwGE/orLk0DVa0TaO6DNf+vIbL/oyK5U/EKUUd++f0TSWy/MduRSUBj95YozsNyO3biywGvbbA9vNgaePzjHakGPmrc/ff9GXUKm8suRc+Hv/1dewTeHBwz2bbeR0tWDykVVnuXHUc//mEV9865Qfe+cGb91e8vrtBbdOOt66e0SrFbkUfIiMo8cPnkXX0JbAwmQalTC6sOoN05Q4BDDWvOjL/sxQJ3OvOGc3jxiHiTyblqRUSBlco4gZrJVCLqYkD0cUoGZb8ygdFAAFKUjgY04ZHzNPH13y9r/7Je7eOubx480LvtJK5dXHWcNbb5zx6MmGv/ZXf4Gb64bvf+sB21jQreb5uWc7glOW/TZy+1bPyXEHwKpzTFOi04auszSNxTUWrSFGmcxP40S7WtAtWm7ecvS+cGNl0NM9vv7dZ0yjf8GvQKXyb6ON5s7dG6yWDdFHQkgopTAKxphRKG7cWHB5sWe3HcgoSi5gjBgxNI6ub2i7pr7HK5VPyIcfXbBatty8fcytVcPRsufxMNJaQ2MMR40lKc32MGG1xmlFozUWUCiCj6gCKRdiBmLCNdA0hu9vI+TI3dMTTs/WrB+NHG+WPH9Wz5afBrWY+wyg9RwsXCDGhNGKKWQooIwi5kzOmZTKXLzJ/yiFXBTGFHLMoKTU02gWy45xDDx8dMnPffUtfvPr79eF80rlB+Q//uu/xF//yz/LP/j/fJMvf+Eu/+RXv8m3vv+Mk7vH+HPPa7dW3D7pSD4QQ+G122uWfc9mO9A3lnsnCzIwhcx+iqQS6JaW2zdX9H2D1Ro/BbKC
o6MFS63RTeCtr74J/Yqv/fNvveiXoFL5t1ite5bLFqPg4AMpQdc5FOCspmssu8PEdjtSciYX8CVjHahSUKpwyInTsyMefvj0RV9OpfJK4n3iR37kPmtn+eLdG4yHCR8K45QoIXF63NM0Dad9g3WKkDMpJzKa/RCwRqGVQimFnYPDVS6s1y3rZcOThwOPnw3c7xf06yXOXdA0Fu+rQdcPSi3mXmFu3DyiaQyH/YjRGkqh5IIymhRlfy4rTSqFlGS/LqQkHU0xIYKSKVkKQaXBtQ6Qm3q3m9gn+OLbN2vAY6XyA3L71hE//6feZDxMHHeWD959zFtv3ODy8h6l0fSNJYyRaCAWWK4NJ6sFVhvatmE7BHKZWFjNyrXcPFmgDRSlUKqgFKSU0VoTxkROA6axHHYT+TDx5qnlay/6RahUfheWi5ZpmEhGoZSGEik5M0yRvneUUjjsPTEkaUSiUMbIfnjK5FJwTnHj5gkpZZ48fP6iL6lSeSU5WXW8efOItPNsLw9YY2idY3OIPHyy5bXba6y1dK1F+8CYNeOs8lLKSFWRpOGCgtY5tps9+yFwcrKkbxu+8d4l508ucFbT964Wc58CdWfuFSaFRPSRFDMlZ0opIqssoGYDu5KzuFkaTZmLPancFEorjNbkMrtZGiWhkFrL3p3RfPjgHKUUP/NTb73Yi61UXnH+zC9+gbu31zx/vuXmrSOmmPj+u09IMbHfjqx7x+2TJW/fO+ONO/OeaikctjvG/QGNQmnL4/M9KXgchVXnOF61WGPwU2KaIiEVYs4YCk/efYjTkhX0z37zwYt9ASqV34Wj4yVaK4y1MnHzEecM3ke0NSgUMWVSTEgfUonJgipopbBWo41E7MSYyDG96EuqVF5ZDtsDjVacX264PBwY/ITWmX5heb6dOL8cyDnNHgwa7QzWaVF9lUzJoBsHCrkXlSIlxb0bK37ix+5xctxjGss0eBpnWK17lKqOyz8otZh7RdFacXpjhVKyF6eVIudMzoUUk0goY8aHjMpiihKuCjVkyqaNmvfoiuzMxYw2Cq0KbWNpe4cyhqwUb75++qIvuVJ5ZfmlX/wif+2v/CyLzvGN33rAowfPOTtd8t33npKiRIlMPtMqRUtmv5PA1t048OT5hicXO55cPKdt4OKQeHS+x8fIuPdopWi6hpSLHGiD7MeakikJnj3eYo3mo7r3WnnJ6PqGm7dPKAWM5roQK7lQivx3yoVxDMQkMzmtNSUXgo/k9PGed8kZPwVu37vJz/7MOy/2wiqVV5T/4u99HVUKmzHwdLPnN/7NE371X37Ew6dbVouGiGIMAR8iiYJ1GmcN1mhQipAyZTbi0kBOBadhfdwQJ09Wio8uD4SQSCnz7/3FX+DGzWqy94NSi7lXlFIgJVkSV0ChkFPBxywxBKnM392CNpoY5UGYcxYLFKWuH5goJft1SlPQGGNoWkfbObYXe84vD7VzUql8Qn7pF77A/+pv/Xmm7YEH7z3h7r1T/uVvPeDx40u+8NZNdtsDm82e7SHgQ2azH3j0ZMt+CHzjO095dDFyPgTee7Tng4cXLDtLLJrLzYBPiTEkrDNoo7HWoI2S+79knFH4ALqVTmml8jIRfOSwHUgxMQ6ePEv5i1LEKPJJ7yOFgtKiIKFkrgT/xmpCiMSQSDmTU8Y2hh/9qc+/sGuqVF5lci5EH9ltJ0JuME5z6+aSpB3r4yVt6/CpMIWID1EGCDnjQwIUxmhca9FK46xFUYiDx+WCU4oPLicuLydSjLStxU91kv5pUHfmXlGktlKklCVjoBSKkhtR6yz1WRHr15Bkf86oWZ5iZEcuZ0VBnMKsM6A01iqyUnS9Y38xkM5WJODz79zkaN2x2Y4v+MorlVeDk+MFf+2v/hy/8guf491vvM/T53syhS9+6TWOjnr+ya99l9dvHfGdB5corbhxumbqHIfDyBAzp6crYpM5DCP7y4FbN9YkJV3QzmlSjuxHj24tGIU1BjW70xqjpLGT4fU3bvGf/4Ovk3Pdea28XNy4eYTSihxFYRJ8RGvZ/VRakTOkGOcHnkJrTc4ZYzUaRG2iQFt95dMMBR58eM7bb57x7nvPXvAVViqvFpOP/ItvPuD2vVPK3nPwE8tlg7WGZDVeKXQpJB/JSpG1oijEjwFYLhtyTuSS5WiawSfYjZ6pcTzZefbbAwp450v3+dW//zWePbl80Zf9ylOLuVeUKwlKzoUYMsZA8BmlFCmKnBKl8D6hjbgLKa1wWrr2KWaU0RilsUbjjCGmTEmFdtWi25Z37nc0c7f/9bM1y0Vbi7lK5Q/B2Y0V/8n/8t/n8/eO+fY33+fb33vChw8umVImJsXN4wX7kwXeR7ZT5NbtY6aU2HsJZz29ewO76HDbgXwY6NYd2spuQkxQWkeMQMg0PmAXjlQKumharTDaoFXmzv0jjo+WNLb+qK+8fCyWHX6YMFaj5iIuzM7KAEZLcxHAWkXw0sU3Vl3vgGtnyLlgG43WGmMUViuOT1ZQi7lK5Y+ED4lvvvuMt37kLfLmIYuuQSkIU8AbzcFn+s6wbhwoRcxF/p8SqkAJmZggFRhjpNUa01i8gsYZ0IaYC84ZlNa4rnnRl/yZoMosX2FyKZhrYxNQcxtEz7LJnAsKsFoebqUUYizysNTzw9IakVpqTbvqObqxpO8tVsH2EHELh3NiHZtzfqHXW6m8KvzcVz/Hn/qxN3j65JLtdmS/H9nsRy43Bx4+eM5+cxCb9RhZdo6f+NIdfMiMIRJQBK0pOdDkyNppToyipeAoWANRJ/Y+UZylaE1KmcknwujRSqPmB+dq0fDdbz/iwyfbF/2SVCr/FtEHUZHkwjQEeZYhz7KcMimJsZdz8p5WSh5dKcl+uFIyiW5ai9FaGpRWczhM/MxX38TWUOJK5Y/M+48uef/9JyjvMd4TdgPFBzabPSlO5JwoWjHv+ECGtnUsVy2xFEJRhFxQyhCmiHOarnNoCnHyDAdPzpnnz7Z86xsfvujL/UxQ27WvKEqBayxhDMDHOwZKSSezIFbl2ihShpDzlVJFnMOMBm3QSuEWlrZz9Mse1zeYlFn2Dq2RHZyUaPtOisVKpfL7Yq3mJ37iDYbRk1G89+E5Hz3eMvo4H1ATh0PgN773lNbCh4+2/PN/+R6v3T8lAVOBPA5snk7sdp7FqhUD2lxIh5HDEFlMHf2yRRklLrQJYsqslh05J7TVTD4SU+G3PnjG5a5O1CsvHyEkrNGkFNEU8jxty0gmas6FpjEEnygFrNGSm5rEkdkYkXGVVEhkVMoUFP3xipv3bqLrM6tS+SPz7OLA5fZA3Iw8vziQc+HIOrCaftVy2IysGkOxGm1E1m+0JgGHKYAxgEbpQqcV5IIziqax9I2ox5RR/Oavf/dFX+pnhlrMvaIYa+ZwRubpHCglhiYp5jnzQxHnX9dGUbI8DNVsA50KOKNYrTtc53CNY72wKKVpnCLFTIwZ7yNxdsqrVCq/P3fvnPDv/pmvMFxsCT7QL1tiAbTCGstHjy9JR4EYIt/+QBwmcy4YY4ip0FrDs+3A48c7dOfYHQZ2FxO
NteQQGIbA7aShwI27a1zryErTNpaiND4WtC4oY/n+kz2/+rX3X+wLUqn8LixXPV3fMOwnef9bRfJiolBKIWfJTJTMVJnWhXlSxxxM3DgL2mAbSy6KxaIjThHInJ/vOT7qefK0TqUrlT8qx8dHrFcr7i57oOBLBqtorGQXx1QIjcYik/UQA51rMVaTUYSYaIpCGVGRaaVwznC8bjk5ann/wQXTVPPlPi1qMfcKcnS8pO8attsRSkFbTYyZmOSGAdmpK7MsRSmFLgrTO3QpaGfRFIzWOKcJsWCnyHbKTNuBxaLh/v01vrWia24s/+gffYMnVapVqfyB/I2/9qfJKTIcRhIQYwIlB9DJJ2LK/NqHD3h6vv/4D833cdEKa0GjuHm0JIXE0aJn1A19azE+oRtDt26IpeCnhD2xs6OlY4qSK+lMpm8s7733hBCqW1jl5cM1lpK5dlNOqUiOXCnXz61SCiEWlIacZQIw13KyZ2e0rAi0lm69ZL1oiCFQUmbcT/yFP/9j/J/+z//fF32plcorx4cPnvEjb9xgdAXvM7rRdKrQNBql5N5LueCR1Z22sYxDICmFbTWUQvSBpC1mLgCt0Thn+dZ3n7zoy/vMUYu5VwyjNV1jxb0yZygQpkiICW3E0Ys5ckBpjSqFrjMUNCFmlC6EIMYpbadR1qLJuEVPWwrb51sudyOvf/kEj2XoLG3rqEO5SuUP5sd+9HV+4ec+x347yPRAa7Q1tM7gk9y/isKHzw8M/uMi64OHl/iQ+epPvQXacLRsWN1saFDkkBhaJfutWqGdppmzJUMpDIMnFzAypielRKMtRqsqja68tLStk8ibOWogJ9kBj1F2s3ORjv9c62GMFjMvwDlL4yxZaU5PF9x5+4zd+cj6uMNPDauTFeuzFdtH9dBYqXwS/s23PuIXfvINppTwz7foGMGa2aNB3Ga71jKNAbSmGI2xihglJouSZ9d0yTFWQE6JblENT34YVAOUV4y+d3StpWk0TSPudlqLXrnMC+HG2usgVeeM/LrV5CQh4nHy5Jiu9wmCT1it6FqD1oovv3XE0bphM3isyiz7ptqaVyp/CJbLVsKLR0/wgWmcMFqxWLS0TtM4TesMb9xeYY3GGkXXGL54/5jXby6l24ni+GyN0hCsZlAwWs0mJgYf4bftwzpnRe7iJfNnOHgxOgqBkiKGet9WXk5KAeMMoGRl4Lc1HlLK0rAshYJCodBaYZWmsQa0FgOwOWdu93yPVoX95Y7kJzgMqJIxuhqgVCqfBNM4Tu7coIwBFwsmFnSGNE/KY8pMIVKMpu0so4+oUrCqkEMkx0zrNNYZUpGM4zB5bH0m/VCok7lXjLZtyCUzjoFS5OGnlMI1Bp1FogJgUOJqBxIanONs/Szh4NZowiQ333LRQkwEn1h0hjJO2L7h6LgwTYHgS53MVSp/AErB3/yf/lmm/UgOkTh5/BhQpbBadqzXCy53A36MfO71U966d0IpzIVYZpoi2/Mtx2drjDW41uHHgEkJPQTIhba3OK3QBYwxGCM7cigxhbDOkHwkqYxSDb/8C5/j13/zAw5DeNEvT6XyO3jy6DmUk+vdOIAYkgSHzxJLlEJR0MbSWjCNZRwjqmQCcs+lzjENkYaCTxGTIj//p27x7mbLk8tq/FOpfBIefPicr/3L7/P22ZpNzsSUSaVQSiYnTd8bca0shTB4pjGhUsY6C0WM9mIqmJzFTT2LO+2iNdy+teLxk92LvsTPFHUy94px5/UzbOPIWTodKYvrl0KhEfOTkgtaG4wzJBRu0YgMSxu6ztEtWoC506lwRv5sZxWMHguEQ6DrHOtFi9EFbepbpVL5/Tg+6sWRL0T86BnHiPcRrWDROW7fOuLunRNOb6xZLjqsMxhncY2j6xuMsxzGSAgJZy3r4yXrruFk2XO8aLixbFi1DY2WvQO0JuRCKplQiphApITR4KxM5BsrRhGVystG1zWQM84ayJkYkjhZzgk4V/tzBXG9HEJhmOJ1ppVpDMY5wuAZLnYYA37wvHX/mDgOGK1wNV+xUvlElFLolx0nt0+YUsZnCBlM06AbS9aWohU+ZfY+gYGDj0Qf5DlkNVoXYkyUInFYhcLpuuFv/o2f53jdvuhL/ExRT+ivGMbKsrdSYtusZitnY0WmUkohp0wpElNgnSGHKLLLMt9YyPK51dAtWk5vr1kdtWTAallwvXi2o180rJct0Xu+8/0avlqp/H78D//Sz3HjpCenwniYGPYTMUSMNaxXHVaLm6xrLG3X0LYN2ogT38XB850PnvOPfu27fPhoQzGGFBKubUglM4XEmOEQEgefKdaStSYUOEye3W5iv5/wUyDHKD8LcsZYxa/88he4c/foRb88lco1SimOjxb0nZVsVK2wRtM0Fmc1bedwjZ1XARTWiKz42rU5yfPN+4BzmvVxTzt//GEzoLse6yyff/OUWzfXL/pyK5VXkt/81++TmpbUNHilsMuWYhty07DzmSklck4yUU+JkhJhLuimwyiRIigyigxEH5lGz9lRw1//j77Kv/OLn6P2Gj8dajH3imGN4ez2ESenS9rWYt3VVoyEpf52N8sUE8lHCVS1sl/nGoszYJ2l6Rxto8EHcohYVbDWEZXm5PYxcc4C2e4G/vXXa7BjpfJ78dr9U/7KX/xZvI9sN3uGw8ThMOKD7KmiFT4kcoEYM1lpmtbROM3zzcC/+tZDHj7bkXPh3Xef4HOGtmU7JR493XM5BsaiGELGx8z2MDGEwGY7st1NlJRQJaOyZHWlGEkpUnLmp79ym//wL/0UX/nSHdqmTioqLx6lwDrDYQjEJBE4KRfJoVo05Hk8p43cJ13fYK1BAW3XYtsGEnTGoYuh6zu6rqGxBlUK4+aADoF7t1acna1e7MVWKq8ov/Evv8cHzzY0J2u6szUDitBZhpKZijgzqwJWSfRVjEUyjrW6dry0VoYM3kdCBlBYq3nt7pr/4N//U/wn/+n/mK/+0o9zfKM2HH8Q6pP9FePhw3N+6mc+R/CFlBLeR8YxknIWpy8NqshCudVqdv0y0sk0mhQLKWRWRw2L3tE1BmMNq1VL2hem7chhjBADx6dLIpacauukUvm9sFbzl/+Dn8ZQ2F7sOH++w0+BwWfZZzOWKSlyKkwRYoasFFPMfO0bH/HwyZYwO/gBPHy84b/+h9/gz/zpr9CtFyzuQNocSEZ2Xd1RhzKaw24ghECOImkJPmK0oihFLopM4bAbaTrLl3/iHj/5o6/xta8/xq9v8eTBc/7ef/4PAcU4jJS6FFv5Y8RYw+17N/jw3ceEGCkFMeuafy8dvKwOWE0BvE+4xgJKduoouN5htGK1tpTkCVNi4RTT5BlC5OT0iA8+vODBRxcv9ForlVeVnAv/+//tf8Xbn7/LG/ducP/ekoun5xg0SkO/bEFrWfFJmZwTKPlzjZOireQsv1ZEPaasJmdoFy2TW3Lv7Xf4X/yvf4IH7z1ie77lP/vbf5f3v/uA4GsG3R+FWsy9Yjz84Cl3/tIvcv/NO/w//7N/QoiZnDI5FywGN3cvjdU0zrDoG7RW9K0lzw
5h2hqsM1hVxDrdKIyCtrMcWsv2IPs+bkzQGv7O3/2Na2OVSqXyO/kL/95P8lf+0s9x/mzDbjcxTomCxatCLAFlLSEXQoaUC0kpdoPn1772Lk+e/e5L4N/59kMWjeEnfuptFjfXbMaRw2EPaLbDwHLhSFOUvEg1Wz8r6YimkEgavFYUrUglc9iNXIaWz/3cT6P7nuUvr/hrf+svY13D/+3/+F/y+MNn/MO/80/wUzVKqfzwKaUQvccYRUxi1FVykVgCpcTYJCVyKDStRHwYo6AoccxrDCUljDUsekvfOhpniCGQxkhKGR8ij58ObDbDi77cSuWV5t3vPOS97z3iV/7cj/P2/RPG3YCZo7CMMxhTCB4oWmSVpYCemzE5X+dG5lxIKPa+MLlj1vffZB8UPN/xuS+/Qb9e8vP/7Z/la7/6m/zTf/A1vv61b/Logxov8oehFnOvGKXAP/h//DN+8c/8GCHE68w4VKGxjmINOSWcMzTO4JyhbTQxFaKSbkiJ8VrqZeZCrms01hjWq5bNZeLpxYHueMXqdE3KNXS4UvnduHfniL/83/8q4xTYbQYOgwej2Q2BKQRKzsSYyCkzhsR+N/G995/x/feecbn9vQ+Zt+/f5Ff+6p/j4W/8a7bB0xgoxlBQWGuwCmxr8FMWJ9tSSCGhCzirQMuOLKUw+cSjp4Hl62+gtcWf79DKsL08cHb7hL/xt/4qRhv+B3/jz3PxfMf/7j/924zjxNOHz/8YX8nKnySutB4pF7RSaKMkNDxllJvjBEpBqULyka61kqOopBlpnKFrDKt1j0LhnGHdO55vkKzFAutVT9fX7n6l8mnQ9i3vf3jOv/Mzb3CxaYiHiWmK5JzEkK9A0zqij2irCCFitEilpUmTMbkwRMulXWP0Ei4OWOdYHfVcnm/xk8c2Db/4Kz/Nz//Kz/Dog0f80//3r/N//7/8fZ58VH0bfj9+oGJOKXUC/B+AH0dyPf9nwDeB/yvwNvAu8B+VUs6VWKr9b4C/AByA/7iU8us/yN//J5Xvf+chy2VHCImSZDLXdY7T0wXTGJmGLHlWrZXsuCLFXtbQWEVG45wh5TJP6MQFrG31/4+9/462Nc3v+sDPE960wwk3Vo5d1V0dq3NSh1JLrVaO3YBAARQ8gEFIthZaNvbCGI89Y3vGgbFn7PHIeMZjlrEBgwcJMAKBEJJALZS71bG66lbde889YYc3PXH++L33tCRLgNStvl1V+7tWrVv33BP22e9+9/M8v9/39/myt6ypjWKI0A6B1fUVR7d2CNmddvqt9D3f/iXcfdchXTvQ946YMklBiBFyJsRA7yLPXjvjk0/f4plrp//c72eM5i3vfT1vfs+TXL58wHjriLPnn6coCsr9OcPg0EpARlVp0Bqcy5gpTFlpiClRZE1WkFCMVES7TzzrqedzUsy0256qLnHjwM3njgDDvQ/cxf2PGP6fP/Ifsl63/JUf/lEg84/+9s/wzKeeYxcPtNPnSzEm1useYw0pepTS5JyIMdL3ebJdCgxlPitpaktVFfgYyUpTlRY3eLL3UFmBo4SANXCycfRjYFYaQty9aHfa6XPV29/3Bt763tfz0CN3cf2nfhJLJhUGFYLkQObMrLYoIGgl8TlKoQuNdxEFxAy9bjBX72doHYd1Sd+OLA5KvIvENAWMM2CMYjaf8cCj93LPA1f5sm98L3/zL/0dTo5O+dH/6e/t1qLfQp9rZ+4/BX405/wtSqkSmAH/BvB3c87/gVLqh4AfAv408JXAY9N/bwX+y+nPnX4X+tivPkthNIcHcw72a65cmjNrCo6ONqi9AoCqlHk5BYwuUsaEVRndFOQYCUlLLIESq1ZRWua1Ziw0Bxf3ME3Fs9c3HB2t7+wvu9NOX4T6iqee4I1PPkhMkbEfaLuRkGDbOWJKZKQr9rFPHvHhn/+MVCd/k/YvLLl81yWeeN2jPPHky9Bas39hSc6Z9dmWYm+BPVL4ENFarNMpJVQWepjSCqNk84uRQXMZNo8EpcmXLpGbJUlbYoysjzfYqjifr12dbDi9tWK+nDH2PbNFg2LG4aV9vuP7vhltLN/wbR8gKcV/+x//D3z4H/3CrmO30+eslOQ1e+/9F/nUJ27gfDj/WE6JsrAsFhVNZdlb1jILmhMlBu8zVWkwSiBfxii01jQLC6HG94GTo5btECmqnflop51+N6pnFS9/zaN85/f/Pq7cc8jZ8Zr1yRm9T2Q3opFoq6gUqrBoDUYxzckJcXYYPDlBLmuKC1fJ9YyoFPO9GUVRoJSGLOtidIFt75jtzbh1c8Viz3GgEgpNcIGv/davYO/CnN//R7+RX/25j/HX/uKP8NFf+Pgdfpa+ePS7fqdTSu0D7wa+EyDn7ACnlPp64L3Tp/1F4O8jh7mvB/67LMNXP6WUOlBK3Z1zfv53/ehfwhKLJezHGqXgZQ9fot10FMyIWRbLojAURkNKJAra3pOSzNM5MkWhsSpRKLBKbsSysmilKCqLbSouXN4BT3fa6TerLAzf8Qfejq1Lxt6xPutwY8DHKBvTGBl6xyeevvUbDnJKKaq6JOfMU1/7Dt7yntdz74N3c3p0QkoJaw0+BFJMrE/WdO1ACJkwjGAthVXEIAe2rMBqBaUluQAoUkr4kAi2gfqAbBssmroqmC1mjP2AUhlygxscY+8IPtFuBhZ7c/ptTwyRYfDUTUNOjguX90Epvu/f/W4++ZGn+fG/+ZP8lR/+EZnB3Wmn36VOTzZcuWsfrRXe5/NCRGkNB4uKy5dmzOqCwhoKK1Ebqz7QVBpdWLxRhCAFjkVj0ErJAbCw7B/M8Erzj3/qY3f619xppxeUitLyure9ivd/47t59Rsfp29HYvDceOYGpixI80PSeB2toKkt/eAly1QJqdkoyTB2EYZkqS9eIhYzeq25eDAnhYxSCTQ0ezPi6AWMohU+BPpVy/xgQU4ZP3hyVtgCtBHo36WrF3nH+/Z563vewD/82z/Ff/8X/mduPnfrTj9td1yfS9nqYeAI+GGl1OuAnwW+D7j66w5o14Gr0//fCzzz677+2eljv+Ewp5T6XuB7P4fH9ZLS80drrh+t+dJ3PoLOJUZlgpfgcFtqye/RlgzMawta0Q2BwoC2GpWhMEr+rhJKT1hZYwkh8Wsfu36nf8Wddvqi0qwp+df+2PvYP5iTY2J1umW9HWm7gdHJQS6ExEc+cZN/8uGnJ+sI3P3AVd7xZW/kg9/11axONsQYGbYjx8/fgpxZrzZs1j0Hl/YwRtFtO1RR422N0hLEGjEYrc4XT6sEJqGtFsQ7hlAsqA4uYcoChYKYsEbjnaeuCpmfnQ6cqIwp5IDZzErqpqKoCqILpNJPEQsdwUc2q5b5csa3/ckPcvHKBX78b/4UH/lnu83yTr87rdc9v/Bzn2bWVMSQzkFdFw4arlxouHplwbwusEahkY1iUYwCEoqZojIwq6gqQ2Plc+qqQFclFy7vk7Xm45+4cad/zZ12ekFIKcXbv+xNfOUH38s9D14FpdhuOrTWdJ1n6D02J8pLl+myIndr2rMNfTeSlcK5CEqxd7iHT
5lQNYTGkm1FWVkWTS3W6cLSbkf86KibElVa+naAnMhZo6wm+sDZjQHXOWYHc8iZYXCsT7fsX1hSlAWzZcP7vuHdvOqNj/OX/+u/wY/99Z94SRMwP5fDnAXeAPyJnPNPK6X+U8RSea6cc1ZK/Y7crTnn/wr4rwB+p1/7UpY20i1IlSXYREqQc0KTJb+nMJTGkMhoU5Abiy0sKme8D5JPFW4fAi0uROZG8w//4a/c6V9tp52+qPQ9f+gdvOOtL8PHRNeNnJ21rNYtPkTG0ZOAT37mmA///DOklLl45ZCqKfkz/9n3cff9Vzi7tSaEyOmtM9zgcKMn+wBaYzRTV8/Tb3qqvQaaJZvTDZXJlAWknMhZEVMikDFa0/mM1zPKg31m+1L9rKqSelaRY2boHWVdEY3BFpa+G84PhfW8Zr6cEV2ApkJrTdv2rFcbbGEYes/JrRV7h3OMnbE63fCVv/99vPur38EPffu/yzOfeO5OX5KdXqByzmO04nDZUFnD/l7JwUHDsrHMKoM1mcpKUbIqDUWh8SmzHQKjT2hrqIyi0DBvSgpryAn6MVDUzZ3+9Xba6Ytezbzm4Zc/wLf9iW/mocfuI/hI3w9MU9jklMT6XFk2mx5rFdQzNmPG1Rpv5qSYGJVnub+gLwuCT9RNCSlRzyrKqmSxP6MoC1JMmMISJ2dIUVjK0uJcoJ6VaKNxzgGwOl3jY6CsSnKWmfKz4xUpw/JgTlGV3HXfZb7vz38vDz3+AD/8f/lLuMHd2Sf0DulzOcw9Czybc/7p6e//E3KYu3HbPqmUuhu4Of37NeD+X/f1900f2+nzoBwj/eAJShFKzdlmQCloCoOKmbPjnrqc6JWFQeuMsYo4RqxWlFUhMwspUdWGXBR89Neex/udlWqnnW7r0Ycv89rX3kc/OlLMrM5a+iEQQ6TvHc5Htp3jE588ompqvvbb3sMHPvgU1khe1tFzt9huOtYnW1mUUhZcsxsx1qCM5MUdP3eCbSxDN1IvZ+R772U4W9PmqfMXM85lfIBmb4E9rKZAcIUbAkVd0MxqrLVEZGE1pdz3mgTG0HeO/cM5s0VD3ZTEIpGBsR85u3WGUrB/cZ9bN24wtAP7F5fcvHZCsxBr93xvxpd+3Tv5i//Xv3yHr8pOL1TlDMPoeeoDT3B62tHHxGxZslr39CeJw4sNNiS6s4GDvYq6Fqvx3qwgZZmZyylTWE1VaLS1jD6yf3GP/+Vv/jPcS7hSv9NO/yK9/X1v5D1f+VYefdUjRB/oNh1u8GiriUlcWpBZn25Z7DXEmDg73YLWVPOanDPt8Zr5co5pEliDrQu8G2SOtam4fM9FckyUVUkMEVsWDH2PXtQowLsoFsqUKcpCYkcykDN+lLV1jCMYRQoREqAVwXv2L+zhhpGmqfjAB5+iakr+83/7v3lJRmn9rg9zOefrSqlnlFIvzzl/FHgf8CvTf98B/AfTn//L9CV/HfhXlVJ/CQGfrHbzcp8fLZc1HVBcqOmubbGlpkoa5wLtNqKVomsDg7IsZw03PrOimRsK4zBkFnWBtYayNKgMxhhUZfnEp48IYRdLsNNOAA/df8j3/yvvZjkrIGc2m56uHYkxiHUxZYYxcHPledtXvoMv+cBbUNrQrTvKuiR4z+Z0S9/2bNcdq9MNSgu0pF932NKyd7gghEhWEdeNFHVFGAMpQbm3ZOwcXgVMramWmrnSKKMxpRUrZYjUdUXZVBSlpawK2r5j7B0zoyiaGUZrxsFjS4PSmhgiznm69cD8YEbf9njnaRYNJzdP2K5bDi/uc3TtmKqpmC0qum1PSon3f/N7uP7sEX/rL//9O315dnoBK1tNuVfR3twSh8iyrqlKQ2MqisLQbgKL+w659vFbVAXM6oKyKNBWqHml1WjNVPUvKaoClPoX/+CddnoJ6u4HrvKGd7yab/mur0Jpzcn1U9BKLIxNybDtpetdl2zXLd5HYowYa7l89ZDtpmNoB7Q1LC8sMMagU6IsCvzoObi4Jx9XGnLGFtJ586NH+8jFuw4oKyk+ymxcoCoLunVLvaxJPqGNptsMVE1JTp7gAkVTorQmucA4OLbrjuXBgrBMzBc1T33NOygKw1/4sz/M+BLr0H2uqKc/Afz3E8nyk8AfBjTwPyqlvgt4GvjQ9Ll/E4kl+DgSTfCHP8efvdOkN77+AS5cOqBdeZorc9ZbR5xFkg3YsiDlzMHVArRFGzh85BBTQlVUnN08Y6NHzq5vuO+uJfOmJKTEsqm4cOUC8Jk7/evttNMd1wP3HvADf/QpDvelOplzYLtp2XYDg5OAYucC5XLOd/2xb6Veznn2k89x67kjlNKUVUHfDYzDyOZ0S/CRmBM6Kemq9wMNFRkIKdJtR4KLzPYs1lqygqF3mEoTo8IYgxsDqlQUOVMWFm0th5dnjN3IbF7hXCDknsXhXKqizmO0JmUJek2Dw5aavhsk8kBrtusOP3iK0lLVBTevHVHPK85O1hhjWBzMZTav7ckpsTxY8O6vehs/8aM/Q7vp7vRl2umFJiXAheWlC+Q2omcXsEWBrUqMMdSLhroqOHzMEIPn4bfci7KaG7/8CVR0zBtLYTTGaGxZEJICY7CFwZgdvGunnX6zrtx9kR/8P/1R7n7gCuuTNX46GBVlwXbVsjyc471nb1kTosRfVVVBvw0Ya/CjZz6vyXEP5xw5Z+aLhs26I6fExSv7uCEQXaSYF5iyILpIs2yoZxVKaVJKdNuRsXfM9xt0oUgxobVhfWvD3oUFXTtQzUtAOvDKKGxhadcdQ++YLxtQoFXGjQ43jhit+dJveBd/9b/9ET75kZfW3vVzOszlnP8Z8Kbf4p/e91t8bgb++Ofy83b63+u1b3qct7z/naz7wFk+JeaMWtYoFyga8SbnlCiqCmMVtiioU8ZoxXyvYf/KRVKMGA3rZ6+hguNgUfCTP/VxfvwffORO/3o77XTH9cC9B/yp730Plw/nKC1WrrPTLZutY3QRPwb6wdEPgXseu4esFOvTFdt1y63rpxhjaBYV3aYjuMDYj2Sg3w7kmFgezAkuUF7cp543+HGkbkq2o2doHWVTUdUVtrAk5xmtEGfLskAbQ1UVBB8kUFlBXZcYo6nqkhgTYQwSNF4VpJTkMGc0xWKGMYaoE8ZoytKyXXcCV9FwcnTKdtNRzyq88+xdXZJTZmgHitLixkC76bnngSu8/LWP8OF/9Et3+lLt9ALSXfdf4bHXPMpbn3o9/bpl72Im5YwxBjKUTcViOWO21zD2HpD4HKU0F+9/kBwi47VPUVpNbjco7/Fjwo+Opz99xM/+7Cfv9K+4005fdPr2P/VBLlzdZ+gH+n4kBOm6LZs5prK0656yqcSqmDJooZuvzyLaaMbBobVCGYUp5AjRdyNVXaK0kiKjNZRlQTlxGZKR+ezZXkNWmvasxRrN/sUFw3YgK6hK6abvX1gyDKMcIuuS9emWzWnL8mBGt+noO7FwphQYu0hbGGBAT6To06Mz/tU/+0f4j3/o/861T790AH67EJYXqC5cPuBb/9g3UpQFXisSgXoxxw0eN0qVBRRKQVGInWp5
sCDGhNEaY810s1QSMlyUFC+b49sN9d2HHLo59c9+mrnWtNvuJU0J2umlK60V737bI9xz1z45ydrWbQZWq4HBeUJI+BAJPnF4/91cfuh+Nqdrcs4MXU/fjdSzknEUa6PrR1JKoJQUUYwR6FBRUDeVHKRQGGvQViqYSknxJfiMD4mcoGhKKmOIMTKblYRoQWncIAc3kswjVXVJThBDgqzQxtDMBYoSvYBWZvMao6FrB9zgsIWhyJYbz94iozi9ecZ8T4bcVQFFWdAs6imaQJEzvOer38Yv/dOP4kZ/py/ZTi8AXbrrIt/7b3w7h1cO8T4yth0pJWJMknk4OHLKUpSoKoy2GCsFCqUU82VDiDB7+YOMvSONLSpGnv/Fj5DPzmj7+FvmOu6000tZj73qYS5ePWQcPOvjNUM/UDUV9azGx4jvAjlCURW43pFQ0rE721LPKlKIFHUBGXRS1LVY7o0Vp0hRFVy6ekFcIS7I7FsCozTBB7ptop7VAucrCrwLzPdndNsBrRVdOwgUpRRA39h7uk3POIzs6TkxpmmOD8YhYK3GD462HSing+XqdM0jTzzAn/kL388P/L4/S9/2d/AZ/8Jpd5h7Aeri1UP+2J/5TvYvLVmfbem2PfsHe4zDSLvtKaoSbZhe+AZFRitNzpmUEsN2QFst1c9ZSRg9m/VAGB3NvOF45bjvkXv4jj/1IQ4vLvm1X/wER9dP+am/92G6zUvjxthpJ4Avf+/jfNWXvxKtFGMIRJTEEAyeFGVGx4dEVJpiuSTFyHrVsj3bEkMgpii2kHacIgsCKSRCTKSYKUqZKajqEmMNYztOlpGAMkaAJKOX/EetqWcVABrFOHqq0vCqfTgZFc85Q1EWlLWV8OWcCc5j64oQIot5jU+ZGBLGGKw4WMgp0bWO41tnaKXYv3SZzWpLCEIx67qRK/ddpppVmIk0duvjtyYrphAzX/bKB3ntW1/JP/0HP3/nLtZOX/TSWvE13/Z+3vglT3Lx6kWqmdh/OyVdOaWgrCv86KmWFd2mJ/jAfDmjaiqSj5SNWDCNUWhj2btUY9QBKWUuPfwwaPirP/w3+apvfT9/63/8u8Td3PdOO1GUlle+4XG0UnjnzztcepqhboymqApMJXPYSmeiSwQfqJuKGBP1osSWBWPvQIEfHVVToTU0ewbXOzKfnVWNQebsgvPYbDFKMXYDs3mDd4EYM057isJSzWtme3PabUfyEaU13bYjxkizqCmrgqEfGbqRqqlQIVI0M5zz+NFhjcENjqwUt66fcHi45I1f8hp+4m/9zB181r9w2h3mXkAqq4IHX3Yff/BPfBNX7rnE6mQtePGs0CZjJoSzsYpuO2C1xpbQbnusLQg+Ml9WFMsKbQ1kxWxvhh8DZV2itWK5vwAy7WagrCPNcsZr3/oatqstj7/mEYLP7B/MqWYV/5+/8D9TVCXH1485O17f6adnp50+b9Ja8eXveYwPfe2TVNbQD5HBRUafJMjbBXxI+JhxPmJmDaqq2ay2AGzOWorKCCgkJJZ7M8bRkSb6Xp5oXEVpULZCG8XYD9NhrMC7Eu89KkNdVyitKErwYyCnjDaG5bzkrtTR3zyhqStsdQFTGDJKDn0ZiqYgDIGysKRpED2lRFOXKG0gZzJwdrYlhsT8YE4IgdXJhmZW8dzT17nr/ivEEDm5eUK37pnvz0gpU07D6N5Fch4JYde93+m3V1WX/IE//k186dd9Cc556qbk+PqKqpHX0GxZ4wdH8IFmVhFiwhoJA5eGdWZxMAP0+XyNNhprDX4IgIJSY4zhD/3JD9JvOz70R76arBT/4Ed+in/8d36G68/e3K1VO90xvfGJu/iDX/kafuoXrvFX/v5HvqDd4z/8r/9+9g8WhBDotz3GapTSmMIwLwuMkrk1MvhuZP/iEqMTwSu6bY9WmhCS5M0ZTT2viHXBOHqU1gQv8C1bWNlbLuSglcL0PUdZH4zRBDzBC9kyZ8VsWaO1ZuhGFAptpPmQcyaFSN1UDO2IGz22MMQQKQpDiJHt2fbcnp1i5PL+nJwy4+j49h/4IM8/c5NP/Mqnv2DP853S7jD3AlFRWr7pO7+Kr/+Or+DseM3N544IPhJcYLas2JxucYNDAdoYDi7tE5wsivPljLIuyUqhshz6irKc7F2WZtaQM/jRSxV0b8bycIHCsDxoCEPEX73AQ+oBbGGn7kLk3/xPvo+9i/v80j/5Va49fZ2f+NGf4umPPUvOyAzPTju9QPWetz/Kh77uSfaWDYML9INnDJG+D3gvCP+cMzFGQohcfuIhyb9ataSYZFFU4HwkDA5iJOYMOWGsRWVQZOkuaE1Zl1R1RTOTWYVBawprQYN3AiRJaKyxmJlYznQ/0B+dMIwj7O1zcPeSoR0BdT7rgDwMdGmpZyUxQllZlJENsbWGzaYjhEBRCWhlu2rxblrwC8v6dANnGxZ7C2bLBlsUKAVaSVW3mVX03cBr3vRyfvGnf5UYd/a2nX6jnnzHq3nqa9/B6972GkKQ+yP4SDWr0AYuXD1g6IYp6D7TLBu67UhZFZRlIXZh8gRNGFkezsk+UZQSsSFrmWEcHdF7op+L5eueq1RNwQe/6+v40Hd/PR/75U/wa7/4Cf7Gf/+3ef4z13HOw0uPYr7THZJ3geMbK1553yFnb3yQv/Mzn+ILQdF/6PH7uHz1kG7b08wqYkio24e3lKkWJSlEmroiBokoaBYN27MNzgV0hhAizbJmaY1YKGMCrbHWUjQCOQGkpjIraPZmdM8c4/qR2bIhA9Wsoihk5jvEEVtaZvOSGDMhePwY0FZifFDyfNmiIOUEyqA0dNseWxisbYg+cnZrTT2XGJ7ZoqFvB/q2pygt82VDM7lZfrOMVsT04rn5d4e5F4De8M7X8P3/3veCgs984nn6dmCxv6DfdlKx6EeaRUM1q+nWvUAPYqRqpNu22FswdA7Xj+wdLiYoSmY5Ue58iFIN2WtQWlNXJQlFDJFhK7Sisi4pSjtVSeupagIpRF72yod5w5e8hi//xnfjRs/R88f81b/4I5zdOuNXf+5jd/rp22mn35H2lzXveNODNHWJd4GuGwlJBru9j8QpGy6EiBs9ycjgdrvuYDqgxQnlrFLCDR5rFMpoyBJFYCcw0dh7qlnCaAlnTSmTciJlyd3RWg5dKkSslfm0upDDGD4wbloAdEwUTmhjt2fZyJIZt3e4lAw7rSiMIfhESgGlwJNZn2zxgycazenNFVUjG+jrzx5hNFRVIaGtQFaKGCPWGCDhBofW0Lcjj736YbTRu8PcTue6cPmAN73nSb7nh/4gQ+ck6D546kVJe9YzdAP1vCbGTMrge8fB1X2CiwJeKAsUipgSs3mDGz1VLaHE8gIfCVPFX2tF8JGiEjx6M68BcIOV+yIlHn/1o7zidY/x5d/4HoLP/MX/9H/gZ/7+h7n+zM1/zm+x006/Oz3+wAVmpeVXnj7G+cjRac+nnj2lKgsevrLHV73zZfzoT37i9/RQoZTize95coIHCQTv5OgWOWWaWQ0pCS1yOlVmpTi4uEe36cRhUpdgDfsXBYC1bVv
0RLWsFpWshUNAGc1sWTNsR4ZhIPoNF+/aZ2wd84MZCsVm3ZJDopjX2DGQVca5QPRROntVScqZtnOAjAWFEFBGQaUw1koRNSSCi1g/Mg8dhhJQdN3AzWtH7F9YcvX+KywP9vieH/pWvv/3/TvTuih6wxP38PiDF/hLP/rigXbtDnNf5Hr1m57g+/7896KLgtNbp3Joqy3duie4AFrhxhGtjAyNo8hIxE5OGVtpUsoUdUHO4AZPpZSADY7XLPeXmMIQRo8tbqOc5aYZ+gFQWAtQMgyOOC2WcpJL5zaB7qxntqwBxdV7LvOn/+M/Trvp+Pmf/gj/5b/z37BZtXfk+dtpp9+JlILv+ta38NpX3kMCutYzDJGs5DA3uijzbtNhK4REefGQpAxaK2JMxJjpuhGlFK4fiTHSdRJsagsj3W0fBVDSlAQXKSqLdx5jDNWspCwNQwt+8JRVgdaCX48pM3aOQisKMoPPaA2VQub6vKeqxLYpXfTbdpSEixmrZU527BzWynuGc56UEmM/4r3HWA1KsV23XLnnIkVpKaoCazXRBULOmGWDG4OEkVtDTIl61kwAl51e6jLGcHj5gD/5576bV77pFfRdT4wRbTV92zP0jugD2hg2py37lxbgEqmwuMGTQpQCiFL4EClNiY8JNeVSjf2IsRo/OmLMlFWBd9Jt2K5ayrpgdAJSIGeKokBpTbvpKKqCsrQ084o/8q//fr7k/W/m3//+/4zTW6s7/bTt9CLTu159L3ftN7zqwUv84tPH/MLHb/APfvl53v3a+0k+8MqHL/MT/+wZVtvx9+wxvP9b3sP7v/ndfPqjz2ALS4wR1zuM1QIVWjRYY8g5M/YDSinJE91v8M7TbQfcECS65myLUmBtKZNxScZ/YkikGAVC0mRyysQYpj8j3WbADZ6+H6nqguHWGlMY6rKSzLjeiVtEQYwR3zt0oclJ5snLuiA4j/eBqi4Yh4BOkQvbG8zDFrdynCjotpqiMpiikJGEmLh05ZB3feDN/Pj/76cBePDuA77hXY/hU+bC/oyT1YsjUmd3mPsi1d7hkq/6fe/jW777axiHkRSd3Ije0feeGASQYKwm+QxFksFW76hnBac3W5pFgw8ZPV1lO83TkBVZZbQ2nB6d0MwabKGxRUPwAaVGQCwsbnREr+nbM6qmxFpDu2mJPjFbVnTbkWbZEEicHq8xlcEacz6Q+q4PvIXLVw/483/iP2F1sptV2OmLW48/cpmXPXSJkGAcI5tpkfUx4uLUOUsJPwZCiPiUWR5cYHW8JvqAGxwp5ykqQLptxgiN0liDsQaAuhH6SFkVaGOpKrGXZfJ5Z00ZI2HISmi0tiyoC0Oh4J645fTmEWvnMIWlXixJwOGlfXJWlI0sZiixrFSlxRYFxmjGMaA1eO85OzojOC9Zd20vc3AKnnv6hgTBakUIiQuLGd4HRj/iXZCuf2llHiIJOnNzuhWc9U4vab38dY/y3q/9Et7/Te+lKA3HR2eEGOhXI3kCnKSYMLrAWE1WmX7bi13LykYsDAG0FscI0s0Oo0eRCTlRNSXtukcDGMXY91KsHD05ZmyxQGtDt24JIWELi7WGnGVk4eDiHuvVGlLmsdc8wg/+R3+c/+gH/wtOjs7u8LO304tJH/61G7z2wYsUSvG+Nz6ED5FPPX9GmzP7Cs5WHd/01Cv4az/+UU7Xw+f95+9f2OOt73uDrEnT2M04OvpOXFq2LMgxEVBCQUahrWG+bBi6kb4byDmzf2GO94GytCwuLmnPOowR0nlZV8yaGh8Dow90m4GhGylqAaUorYkhsbywJN6MNLMK5zxGG8bOEV2gairWZy1KwTgd5IyRmVjXO1KSLLmYBMjSrlqK3HB2a81m3UFK1Emx2r+MjgbvPN57QgjsHS541wfexs/+w19ku+7Yn1fUpWVoPYV98WRRvnh+kxeJtNE8/ppH+TP/2Z/i2/7kB+nagfXplnbTsz5Z0W1GvItg5KDV9yPojB/FNmWAbjMyWzTSKQgRN3iaeUVRF1RNSSZRTCAEYy3ehamLMEg7GxlmXR2vp0Bh8SBvNy2nx2ui96QUWR1v2a43rE83rI5XrE5WbE+3tJuOvhvp24GzW2c89qqH+Df/8z/JwaX9O/307rTTb6tLF+Z8z7e+hXlTMo6RrnPEif7oRo93AecD3gc50IVIO0ZWJ2vGYWToR7pukOBtHwk+iNUSJVYUFyZ6GJBlXo0M1mpyTOeb0bF3pAz7l5ZCpSTTLGrqeUVB4rWHGrVZc3Rrg4sZXVfYS5co6oqiLqmaEjd4saFoxLKmxP7ovKes7fR4vJD+kswveedRwDA6vPdcvvsCZVWyf2GJm+hn4yDB4zlPmV9G0/cjfvS0m47DSwd3+CrudKe02Jvz5Dtezb/9//hB3v/N76XdtBzfOIGU6Tcj2mrqWcHFq/ss9mrKuiTESBgCRVWijKFe1KjbcB2jyUkObikmYpDNZDFZJut5CQbC6Bg6R85gjSGExOnRitPjFRkoSiPQh7YnBE+Ika7taDctq9M1Z7fOePSJB/m+f+97uHj18E4/jTu9iPTLn77Frz63Yj14Ts5a3v3k/TxwdY+//ZMfp3UJ5yJX9md8x9c+yYWD2ef95+8dLnjgkbtp1x3OebTVtOuOohbr/GzRUDY1tpC1yBSGsi7ZrnvaVUdVlSz2ZiwO5lhjqBY1IWb86MlktJISZLtuiU7iQGxp5Ps3JcsLSy7dd5GhH/HOS2bk4Ag+yv0+qxidJ+dMVcqeNDgphCotYwXNoqasiylOK2KMnnLmMpvO025GtltHWm3YK6RpsV21DP3IOMr3fsM7Xskjr3gQoxXvf/PDzKqCam+GexHFl+wOc19EUkrxoe/+Ov7CX/n3edkrH8a5kX7TU81KtutO2tTdyOgdtjTnyFfXeZTRRB9R1jBbVJjCUlYFVW0xRpEVkjeVEm4M9O2ANQpypmws3abHDY5u06FUYnO6pSoLwujwzoGG4AJKJVbHG/p2RBtFNatJOeNHx+gC223HjWePOH7umLPjM4ZuYHWy5hVPPsa//V9+//kcw047fTFJKXjP2x7mYG82AXwizgdiSIwuMI5B8uRCkgNezoxjYKDg7GTDdtVOi1SQQ5FRpKlLlZDvpxTn3bsYMyDd8TwtokpL6Gkzr2VBrEpBslvD40vNQ2XglUtYhIEcIy5nIqCsFvpXjEQfp4Nhpp4VU0dC44aRbt3RbXq6bS+/W06MzstQel3AtEAO7Ugza1AKFvtzck7nYeLG6Cl8PJNixg0ON8jCXjYV7/+md93Jy7jTHVJRFvzJP/fd/Fv/+Q9glWSYlrVYG0OMMq9ZGSl4bAfazcDYSdDvXQ9ekg2a0RSVJYZIypmqlC62zAxkZouKcRzp2oF23Un1ffC4KQM1+EC77UlZ5u2qusBPH1sdr89z7FIIsm4OTgizyGzpfQ/fxXf96T+4Kzru9HmRUvCeJx/gNY9cIVuLm8ZS3v3aB7jn8h4ff/YUW5WMIbIsDe983f2f98fwti99PSkl+m2PHx3eedzoaWYV+xf3aOYzWTtSlvsURb
8dUBrmi5p60dC2A7eOTtlMa0D2gbIpZH61KSkLS7WoWRzMISbm+wuqWUUYPEfP3OLm0zcpq4LN6RZjZI0hw9ANnN1aUdYl7brFWEXdVBSVWMnqRubx+m4k+gBkgo+kmBhHR9aKXhe0g6PtHavTDf22I4WI1ortWcv6dIN3kZQyX/eHvownHr5EDokbJx0f/rWbnL5ILJawO8x9Uen3/Stfz3f8wO/j6MYR3nnObm3YnG45vnYLyMzmJUVpJOD3rMV1I0ohFUygmBWUlRH0a/Boo6mahqIsSC5QNZaxE1tWnuhEkBm6kZQyMSUUcHxzJVXVm6d45+TNoOvxXir5MWfG3tFve4LzbE9WuHEkeqER5Zzpuw43jEQnCNq+Hbj3gau84nWP3tkneaedfgu98y0P8zXvfxVlaUkoRhdIUfz+KckiEkKSv8fEMAa2vePsrKXbdAzdyDi46fMT/nbXC1BaYQojWEkFtiwIIaKNoahl4UoJqqpkr1DMk+PuAxlML6zmft2Rrz/P5pPPoseedvTkquTKPRfZv7RHeeUKxbyhnjdUswZdSP5bs2ggZ/p2EDplP9Bte7arlm7V0q17isIQY6bvHVorht7RdwPL/RkXrxwSXWB1ssVPcBWxhRqMMXgfiCHKIXZ6L/E7iu1LTk++/dX84P/5j/G2972RzWrL0Pe025ZxGKVC3g4yL2oVRiuZfXEBFIRB1rnCGsZuoFv3mEpDTgyDzMiMgyPExPGNM7yLlKXl9sZOWSW5iimDEghKCpEYIuszITwbo7FGEUNgaDvpok/d5xgCKUbOTtZUi5q777/Md/5rf2DXodvpc9Z9l/d45K59bM4czitGH+ldoCg03/SlryD6yMeeOcbFRDd6XvfwJZ58/Orn9TE8+ooHOTta0Xcj5awSMmRIXL3vMgeX9ogxsD3doBAHSIgBWxrGfqTrpDgpcTowP5yTUqZe1Iyjp133hJAZB1kbjJGcUzeMNFXBpbsO2bu4xBaW+V7D/qU9UIpuM0qkSFkw35szW9SgFM4FNmcS7ZNTZn3W4gY3dQEFsjX0IylFyWyNGTWbA4pxCAy9RztHURhSlmgCcYn1KKW4b2n5pi99NZvec/2kY/MiG/vZHea+CGSM5hu+/QN803d8gM3ZCj+I13foB1L0JGShGoZIiHJgknyOhB8iurREL90DCQTWVFVBiontWYtSMosTQ2I2rwijIyMoZ2PtdBNa7NTmVkrsntW8lv/XGjtleEiVJOLdQIwBNzqUVuft8W7dkVKWQMiUzzGxm9MNRWn4um97v+SQ7LTTF4nqyvJl757CVH1gHDzj4AXyo+Rg53ycKI3qnGTZDRHnPztHF3wg+CjJADFLhyEJ+vm2LUQ6aIlmVqONJoVESglrNNqN1J95GvXss5z84kfJxyf40xUnz9/iU0ctn35+xcc/fcTTN9Zs+4Aylr3DBfuHe9SzmrEdObl+QrfuBEoy2TwlPFzu06Kw+NFTTjN7VV1RNpZ+O9Cue5mlOFiw2JvjBkcIgWZWs5g6lsZKx/82fKIoLWVdYfSv66Ls9JKQ0oqv+OBT/Ln/+gd565e9QSr3RoKCx16sVMGHKcYjkRKEGHGjk6JkoWWWZ5SqujHTdiRD1RQ084qyLCgrS0qRMAaGbmSzagku0G86QJGyopxVMFFiuf3zYqDbSLFFW0O77Wg3HX6Qjt7mtGUcPF0rcIbNyZZmVnPx8j7f9+9+F1Vd3rHndqcXvp65ueaTN7cMIRJ9YFYV3DzeMriAH0be/5aHZI2IicIYdE58/Xse58mX3/Ubvs8Dd+3xxz/0Juryd7ZvKqtCiMjGiN1fKfJUNLxw5eA8x81YTdcO5AneJSHiQebUBo+xhkIZogsYo9icSIetnlUUhaEoJbM4Tge/cdOTYpKCDXDp7gt4F/FjIMVEPZNRAJ0SpAnK11RUTUUzayQrNSXICW0UKUrhpqpL+nYkxiSh5UCczynmDfNZxby2HKSBdtPDlDsXXGR9dEb3mWfpPvoJtDXsLWqa2hJeRLEEsDvMfVHovofv4bv/9B/Ex8z6bMtm3bE6WzMODl0UhJBxvUOpTApCChqHgbIuICdCLzS6YTvIAjTlWVlj2T9sUIVh6B1nt9bimzYajeTK+clKFuLtBTigjcAaXD/iglRL4q/7d0jik94OhBCoZiUxJLz32EKRUhCvckoMw0i77VmfbnBD4M3veT3f8f3fgtK7Td9OXxx63avv5YG7D4g+ESMM06IT42RJ7p1YLH3Ee7FbOh/pnBROchJbY0a62yl9tqOntGS+CU5ZvoeZ5nq0FltZURhMqfGD49nrZzx/OnJy0jJ85hp7voNLV4gu0DnPc8ctz9/YcHTS4UKk7z1o+d5lYamainpRE7w/z54cOkffecqypKxKjLU4J/QwgKEd2a62rNcddVNyeGl/OpQqmsWM+d6MqpEFeLE/QwH9tpcDas5oI3MT1lruefAqBxf37uwF3ekLoguXD/j+/+P3kKdNVwweW1jcGLCFzFU285q6KUALFGhvf45SiuAcrnNUVYHWYq+yhWXoBvzocYPn6LlTxmGkmVUoMspqvPP0256hc3gf2ZxsaVctwXnSZF0uq4LzoKoM4+AYh5G+HaVQ4zzOOSFshkAYHSlGVE7UdUFRGrQ1PPrKB+/wM7zTC10/+UvPEJsKMxMacF2VfOa5M7aTbf/dr3+Qy5eWQgqOmegC3/jUEzTVZw9uX/Hmh1goeOMTd3O4/y8/V/fU176Du++/QgqRdtVS1RVuGDm4vCd7v9ERJlqlMYroIv1mkH1eCJwdr9FWHCXlvMJYzXYzMI6elEBbGQUoCiGfd/2ALS37l/aYTWvGbG/G6lg6f/sXFuSUhcocEzEnYhLLcxhHht6hrVCjtVFoq/CjJ6Y4wUyE1dBvR4GPucCt4w3VrKZqCgCGtqdBRh0g07QrzEc+ws2f/DBnx2uSj9x7eUlIkeduvrjotbvD3BeBvuW7v0YGs8fxHETihxHvAyF4ytJOL+BA1UhVXBlDihlbaLTJGAVVXRC9B63EFlVqspLgYD8I5MQPjgzo6c0jZ2SYLkk+VlmXaKCsrcwwaINS4FwgpYgfJkBDytRNSb/u6NuBGNO57coWkusz9A4/CjDCjY5u27E53fDUV7+dC5cP7uyTvtNOQF0XPPXOxzBa7gcBnERCyriQcNOhJk/ZcimBD4nRRXqfJ6DJ1GGLCTJSlRw9MSWMFSKX2EqMdA9KSzOraRaNZDg2FTZncWHedw/b3tF1A+vOcXqyYXvtOt0EFHKjpxsEkNJ3MlTebkepXC4qZsua+XImG1uliUmKJkop2m1HP4xSkEkyY5SSLIpDN7Lcq9m/ILaYqpHB96K0RBdQSlOWxQSDcWijKasSrYW0WTcV2mgeeuy+nUXtJaD5suFf+TPfxtD3hBBoNxI9sDndUjcFXTsydCPrU4mkGQdP8IFuO7BddYy92CfH0YHRtJseN3pMaWROdQzUswJtFF3bc3q0nkJ3Jreyvj1nmjBWE13Ejx6lNahMzgml9ORIiWLPihEmqp8bPH0rt
Ddx4RiGOi23ZMx8LV4x2pwL1XTtTpLgum2Ra5oMYq1rtl83bOIqjdeclFaS0tH+Tk/sC0141RqmCspesDORVSqoRNx/5w1LyRFSvuEAxQYuLqas9ht8c6DSdOY6Ri6HpPyUIaJ4zVjSxObYOzQopCybltcAbv/BJLsL/a88rXdE1q6LggReMG4pjxTsOXNyfajFGNqr6mVCH0ljoq5dI5R0VpoaWt9WragbtUul5DZUupmJLVdGXFio+gNK2aANY0DY0xDJteaZDt+V9zRkRpU7nXvWS6KJQckaFHRJuCGDVQ6PFUC8EHxsO8VgCULolAaTrQMHhEKqAGPtZaxNhmnS4cdhOhC41mqYdC54RpzMQpcXrWYWomdBZYi7kVdwc5ZfbXOy4eXZBzXmQvORVCr+6SOWkRl6aEtQbfqeGd1AIIYQjkGJsjeSLFgussecxgIAzqrH486LSvJE8YusU4TzLEWAlBnS5pLulYbTCGYCmNiq3XFbpOIw1mmUHoPPk4UmJeNXMfg7WYu0UQEVJMIGCkQlENgroHtcXZOeLkENHoAMaJ0Hfcf+OMaX9k3B0QUR2PadEDOSaowoM3zhnHjDWCcr9YXMgsRnU3STv/oNoadQ+DWetQUnPx20XSVHE2gbV4p++vlEpKhZNzx9npsFrMrrhzEFqYd9/x4HRQ6/YpMx4mjoeIs+As5FJxRh3+PFpslVw0XNUa0iHjgsEG1SXknAGjk28LtNw4dcHUbmoVfRak5MixYr0hj4lZBlRL625WICr9uW8OlTmWNkUpDNuuRRToBHG/i7hVp7DiY1BKpRbVekqLsUEqvjcYY5uznaPmrHtGqZQ2EfC9pWTNTKVzGljvLGmKbO6fkMekB9BmnqWfc9XWiY7kqFU0/xQNMJe2llKqN1lYRZpONOGCR0RNVABECvvrzOHZnrNXzjFm5XCtuDvIMTEdpvbZtqSYmQ4TXTeQc2LovTblc6HfdMQxMh0nZYVEbZBYo436kjJxTFijZ0jfuxZLpdEEpmlVaQYqacqaLQdgDced7lc5qoOlbUVd6DdI0cxJg2mROCDGYHqPkYqxjjFXxFiGk9XN8qNY5yK3DLYF+UotpKRxAliLAXbP9s0Gtuqh0Fm1fDaQjuoidP7aKbtnO1JWUWlFmI6R6TiSc2V/PeGcw3nVIdhGZylZXxe082qtaaYONKG5ZlPpRmuIx8x0TIyHqKLyKo0C5shZbdWfPrxahawr7hyMMdx77ZyuD4z7yLgfOR6OlJJB1Pyk1oILdtnwaqmUUjDWYZjXizY+0pSJx6jrp9OJRK2yGJTk2fTEOpz3hC5AEXxn2wHX4b1qZkuRxSSlpkJNFYxq4kLv2vuoYFS/VLMaS+RjxLt1YrHip2GojeYohN5jndIYH7xxjvNe9wejTpUiesiruSidyqgNeZwiOVeN6LA6GR53E/GYiIeI96YdEA3OeXwIGnRc6mKcUppTq07/kqYQOJ0EjodR10oqSC2IZHLU2ASpQgiOs/snjV68FnMr7g42pxu2Zxv6ITRn5UIpGaFQa0FQmr+IsjyMaedM7xALYtQIRYqucdXBTXSD1+xUazBWZTu16pDAWo9tFOeu9/SDJ3iL9bq2amvm1CK4zmvDJRakgnFtrVttdBoRzbE8JqAybNzK6PoYrCfpWwXVwKRUyUWwqBW5tYbry4l4HPFD3zoqpTl9NZtYa5AEzlq6Tu1gDYI1Bmvg+vrIvdfu8+C1U7rBc7iOi7NXi3tVZz2LTu7aa4NymzVA0mOsWs5OxwzOqhteKXinBgzGgiSlkPng+Ef/i3/Mv/hv/iXXF9df4H1dseKzRtH4DwP7yyOllKVws1anD1iYDtp0CUOvRidTwjhDGjXvp3lE4zqPHWF/uSPHrBO3XPHBt8BVdZuURl0RA5Iqxqmbn53pbk7XZ0UndJKr5gSJjimkVmwzUfHWUjunGqeUNdZgxYqPQCnCVpkXqTa2RyUecjPmmd0sDSVnnDU6ZTaGwy5yuNpjvaEbOmrS+AJjjTotp0w2Bts6/z7YZV8DncQZq2vEmOZuWSsIxDEDTQdkYTymxVlzzlS07eCYc+Hi8TWvffv1NWduxZ2CMZCmSbXPUU23Sq1IUlplSdqMSVH3ImPVpCuNmjOXUqZk3YO8d2RridnQ54qgWuzQOwxKt3ZeJ+zzGdN7txiiSAU3WHVV7zxD55rbukp1jDXUqSBimp5VdeFpyvhgKZMOF/7iX/3lF31bf+WwFnO3CMag9q1Vmn2ymo9YY3jw+ikP3xpxzkBw5GjA6AZXciFqlAfH64maNePn7JVzXYnoJogpYBzXzw56MBTNGJFSmh2sXQSzGi7sWfQFArUWalLdRBUIc5bP2UBpocjzNpliUe61pr+uWHF30KyVjdxEDKRJaV+zNboxBo+Bbd+6nmWJDzCorqAKqhuIekAG1RPklPHG0XVOmzW14trrbs82GKmkrBmRpqLrViqlik7iRHBdm8LlShc8QwtxDZ0aQpyeDRoICxz2mWmMCKtOYcVPo5ZC6AzDNrTPo06Q5wLPOkNp9CsRS21NDdd5sEZ11amye3qFfd3iWgaBc5b+bIOhNt1d08LVFjtgaJM2nSRb45bmZkmCD4Y4ZjVSMGrRXvLN4bA2MyKlaBq1QreNXLJixR2BiBCnyHSM9JuOYNXsRL0OlP0xr6GaK9bqNPtGv1YxDkzVeAIBrp9csj09xTlDjpU8Jo0nCQ7rtLGoE7WCsZXdZYSmYe2s0fS61pHR6bpVmqZrmvOiTuvK/FLTJKpRI7Ap8s3f/Abv/+S9L/Cu/uphHVbeMqRYmt2zpRRBqnbhc8p475iOEzRnIO1w6oZqWhHogqPbdtqJmSLTmJo9umX3dM/1k71O9dAOp3YuNQqh6zsE00KICzFmcq4cd5pX5YLR102q9bm63JMbZWvW8Hnv2pRAD6jjMeo0YcWKuwJDy4FzGISSdELgrKEfPMO2a1NsgxTNvMqlZWcZoxbNQ6Dr/TJFEFETCessx90eEKZjUve9lhFXS+VwrXRpnXw37d2UmY4Za3QDVedBvY5zVqMSjMYZ0JwxU1S6WilVD97ecnp/88Xe1xW/kpimTF1Cftt0uArXz0ZtArTmo2325NLyT6VqlIaacenPHK+OxCk1o4XM/tme4/VImtrUwDmq6GTOWtMOnC3KQyol655kmqOr7zwnpwO+6by1uSlgNKsqR90jxzFx8WxH14eVZrniTiFNmTRmEM0VnqNv+k1gOO3xztANnq53VJTSmJMa2rnOY5ylGzptQHbqVBnHkRRTk9ZopA7omgXViKsEyGjETqr4ztJvVP8dgqN5q+C9pesd/eDVlKvI8r5rEXLRQHGVDFhOz4eFrrniButk7hahNjE3TftivU7KajWM+0R/qhRL6yxgtMtvtHOaJl1k05hJ48Q0JqrsCX1Yii0MnJ12FNQ63TjtmjrfdDzLpgmg9BSdFurGKSrfowi43rGpPdZo3pV1blnwIvoz+4sDZ/cOvPKVV9ld7r6w+7pixWcLXRil6d2MNeQpqXDcmRthuVGzlNB5tX
UODpDFHr1mnRx0g1t8H6z3XD+75vT+vUYrY5lGWKvFWMl5ob6EzpGS6vNym4IbhHE/4YMFq5OJkuZJhdVMyJSptbaoBFpB+YXe1BW/qqhVqVq5Il7aYUw4XB8YTgO2CzqNtvMHtrnWZe3ID5uBY8mkONFvOtIUWwyPYbMNPHu0Y3OuRgnO6me0VqV4zbTJWmcWijrm6ZShHQqlEnqVHuRcVFMKaspS1ck1Z6GIbYYPK1bcHVSpVCrjQU1Ntqc9u+tdMz3R9ZRSboMAMBa6QU1RatE9oBZllcyNjnF34Hi9o9tuVb9mmkulmBsac9WIGwG6zpLGjB/0HGiqHgLjpM2XbvAaMO4NoXPkIktsVSlKA8256JQdpYSu+DDWydwtgrWGHBO2GUA6a5pds2FzGnjla+f4zmPQKIEbzZwWdLVRuGZqSomJ4/6omR0Gai4kgcNh4rAbyUl/3lqrhaNIC2ytKpydO65eJwbWOe3cGIcPgX7bcfpg03JDLM57ddED5WQXYZoK/+R/+R99Ubd0xYrPAUIcE8dmABSj6oOqNGpLykAhx9Sy5hw+eMIQSFOhCkvOG+gkoRSNEtBcrbZZNtTnDrPO6wQeoxqhOKXFcTbnqjq59ju5CMddJOXCNOXF6Mh6pcLNB2Z1rxXN5lqx4iNw3tH1Dow0zaXqZaxxHPYTaUrLPoSYVowJtaoduu88oe+RKhx3B51Ux0w8TmweDJw9OMU7u+Ri0ZqUsw4PDLWwTK6NVRplyRmMYL3jsJs0FmdKHHZH4lEnC67Rl33wbE4GdhcHfvdv/v4Xej9XrPgsUVIhjpkpZkrTucVJs0/jFJsmTs+A/Ta06XrzPhiTUvsbA8Q2qjMYnLXKLMl6qJszT0uuzfmyahh50jzhnAt5ymBNayzeNH7SlPVPLGqighaFOdfFkEUJYm2957Xp8lGsxdwtwj/8H/97HA+J0miQNjj6TdeCFltxZ5qrUKrNglyWrB+laqlDEKJTtjRGdRKLiZIyKWb6oN1O26Zxtc52swbXNmrnDCGoi5HUgnVQUlV9T+9x1mAFNttOs+rQRdkNARcc4zHSb3pKqRx3xy/61q5Y8dlB1FAhRnV0jYeEdbZ99j3WOULvObk3YE0lHqNaPh+b4FQEQadyzpkbIxQM1jmcNYyHUR0vqyh9EtT5MtbmLmvoh6DWRc3R3XvXTCgK3aCv473B955aRKf9VhtAXeeVcoY+L2IsSk9bseIjCH1HqeCC53iYtOFQC8/efYRtjYeatbPvvF2agBoLp7lww8kG64ManowTOSVSjJhs6Daes/tbfOe1sWCkvYb+sdbiO4u1qO60FELvMca2bEShRCFOpTUoUTrmNOlEuhQ2Q+D+vQ3WWX7rb/z2F3xHV6z47FBrXeJujHPY4JRBlZt2uzkdB++0yR4zYsA4uxRPs4OysbadIT1PHz5uTX6waJPGd755OqizbfCuTetUs2qdbdmnOnHrtn7JkcPQpudK8Q+dxwdH6PxiuDeNiYsne43nWfEhrDTLWwSpFd93xKmCFdIx0Z30TKPSqvaXsQm71W5chaO1TeYERCd5oQuUPpNTwnlDSVEnZiJYhIIhDAFpWVbGqrueNEtLdbkUMDpWL7libdaOirFt8uA0SyhmxqMWicYa+qEDdPEOncc2asyKFXcFgjDtJ2oudNvQ6GEGnKGiY3Xn1ek1RcE4Qx5zo0dq80QdZGdNUMUamk5WN+Znj57wlW9/Qy9o1HAix0arFqEfPGCxRbPnut61cFjNp/SdOoXV3mtGUOfaZEM3eikV23QNwyZw/soJNecv7qau+JWFMxoXIBW6QTjupqahtsRDJAyduhyX2g5uShtW0x5tDJYyd/PRacGUdepmDekQ6fqAFKXvm1LJk+rAmSn/on+1TvNQwxDUOj3pNCCNiZIL979yH6SSRNSIJaprbMmmafcqdtXMrbhDKLk0sqMQOo2VGjZBp2PoOkzNfKtOdfFTcI1yL073KysaAeLc7Neg43Zl9980AnWCbqhZX7sb9Hq1VkCzjeOkHg9d79Utz3icM8RYsFbXnxqpKD3aWUvXe0pOhD6QprWY+yjWO3KLkFt2XE7a+dDNTJBa2JwrndG2UbS1ZqE+aj4P0Dr5GO2sGKPhjNQKtRDHSEqJeMw45xi2PdY30TrtNY3m8BhjMMzOYUUDyB2kKZKT5oGUIhhvCV1o2SRFKTRZi8jQO3ywjPvpi761K1Z8ZkhT4r/7f/4zNWGQ53Sm1oC1lKo0zJqFzVmPFKVSikiLMKjqSjtnZxnz3Jp2hK7j+ukFxohOOoqQxqLeJU0Dd9xF0phIKWPdDQ1TRJ8R0gLHa6NAW+8WU4laadRpg0iBpjlynfuib+2KX0Fo0y5yvLxmvN4zHUa63vJbf/vbnL92qlEc7XOpTpRz1pQeBm9syW07chp8H5QtEhP33zjFWodUpYzllAFp60FDwo1Vuue8N0kRalEpwPWTa6ZjYtj2OpluRi3z8Sc3S/T91YHjIa5ulivuFEouLW/Y4prpTylVGVNSlvxgFxxmMQrSs2HOuoas00WhUQFuKeBy1AgrzZGs+NYULEWQRqecHTFB95W5cTOczI7pmlWcWiHX9SonsNY2HZ7uiwbBWv2T18biT2Et5m4R/u0/+9f0ncNaR7/pwShlpes86XBEStFJXL05AEor4KpUSltQBoNzapQiVRfKuD8SJ3WlfP1rD+i8JR6mVrCBsbphlnrjXNQapxgrTIdIFUM3KOc6TYnTewPBW1ywhKGj67v2e02snjIxJnYrzXLFHUOpFetozRTti6Yxtw1J6Sigzn6hd7igXUjvNWpknlbX5nYJhpwqw8nAu2++zcWjp5TYDrWLAYpOOLQoU21dzrltupk4Fl3DbVJfS6UguE6LTmMgxkI/OI1GKBWDRqAYge/88z/74m7oil9ZhM5yuD5wcn/g3oMt56+eItVw3GWqGHVglkqVqoHdVae+moloFrfWWYsTp0yKSW3QRc1JvDc4o1rt0PnFlAvRA6Jq8QzWO/ptxzB4pGTi/oAxwoPXN5zc36oOtKqupxtCa34UfBfotz0gTNN6UFxxd1Cr7iEaORCpIkxjat/TRl83aIaccDMM0D2qGZCIYEMbDtCGA+a5pkzLTp37IDN7S2MRkp49ra5tg8E6yFHziKsoO8t3npqFknQPUzfL2oxYClJVb16a0+aKD2O9I7cIV08uOV7t2Jx2bLYdwyZw3E8q9Ea4enzBtB+JY275PvZGOIoe8JQDreYGtfGgS5GbA2Cp5DQRY1zG8NZpdoi6VdpmZuJ0I2zUSmO1AwSAaa8lhWcfXKroVUQnFVhSrEx77egI8OC1B3zlm1/5gu7qihWfE5ZpuG6mpk2xQaM9rJFFhF5SJjYb91JU76rOl1poqamJdlancdIJhtSW+6P20KbRI9WgqFKzTiAMs8nKTdGXk2CM6hes0UKy33i63rfIAn1ODCcDoQt0Q+C4O3yx93PFryQ2JxuCg8PlCF1H6DvCpm/NRP2ZaUwYtLGYkxqf1FJnVxT9LLfPpu5Pum7ipA6wKUY19
5kSRiCnRI6JcTcuWmypGoIstXD1+JI0JbZnPcO9E1zXL87OmiU3vzcz/x/DNmCxLcN1xYq7gfd/8h7jOGnTpApS2vqrSrtk/nqzJDdGnSPnjDediNcbp1ejhkHbk41O6pyu2zk6gCr4zjG73dlmTFSTygDC4BbXdd2ChG7jW/SBpRs8oVNd+Xx+7XrVcDtjca5pZ1d8COsduUXwXcCFDlsL027k4tElh6sD4/7A+WunnNzbYL0ukDk/ytq5m6LZbnMXtJayHBLjmNrYHe2exEKairoOZbVXL0kPiLrp2oWKYqxarc8WttYanIVn7z/lyTvPGE57hm3XXIrazzRXPmct3loevPGA81fvfZG3dsWKzxRSKzQqSwgWH1zLfYQcE9MUGY+RFBPxGLX4qqrjCZ1j2IYWPt5oKM7ie8/mdNMc/ITpcKDkRPsnhhYV4nUjBFkiCaRpXb23LWdILaj7weGtYXsS6HvVLUyHpHQ126yoTXMeXLHiYxCnzP2vPGA43RCPiThmDrsIqDsejfqYUyGXurgpz/EE1JYj1RqFyh2BeBzJKdP1AeuVhoU1lJLYXx4YDxPjYcRZcEY47A7snlyzf7onbALdpsP4js3JptE2MzVX4pSb9KCqljQVho1vjUr9+ooVdwUP33m4NNpD73HB0rdcuZwL0zESp8y4i6QpNQ8Es1ArjW19yfYa81DAd573f/IOu4vrRqOUxS1zOqqDbcmV/jS05r6yP9SpNmENbDaB7SbgrMYXbLfK3pobPjy3nxnnOH/1lGGjBnsrPoy1mLtluPfaGWevnXL+ysDp2cD2xHP/1TOevX8BVrsW2mnR4qukvCwK3VRvMqOeL8pqFabjRJ4i1loefO0eJ+c9w+CJY8Q1V6GZnhnHRDwWut5r56TzevjMSpFxVt37prE2N8w2ki/aZZmmREoJQyWl2MSxK1bcDUit5JxxzwUl20bpqqUQgm6Iz2sG5rSB6ZiYjlENUVB9Qegcw6bj6sklcZyQKrz75juaAye6kc6hzLlN6rvBIwVC5zTHx+pjQKnZFmv0e9bfmKyETifuGmvS/OQF0lTWnLkVH4vjYSKlivWe0HValHnDeMzkpFQp1bKpDtMFLexKi8sRaMY/OmnWhoSnVGHcHzjsRjAO4xzd0HG8ntic9aRpwjp49sEznj2+IATbPusB6wO+Hwibrn1uhdTCkEEoKTPuY4tNEKw3bM8G/bkpfqH3c8WKzxrz89yHsBRjUkVZU+37cxbknF2q7pU053PTijGDD8rUSDHz1l/+RPeN4HXdOS0CQ2+xHkJwlKmqm3JwWMDZNv02leAN1tycD2e3dJHZzdmp/CB4jNXYnZQy/ab74m7mryjWYu6WwfhAKYZoAudvnBP6ATGe4WSDsV6zpJ4zUpjpj/PBsZab3A5QiqTU2gKLIU6TClrFUI4Txhb6wUJWo5RxP1JLZhoTYfCaJTJqXl2KE9M4Ebzl9NV7nL/6gGHTU3JZnPTmh8MNlxpoWXYrVtwVzDbNGLNQIaU1UkKvG6qKvG822tm5srZN01gNPq6lUpIWU4fdYemyWqfC84VCaWgTb7PYvzsLoTOUmCgx6wSeCqj+KLWYE9O42CIacVKbPiFOkVIqaayrMcSKj8Ww7XBdR7/d0G06ShGcMRijnXrv9fAmUhfKv4jolLg5JKsEx+CaHXqtWgAerw/tM2sIfaCK4cFX77M9P+Xeq/e499o9rLP0zpGmwvkbDxjunWPcbJGuAcMpKqXTebPYohtTFvfn6ZCpVa/h7Gr0s+LuQKrwvX/1522fUS10Lup9YFvOaZV6U7TVZpCVCzkWSq6654g0x1gNAE8xgcDu6oowBHynujtrQHIldA7fGfqNw3nwAYatxxjh7H6v0VipMsvvpFZyap4MRjXiiFKn8xSRoo7oXX/jxLniBusJ+hZhd7nj3/y//4Td5YG0P7K/POKHDpwjTmqSoJQrIU4aEJmzOhdJnfUJGq6qOjcVjKO/gg+eeJiIx1EXbQj0mx6pFddZDrsj0/FIPEa63iEpcf1spw6WtVJiIWw6TOjwfaeufbEgqEOePhwytWS8NwybjmHTaWDs2vVfcUfgO8/f+Hf/sH3Wa6MoCsbcuIJZr7mNtc5h3LI0XnzTE+hgrC7ByuNx4uHb7y/XuXhywe7iihTzkrlVSmmFpFLGrGuBq21S7r0WmdZpvpdtB2d9n6pfsi0LzKCUmOuLPbvLsX1/xYoPw9hGIxY99NVayCXjvNVnfqpLc6LJcrDGtDwpWRoZ1qmrqnM3xdQ0JqbjpK/VYjlKUWmA6wK+Czz46mv0987Z3j/DWLcEF5ei5kC0gnEOvVcdjgDatKhZA5Sd0+nh6tq64q7hyXuPefbo2UJxllK0eeItvjU4XFA5DrCsoThlXLDNada0iZnFdr61YODdN99h2o3kqSy6V6ka8RFjVt3rwgBTEnUaMzmW5TVqrksjZ6b7q5GfPhdynkPOE7uL42LgsuIGazF3i1By4bDbNdoWWBe0q5JLW3j6AS9Fu6GlyOIcpPSu2g6INyNs591ioa6/K+yudloMVrh+NuG7bnEWOz0fsN4wXe8oJWJNpTvp6HvPyYMzQtcDjU7WAsvnh4ZBnZXicVLLWlEb95o02HXFirsAYwzDyUapyX2g5pt1gDR79TgXcHYJRHXe4TvfnMAqOSalqeXMeJzoho443lDAcsy8/5P3QZTSaRzLWpa2OSJQitJg5ve22XaLK9l8wC1FnyGuhZZjWnRIF5iOkc3Z0LR6K1Z8GAaoOZOmif3ljnjQhl88JqVy5aLP+1ybq6U2LmZ78xu2iBolOO8WK3OkcrjcUdU+D6lQxbRmCC1eo1BqQUpZcuOet1NHKlJaU7NdTw0Ybg6uvnMcdkfSOK1NixV3Dikm3n3zHXVJNnr2m02IqojqqL1p7JHaDO+Ebggq3QFgbtYog+Txuw8REcbDyDs/eotaWiOnxYzUWuiCY2jh35tth2vZcbU2vwVncA6qqOkKIs+5abrlXOp8ezZYq1putzZcPoq1mLtlCF1H1/eEoacberz3yGzH1VwqvbdNp8aNyLxlSBn9acAsG+e8sNsZDo2WqoTOE3pPKTAdM91mwPcbhrMtw/1zzl494/SVU6iG4eRU6Z+xtMBXdQ5LsaiDn2iIbOhUdCsixGPk8ul1c1BaD4or7gZe/cqrWGeoWeMJnDdQm0Ol1+6n6lWd0iSb+UMcc9tsWSZsoQ/Y4HEhLAXY83j07iN2F9eUVEjHrBS2UttzwFCy0plTzCpkNzDFtiGLZs05b5bA5VL1Z3yYD7qGeExKqVmx4mPw9IOnTMfEcT9y3I/NzKeSi3be+8G1SbHmvxmjBgezBmfWi87NP9qf2f5cpBKPcdGGGmPptxuG7Qasv3FooP1qGwNIi+mRKhSpmi3XMlZnGplan+vv1Oas+d/+1//NF3MjV6z4HPH+j99jd3GlzRNnlnViNENHG/ZFmrZV16tGd2TSGMkpf0gS8LxR0Nt/+RbH3aFpvTUGKzX3ylyF8ZgYD6lNzFseXXutWpVyreYpsrDJ4pgo
saiGtpn4pVzYngfW5v9PYy3mbiGMsBywUs6kcVrsZufOiXY97OJix2J8Ii208Sb822qLdN5DWSYGoLbNLeyYNjavWTubx30hJTDOEceJklOja2m3RwW1+hrONY2QUeqMGKPje+cZTrdrMbfizuCv/4O/jm0b4lycSaNA6xS9NA2c5mzV5gQ2G444x+Jk6Z2umc22px9+WvSdU+bRe4+0QdI0SqVCLsJ4zNSq1xGEnCpSZmt4AxjVB4k+B+KkG2ffO2pS+lktOlFRd7J1A13x0/jOP/+31JJ1IlaqNiJUBKPhwy042Lk2ha7aOJxzqGprJNKiC2a61VLXAS48HxlQqTlTcsZ5B1hcCLg+qHFDy6ZCWGiW87TbWtNyG0GKLKHEM61sc7b90PR7xYq7AussZw9OqaWottrrGS/F1gSsNz+XUzsjztEh7QwXes+w7ai1cP3senntnDLv/PAtnYyLkEZln4zHrI7Mut3oOquzAZ+ePTUwvDZqpdAPHmug7y0uNO1tkaX4PBwix/30y715twBrMXfL4CwYB7VkDld7xt2RUjLTGEmxNH2MHgpLqdTZEKXpE+bu5xwWCXMn09wYHBgYGxVSiuCCx3mPdWGhXNaisvVaC+Nx4rifSKkwjallXJUbCkyjXFpnOR4mLRyFRVN0+XRHTmtQ64q7gek4UaTqRK7VP40lhgCuc63zGMlR3WZLKprX4zRsVZssupGp256j34SPvd7xeq/NlFIXJ7CahWkq5CzkqEWclDnmwCi9UsD3gdJClO2ywRtcpxOPmQ4dmlvtihUfhTRGSE6lGVxBaoY9xtwYm0itNw3EVmTNTpdzbE7JZXZIec5puZKmrEYrzlJSbhbqmVpVg51TgWqwxiLStHmYG8qkqHtejrkZOlQqNOql5kCq2UJemxYr7iS897z+9dexzjI1Fkht2uhapeUVQ2m5qM7f6E2XgqzFS820/efx+P3HHK4PunadNnFKhXHOPW7abedUo6dUTMBYzbIrKDMEvVTOQopZ96FcyLHiglPKJWvz/6P4hYo5Y8z/1hjzHWPMnxpj/i/GmMEY8xvGmH9ujPlLY8z/1RjTtZ/t27//sn3/1z+T/w++ZIhTUktnafzklChJtQlS1cEyz5luTlubcwRBbd2Q2XEcWLqks5PesqGKtM6qQMsE6gZPP3R0Q0e/HQj9gHWB0HcMJz2h88vrz+Hkxhp8sIugNU5xmRCmmDBGVBe06hRW3BEYq7QvY+yiW6tFQ1g1jLvFhrSDpFLHWjyAa0YPLedHEBWmC5qz9TG4eHxBilEDyrlZ3LU1cEwzm8DCeExMo5pSpFKZmlGSsbqh5lhb46dNE63Fep2ir1jxcZDmOFdS1mKpFDCyOKqWWCitkeCD1SxUc7MHGSNLcDfo53amXkoVrp9dMe5H4pjbdE4bj2nKy/S45KTOq7UsweNqyKKTtyosReQc4THHedQiyz7og19dW1fcSQiipiNVNdVA04/aZr7VIm6s7lO1VVZS5/BwtStxzrM5O+PBG6986PXH/ZHDbq97Djp5B20iSoXDLpJTIaVCjNr4j+1PbcYqBkOOlSrz+1HzPGMspQ0jMIaYVzfLj+Ln3qGNMd8A/jfA3xGRPwAc8B8D/wfgPxeR3waeAf9J+5X/BHjWvv6ft59b8ZL4sz/+DvvLnVK1njc1aRuntIlbLUozcU7tz12wSzCx8zeaAtrhb44uKLlyuNq3LqpuqNpJEUpK5JhJY1LWJdAPHZuTLcN2i+86wtDhg9dg8OBb11ULxDilJRhZbdXVJamWshgxrFhx2+GcrjURWUKSMTqtKLk0aonBO7tMKnITptemKcKIWjwXIU2Fk7PN4vz1cfjJ996ktAiDObPRB4fvHS5YSq2Mx6QOf7Zp5MzsGNYaMKj8KE552fRBc4H2V+M6sVjx8RCdnmFU75JiWbRqN7o3/fTOe5VUjdGYs6PmwmoutOy8LzQHZKS2iUEzS/GO0He44AldUNpwszcvzalvoW6KaFyHzOYOLcC45SmGoSOnig+WrrMr5X/FnYUPVteHVWqktW3NNDkAsMR51FKbG616ILigrrW+82AN9mMiPC4ePlmkOjOl2nqLgD4bUl3o1UKbCJZKzu35gJ5jc37OyTJmagE1QFItuln3op/CL9pu9cDGGOOBLfAe8B8A//f2/f8z8D9tf/+ftH/Tvv8fmvWp+fIQpUBOY7yhtaAW5C6opbkPbul++k4LstzypKQVeppBZ9oGK80mXacF03FcNjtdbK1zagB0BJ9TbjQXnQZiDD5ozoixtrmFuZalZTAWpjFirRo+DNuh2UxXxDRC9YoVdwDOO4zoFCDN2W7mRiNQizZcSiukTJvCueAW0blUbbKklEkpI80h9mehlkKepkZ50/UtVagZrDeEzuGaDrbEvGzg8yZrTLOerkrDrO1QfNxPYC33XztbD7krPhYiSldMsWjkRbCULB9yontevzZPxWpt07tm0DVP0eafK6XOmw5P33tECGqa4IK7cburQkoF0zIXaXucujWbZQI4M0XAtMgOu7wPZw2n5xsQePbkkpRWs58VdxO1NtdIA77TM5p1pu1NzQhIB2KAuiPPdEiDru3DbtQz5Me8/tOHT7l6egmo9nseMMzTvVmnV5dzqNIp52fELEeY68SSyzJ0mCeF4yFhrKPr1+Dw5/FzF3Mi8g7wfwR+ghZxl8C/AC5EZBZAvQ18o/39G8Bb7Xdz+/lXf97rf5lRUiGOUV2GJtUP5Nb1nzdQsxie0OgjDuvng+JN5twNzXI+yKkgtSQN+hbmBVmXg2Jpmh7nnFpFV2l20HV5KMwdVHUXQ0fpRTULofdsTzd6cG0H3hUr7gqsNXS9b5qEupiPAJR20Ox6r9z/VsjNXcxZPzdPy0upDNuhTR1+9jXHw8jTDx5jjYrWtYjMS3BzKdoB9eH5vCADaKZcato95zWSQEQoKbef03Xsw2oHveLj0GiKTa+pzTyrjnnt+W8abSunQkl1yUCdf2/Wsc0F3PPmXNY5ZaK0PWU2LBHR/ca34k6t1rXRmGNZsheNNYvBg7VzVE7T1bUictbzfOe/+1N2F7sv+oauWPG5YKFZNt8CY3WBWmuWwG+eMyFyzuB9oy2XgkWb9jp8++lyrpbCe2++rZmnuZKS7kO+V7q+Tt+VLaYyHui3YTE3MsZgaGs0qGY7z2yTKuyuDpycDfzG7/06r3zllZ+6/pcZvwjN8gE6bfsN4OvACfBPftE3ZIz5Xxtj/tgY88e/6GvdVRz3e3zwSqtqB7WZbpmiGhvUWfdStTPampDPaeNasdeoVEtggTGad9V7zfTQL7Zf1JG8fW5jxkAITjdU1wStzelh/ro0bZBt3My5Y3r1eEeJc5DrF3EnV6z47CFVO4210b00fqB9ryjNJGd1jqxVqZElzh3IOTxZzU+6PtAPHbVUuv7jDVBmpDxhrNysa5GFFp2nFuLcpvndJmhAc9uorb051MpsEW8N3quWDkNzDlyx4sO4fnbF2z94q1Vt5qawaxSqeeJWnmsKmnaYq1Jv6MPP7UfKMPHMGaW
1Vg5Xu6XBwUwLNuZmr1tEeEDLsJuvVaVigJK1GVnrzRhARMg5N8v0nzZ2WLHiLkBEmMZIKeocWUpt0y5lYYzHtMQD1OZ6XFtwt7MG6x1xitjZ4fxn4IO33uf64orQa/ROTpV4iEqPRBaHdd85Qq/rrTT9dq1Kp3ZWZQChc4tDrfGqQY/HSBjCyhT5CH6RI/Q/At4UkUcikoD/CviHwP1GuwT4JvBO+/s7wLcA2vfvAU8++qIi8l+IyN8Rkb/zC7y3O40f/fmbyxRNR9Jm4R/XfONYOX+v5KalmRcvLN0Zad0R57SIU8qX4/rxRTNBuOmWzCNvY3SDVIG5dkLjGJnGCamldfHV9KTmqnEGlUXLUIuan5SiNrUllU+cOqxYcZtgg7pV6rS8aeda5o7qijQwvKRmi45OLmbal7HaDIlRw1cFPQi/9Zc/+cTrvvfm+4z7o677FgBbSos9QFTLhD4n4pjV4G+JLjFYr11anWaAoWpuGILrPrmQXPHlhrXqeDpH2kjbPOYCq+S6NCvmeJyZNkXbh+YYHJ3m3ehJ5yZCHDXDTrWdpk2+taFobfsMt+zUWZcDtIOpujqrMYtKDKzX/DvnLTmWRjlbu4or7iYM2pxTGnRdtN2zAZBzN2fGWm8ae7XMLC/NLLV+Ngn6eN2aiGjG6eIMK82roV27nUVL0dTjkkqjRXNDvRbNQa3tus5bfItKEIH3fvweT97/qfLhS41f5Mn1E+AfGGO2Tfv2HwJ/Bvy/gP9Z+5n/FfBft7//P9q/ad//p7Iq6n8upGkixQljXTucNZpks1+eNzhEnSK1wyJt4d646M3uldAoKN5ijcF6y3F2JmoumSmmxe0IEUyzc5Za1ZWyfZJKqS0E0pFiJsW0HGpNC51zwTZjFrs8QFbJ3Iq7gnmyZYwGhovIstb0PKk6oVrVeRa0qMI0QXjSsGWpNyYktQrH3eGTL9yo1dZBSRpLMruU5angvC4z5y2h88sBWif3spgR1ax/SpH2PoT91bXaTq9Y8TG4fnpxQ+sHpUq6m8Ju1mPPxZhrdEfQpuQcIDwbp+hLNH2c0ybjdDgSj+lGZ4cs2ry5MFTqsOJ5yqa1qv1p4zyMtJ8TQ9d5rPcLVXPFirsIkRs6vdSbc9rCJGlGRKYJTW/Woa5paw3dZlA/BOs+cak8fPuhRg7QlpQ1lCIcdqk1KNVQZW7KgLqt19Lo/+qn1ORCtcVbVcYp43tPmtS9dsUNfhHN3D9HjUz+JfBv22v9F8D/DvhPjTF/iWri/sv2K/8l8Gr7+n8K/O9/gff9pcbh+sBxv8cYi/cemnHBXBXNfOUcNbjRGF2k84IAmuWsLCHfsojDtaATEQ5Xu+c2YdF8O9EsnjjlRp1smSNt1RqjLmXq3pc1zPgj799Yy343KrWyFNIU+Rv/8G+tXdEVdwMizein6Q1Ec+Lmw2tt8R/SLCTnKABrZv2cfi3F+qHX/LRzplThx3/xY9KUCEGp1Rp30KIF2uZqrOb2VBFqo2G7oHoJO2/c3i2dWQT2l9cc98fP7ZatuN14/N7jZmnuMKbpbbxtsTjP6d+sTs9mJ0nTGhjSxN21zjpv0Q8jWuw5a5n2E0JrQMpz8R3NTEHJKJppB/OUzrZIAofvNB+15kpthgqCGmH64Ck5cfnk8gu9jytWfJ4opZLGvBRTVvmMlNzoyO1MODfYZ+OgWippiuwudogI25P+E2n3T95/1MK/lSo5a2Kt0wa+tZZ0TMRjwnlL13u6zi/Pg1Jnk72yMMhSypg25TPrUfGn8AsRxEXkPwP+s498+YfA3/uYnx2B//kvcr0VCmNM09IExmNtrniaxcFzEzdpMQTzoVGDW8GYiojRXKp5WjDzj9tBshsC037P6YN7i7VQarqbmRLmncd3Hudc0yToRuqcaXa2zR5apFmi08RxGkuA0Y6tsZaffPfNpRO0YsVthg+OcUzEWIhTRqmTsmRc5dTiQ6Q2GjO6FgpgwVl0Ktbs1tVMyL7YBlbV1r3MNBm08+mCaYJzQ44Z6wzD6aAdUGMosZCrEPqAiD4rxmNiOkaMs3QrzXLFJ8AAJ2cbDrsJqXM0B21fUpOS2uiNIoWS5gNZo07N7cQ5z0pozq8aeN9tAn3n29Rbr6mTBUuqRbV6xmCcwwDe3kzHpelvVBvaNHKpLCZezjmsVROhP//jP/uC7uCKFb8EGFpeG7h2PqulKhtjpji2/84TOtMaMGlKOO+JY6Tru0/MY5yOEw/f+YCzBw/IqS4NHETjS6CR/+fzp1V2iFJAi76v1tzxweuzw1RC7xeDpRUfxnpHbiFe/8Yb/MZf/U1eeeM+oQttOgez3ma2On/e/hlmjV2jVcpNoLD+G8BqQdgKOtc6m7UKpm2E8xQwdIHQB3zwi+OlMljs4uJX58y750xWnLOEvlcKTnDkCsO259E7D1lZtyvuAtSNUoXlOSbilKm5Mh3TYnyi7pTynAmKmkHUrDo5amnZkLKI0M0LcJFNa9oA7fWyrudU1PU2JpCKVJh2iTJlpl0EY/B9aPbR6mZ53I+Mx4j3Dr+an6z4JBjohg4f1NRgc9Lf0CcXx2S7/GkEriVnSrjZW6CZADnbTBIcJVXGY+TN73yfOX+KFonQti412GLOl5v3QddyT61SKYPHd0Hd+oyQUyTFxLMn14TOr3T/FXcc2lRfiquW74hV7ZpmuqlMJ+dMipkcc6Pne/ptR04aR/VJzfecMsfrPaFzOgkvdZmI68xBCL3XKJNWmC0+DrR4Ldco1oAUIU+Z0DlAVjOuj8FazN1CiAjb0xPuv3rO9nQg9J5+07UuixZPthV1NdfWYdHMjjnLQxeidu4x6jG02MY6S+iUevXuD9/SwoybzVIpMs1tDLtM9rx3C41Tpwo6TvfB4ZxBSibHRE2Z0HeoodksYF8/iivuBmb3rdgiN1xz1lPTID3U+sUswrQcR6VH61qEFDPO6KY45+z8LMH585g7nDnX9nq0rqtSqdWC2gC6seI0iqAflOYyW+DOGqZh0yFU/vxffvdzvGMrbjt+92/9Huev3KMfgmaNGoNBqb0ueGrVPMOcKrWZd+UibQLNDb0SbtYCqtMpqeCC5+mjC9XJLEaUN5OFNGbc7Krc9HE3hZzTQs5ZbCskfXCYZoYy7kdqLkhZm4kr7i5mbalzVl0lq069Z3aGa2e/maI805RnLXY/+Oeo0fUTJ3OgecgaUN4mfRbSMTVXPWE6xPY8EG10xoqx8xp2sEh2NO5qfzUqPdR7bZiu+BDWE/QthO8Cm9MNKRW2JwPeu9Y16bWTYYzqbZq9eKmqC2g2CwhK46pNm/B8tocWZZZu0zGNUd3xmA1SdHN0vtErvUNm0wU7b86VUmkbacD1ATM/GJzT7JCS2V2qocL1xZ7Lx1eUvGbNrbgbGMeItJ3OWEMVnYyVXBaNqm3NEGTOgFQKJqIb2P03HhBaFEGtswD806993B/YXe6wKAWttgNzLY1ihk4zjECNOjGUdiiec7vUpGV2oRX6vmN3cf053rEVtx3nr96nH3p8F9RYh6bFbN115z2hCzfGKFZdJ33wYO
xicuLaNM/algnX9qw8TTx9+AQWapYeJufpgIYO66HReqt74uxO2fY3qYbN2YZhO9Bve0LX4bzj9MEJ2Oa2vI7mVtxRGIO6jMhsmIfq5BCwYJw28F3LbTRoBJWzFu+NsjrQiJp4HD9VQ/2T7/+YdJwac6swjWqUZ41AbZFaLc94drHV+AIotSCzzq6dXa2DmlRDNx1X85OPYi3mbhsM/P1//A/aP4QwdPRDIARH3we6vsOFQDd0GOswrcNhMJTnaI/Sijfr1RFTgxyboFwMV092PH34tOWKCP3Q4b3HB0+36Rb759QoZPPQQDfiZsQiBu8DIfSEYaA/2eBDYNj2lFrZnJ/Rnw6c3j9l5besuCuopWCNFmnO64HSBY8xdjms5qxaOte5ZXJuaA5fTdeWUtaJtehr1vLp3cjD9YHj8YAxtDiERrcWIce6/N04zflZFq4ItunznLfsrkaMCNvz4VM7sCtW6EdYGLZDayBUpWi1YHDNlROcbZMzLIbG3PD6NTt/0ISF7YExhM7z8L3Hy7U0/kYPo9YZQmcpSelgapyn+5hhnggom6TrwzKxC13XdD8WKcLZvRPOX7nH3/r3/tYXcv9WrPi8IQK5NKqjs/qnZQnXfONsDixreDYjUm8Eu6zXiycX7C53n3JB8L1HBLptaOZeGn/jOkeYG5joOp4LPZFKk9M2Xa1GWdnO44Oj78KST7fiBusduYUwxlByAqtd+zB0pKTaGOstHbNg1EAQUrTNHcxqZ8aAtaVpBgp2FoqLYI3B957LJ5dcPb0C4PLxBW98a7M4lM30Sesstmin1VotFuevi7FYDL7Txey8dnf0UFoZhi0pC11vkVxWJ8sVdwZzvIBOCpT66B3kSbugRdRIKMXSHPx0ylDbpKHExGF3wPUD1qia6OLxBfurT9k8G/Ko0QLSikQxmiNZayWOQui8btSlErw+K5zv1KbaQBwT4xQpAn1w/OS7P75xwV2x4iM4OT/h1a++hgH6TYf1jjJlrLeIlA8ZlsxseuchT3MHkOaEbJgz42q9cbvMz2XUHa72XD6+4MFXX9VcqtkZFkGkUEtCilnoYs67lqdosE5uYnyqYTjZAHDcTwwnA13whKH7pd67FSt+aWgxNGYySxRNaXIbaBE2qajWet6TWjOkVo01ODvZ8uT9x/zouz96oUu+8/23+Mbv/BpSKmCQIlRXl/dDc31G1OjE2nnafiMrKFk4HiasQBcctvN0YS1dPor1BH3L8MpXXiWXwrg/Eg+TZn/0HaHzpFiIY1IXoNIWa9EJgQ8O725G6M6rANa3sbrzFusdWNUrnL+yWa75k++9SUlqISstJ6gUDT6utTSbW0fXq/2zC24Z5buWI6JdWM/J+Skn52dYZ3nlK2ecnG3Z7eJiQ71ixW3HbM1eipqazHCdrr1+01Grbp5zeDdtUu6alsj1aiZhrB5wp3F64euLMeQKtUBKqgUqSd+HcSBUXb8iuGDV9daqYUqORfMhp0y/CZArj9559CJyvRVfUnSbnrMHZ0zHkRyzFlBFMDJr04BaW/Mwk2c2R5vmGZlHcc11slmii8BwMrC73HFoGYtxiuyvd8RjJE1JpwpNI56mxHE3crg+Mo2R6TAxHUfG3YHpOHK8PlJLBpkdoB39dsvm7Izrp3t1bF0/5yvuKFRK05rwz9OSnzsPAswWywvd0hrSlMlZMMZyuD5ofMEL4NnTC4zT82KOSdd/bjnICFJqiy0oGjtQNGDOOvVikCwab1VhioWLx3tSTDx9+PTzuk23Fmsxd8vw1W99lTRl9lcHxt0BqQXvdVQutRLaApxpk3PGXK21UU/apNvMGVLNpbLRI41RW+m3f/Decs1pHHn3zbeIxxGogLruxTFSUiYexxYgrpM+qp4kpWpouPNQq04L0lSYD641FbanGxXWfiF3c8WKzwdC0/zY5khZRAs3+NBGKKgxUcm6fkrWXEepQgge73Wy/ejtD1742t47gkdplG3iXmslpUKKhTQWSizUXNs03SzFpWHW91WmMXPYTTeb/IoVHwOpwu7ywDRGas4EZ5uFeAsCF1n67Brc3XLlUDdX65/7fJnmQjk3MY5xcWed8cFP3qfWSs5ZNahFmvYbrBVySuQpklOilEStmmWaUyaNkZITUipSlSXigwNjSC2uYMWKu4oQzHNGeIILjjzvP6KT89Icl3PUr5WsGXPdEJrj64uvESmCtMxjOzc4SyVPs/GdaXtUm8VVLe6kObO74EhTopbKZtthnCFOkR/86Q8+v5t0S7HOKm8Z/vyP/4z7X3mdk7OtFlC00G6jBZxUUTWCM2BER9hGu/TShK5UdbMDFb1Kac5ExmCNocjsnqeQKjx+7xEP3niVjWzo+g6MW8wUSizElj8HhjxNGlFQWrZWC46NTh0v51e21vLtv/Itzu5tVuvzFXcGOWes0YOrpapJUBVC79R5LxbdKI0KvWfht3ZHdSoR9xlOdZM7XB9fKoNxezZQiqDNV4M817wpAmFjF7fKkjLVufaM0C7oYT+pVsk7Li6vub54MXrnii8vfOf1QFgK3RDopsRxP+GcJZUKlYVuP+8ttVRl/s+f7Ua3nDVzoQ+UKfH+T9790LXG49holTT67w19suaK9yorMAh5avQugRwrOUXM0eC9Z9rfxHAMg2U6jrz35ju/zNu2YsUvFdLCSp23zQhL116KpX0f5sBw4MaYpKpRlpJIXryYGw9HxsORbruhZM2Jy1EziG2xhKBaPGkXNE7ZX9NRNeU5CXHKGGuZpkyZEt12pUJ/HNZi7pZBxagBjMM4mGJqRZTgnKPUjDTnn5wrGA1hBJprJeSmP5hDHEELK6ma2fPej99ffmfG/nJHyZnD9UgcM/22xzi/ZGallKijhj6qoUNhmpqBw3OFoW0Fnw+OnCrPHp6zv7ri3R++/bnfuxUrfikQKLPWoMUNGNus2WNu3U4t5uZ1N9MpjTUEDzKEZlBkuXj87KU0az/80x/w67/325zcc4TO3VDR/E0GpbreauaX888Vd7mQUsF5R6mVbjCM+8Pnd69W3HoYYxg2Pcdj1GgMr8YJc1C4tQbTOyQKNenkeTY+qEXIjQIss9OeVa1njplH7/10/qhUYdyNbE63lKJVYkkF54Naret2qJmOy2FUG4s167rMSRsutEDzq2c7pFR+/Bc/+SXfvRUrfjkQYDwkcip0Q4cLlsNOaY+5ZQNr/nBzlbTqKrs4LjcH2Y9Oyj8J42Hk4tEz3vjWhiJgmlme7/SFKxWyxvM4Z+kG1yJa6zLRz1UI3mFFsHje/M4PyGl1P/8oVprlLYRzlhDUqdJax3E3aaRAK6QEdSLSPJHWaLGGmoUcy0KznOks8wGvVhWhH64/3obcGhWV11qpRTnO8/i8ztM9EXJMTFNEGtVSpFBKpuRMmhIlqVOf84ZalaaZmu3tihW3HcZAPCZKKrquimbp5JaNM1Na9ADachqtacZGGiDeDZ1apS/usC+OaYwaddBes86aPGtANFtSxCw0OINSM6fjxDQqHW17Gtie9vMAf8WKnwml+AshBIzzVKHF5NiFN
jnTiJVqaZbCTZ2UzULllUb7t9bggydNP21BXkvh4dvvYZxdXPdqqZSUF1pYjlnNhRoNc+knGllMFmZ6ZsmFOEWEupoqr7izSFPkh9+5oSdKWxdSK1RZ1s4cTyNV9y3jjEaJ9F0L6345YamIWc6n82+WIs3p2T23V0FJtTnZagPmuI9tOKDMs835wHh4cf34lwlrMXcLkVMh9IHQhRY9YBgPEYOh3wS81257zULO6lak3OS2MGnC82b9bBrFyhjDePjZ2SFv/vkPcN4qx7oIqXGqkVkDUckpU4rq44yBXApxzJQmeC8t96qkqmGypfLw7fd/OTduxYpfAnJSdz3nLbSCKuc2fUAo7VArIkgRrGvGJ2k2JQm6fpue7efR8dQi5PJhvZKIRpE45+lbvMhs8V6r/nycEtZbShZqLkzj2gFd8ckotTUPveqtpzHhnGXYdtRcG41Sp8w51zYRNjc6N/f839WIQYDD9Z7Dbv8zr2tp1MqiVEvDjR7VmJsJn+r2dF3mVImTrs9ctHGCsVjrdI9aDVBW3FGUXHj26CmCkJKeyWLMGmHTHCWtvaFVajNm/m3DMHQLBfqlrlsqLtgWv9P8HFpEgdorqEZOzZL0grOOzzpw7Yw6bDSCy/u1bPk4rHflFmKWFoAeBGnTtTgl7aQIS9fRB12QSmV57lAosvxrWbzWsmtxBB+HmXrpnAYeS6mo77MgtZKmTJwS0/jc6L7MOVY3++ScuzU/LP7iX3/vs71BK1Z8gTAG0pRJMS/r0LY1ptlv8w9CTrkZB+UWlNx0QM0gQjPhXu4x7YO/kT20RWedboJd75V6aW2jgMpCm0kxcxyTumq2DTPlT8+2W/Hlxk2vQfcB2jTYOksYPLQ8uNoaF7Mu1Aa7FFzL53QOrcfw6N0PfmZx9ezhEw77I0KLOzBmycWa35QApdZFk1dz03A7Q8nS2Cn6Oa+1sqZvrLjr0IxH02j3tjVS6lK8mcayso21gWhTb57IzWZaL3VNZxG5OX8aa3He4YNne7pZ1iDos2Ru+McpE495yamMYyJOmZeQj3+psBZztwxf+fbXOH/1HGimBqVSc5tLY9TGlVZwda7FCTQ9QnO2Q/QAJ82atrYdsFbh+AmTuZwSx90B531boDd0sThFUsrEWUj73OZ8A4NxDhc8PgRK1Vy81UBsxV1CzhljNJdn1u9IFWrVQNScVZtgraHK8+6WunGmcSJFzX+kZT++DH7j93+rFWs3rmA+2LZp6nNCKTQVKdrwySkTp4x3jtCyIacxrXk+Kz4VSg+WxWKcqkVbSRXvdQ9CnnNRfq7BN1OQZ5oxShYhDIH8CdT7OEZSTJSYyangGlWzVo0DEaTpgDTrsYpOym0zBHNOJ98+aGD5TEVeseIu4/LxBaAO6FKFmpUpUoueBVPMS1C4rh0t3sar42Jk97LrpOTCdJh0gGCVNea8w3deWStJzU5qlTYlb+faIqrf7hy1Qkr6Nb/uSR+LtZi7Zfj6b36DfhjwQReE65RWGYLXfLemmQvdzb9nC2iq4Ky6S4rchDPWFug4HY+fSLOcjhP7q2t8F5iOUwshjhz340KldN7inFuiEUR0KuC8AzE453AhgBhMhe/8s3/DuB9/SXdvxYrPHzmpkZAzVnWr1mjDpFTiMVNSYR6+zc0ODUjWjW08TjdThp+jC1lyYXMWmhbBLvQV7bza1vks1FJw7XuXz/ZIKeQpQW2bqDV0/bpxrvh0dEPAB0+/6W/0b3PjALNEdVgLvnfLQXE2KLHO4LzKAXIqHK4OnzgBePVrr7M9O6FKo3Ch0RpSZi2crinv1al1jh/QA2k7THqvRkBWjRdemj+2YsUtw8N3HnLcT9SiE7Na1ThPkCUaZ1533t+YFOEM4zEShk4ZVS+B2WxrbtSIqB4WUTZZzhqZI83dNsfMNOXFnMhoIKU2JZ2jH8Jnfl/uAtZi7pahZjVVMM7iO9+mZJ5SWyHVCrVSK85peLdudo2OAs2x6Dm6F1oApph+yjnseWxOt7z2ja9QcianzHScVGfAja7HNerkfHA07TDrvG6avvPP5ZlUpv3xE6+5YsVtwzB0eO+azbJOvEutiLO4xe2VG7cwdGInoI5fTQf7fGjry0AEQjcQet14jQCYpblTsupYZ61RrcJ4GEk5L9Q37w1FhLe+v7r7rfhkGLQYSzFTZY6jYdG2eO/U/KQ956UUrBGMmcs8WiFlFkrk5eOnP+WoPMM6yxvf+tpi4AOQUtECzqnJQq1Ns9qaGMtJci7wWlxBaaHFeap0Q1iHcyvuPGquei7DLNnD1qi+ux+CrtUijUGi58XpGJnGCBVC93LFVNd7sNrI98E1Wr9tRVymzCZ6tTYqZVKH2pzU3RLACGf3Bv25vPKhPw5rMXfL4LzVoquNxlNzn5vNTELvcV5t/401bE4GFZc3UXnJmWlUe1q1otXXtcZ+4lRuc7rld//2H2CsJU+JNEXNKWkULtuciWa6ik7/ZBnL16p8bectwzbge8/+6poff+/NX8JdW7Hil4eriytyEdT7XPn+ZtG42udoj22a0NZJbdO4zelGu6ZF2F3tee/Ndz/lih+GMS18tbC4CTpnZxeUZj1d9MBrLfEYQYQ4JkLnFkMKa+Dxe48/+xu04k5BUJ2LNUqbjMeknX+vzbvQ++c0bbrhmKVL3zKsGg25Npe7T6JSGWvZnp9SsprzPG9cMsdsKHWyGa84uwQhq6usxiMYZ5aIBGMsw3b4PG/TihW/EvCdb01/0bXZNKw0x+XZPGixyquC94ZaCteXO37/7/41hpPNC1+v1Kq5yPPZ0FlC55p7ZoYWrZWLaspLrcQptdgRFqf1427SCX5Yy5aPw3pXbh0M1vslnwo0N84YpTcaa+m3PcPpBhd8W5SCs2pNm1KmDQag9UWlgvOO66eXP/Oqr3z1dQ1uPEzknJm1N9ZaXOdVe+BubNZnSos2XDXE3PlGt6w01z77nF5oxYq7ge/9q++BtMk4SmfOpS7OftbpwTZPZfn815aX1Q+BfhOouehm1qJAXhTnr9zj7MEZLvh2UlaKmQtKbauipielFHUwM4YpJmIqurFXNWWprRm0YsWnosVnlHYYDH2ne5QPbE42bM+3WCNYRLVts8lBa2Qsld1isCCfGFXzxje/tmjtoIWOt4/qTK+E5s1lZzqz0otp4eKuyQC63hF6Tzd4alnNflbcfXz/T75HShkxpumyW1wIOi2HFlfQeP7GqlFQzYXj/sh4GF/a56CWJv3xDowlpkIpmmk6Dyb0uTD/u2Kb423NGWf1vRpnKasDysdiFUTcMsQp44whZ6W01FqZeygYi3WWYegQgThNXD66XHJ0SnkunNWANME5xuCaMPzj0A09X/v1bxKnlvmBThU0J04F5j44cqwLvVI1cmCcdj39EKAYrPXUWnGeVci64s4idB6p8wkzEbwjloo1c8aO1azGZk1bSsVZLepK0WlFOiRS/OmcrU++blCqDK276mzLo7SIgzRp9uOioXWW425ESsVZLT4NBus122e1al/xaZiNTUrSpqLvDN3QIUA6RvbXB4Ta
Ouy1URvLEr0B6l5ndDiA5MSzh09+5vW2p9t2ALVLLt1Mm9S9zLWMR53S+U7dXV1RA4YcK84HusFTSlEKmLfUnNbP+4o7j2ePL8gp0XVBBwDGIKVg7E1jxNBclY3BO0tJKq3hOHHYHV5KGmNEp/RpKnS9x3eOKWbimBbZkG15qyIaeYU1+G6WGVimQyIep7aW1ybjx2E9Td8y+E5F21YqZcpY7/CdELqgvGejjpbT4cDVk0um41G7MG3jrFUQowc27a6o8Pz6cv8zJwBzVpUPXZMemGZla54LX9VFN1O6apGmjzNNwGqYjtpt7TrVEu2vPj6cfMWK246ahVwytU0CjDWEwTONemAsLVNujvUw84FWKvFQKWIWt9nQB9L0sycVz8N5nczHpAdX750ebrNO5cTUpnMVQvA6rWsTOxHTYhIK+8dHnBNkPd2u+DTMZlpWcwyH7UDoA9N+5OLxBYfra2hFnGYw3kzQNFrAYGfb8lKw9lMIQ9YQhrA0Ep+fIM9rzTrXDoTqzKqZiqofF8mqabWGvgtMhwRGjVFWrLjrKLmo/q092mcpTEkaHo65ySdFhNKy4IyzVIEcSzPPezH4zul0vPOEPmguZS6qIxfBYFRbXkRz70p5LiNSNX79JlCD4cl7T3jrL1Yd98dhLeZuGWrS0bQWYY4iwun5CaHriFPieL3n4skleRyZxlGjCqqGvNUKc0i4tZobYp3FIOwvLn7mAjVmdrXTcFUsS8BxKQXmAPL2X8Eozasqv1kPlQZ31iknWgoShR995we/tPu2YsUvC/M0YLyOizmEyQYqOGvAq5X73DyZTStF1MhBrTA91hnuv3afNH7thY1ISi6kVBExOKuGSLPmQA/dFbECRRtDOVVSKlgL/cbf2MN7y1vff/OFi8gVX178zh/+LhjHcLJZdNrPHl7y8O0P2F9eQdUiLqXSNHGt9S+CFG60pc31fM49/Nkw1GoJvW+uzLWZoeiU0BpDoaqZj9GAYmPVfMg6gytVpxClkqO693lvCbMBytq/WHGHsb/c8f6b7/CVX/smw0Zdz222mMws7laI7hnTlCmxEjaeru9eOrTbeb/oV33vyIdESQXJ6mBZDBhboWlarQHrmzO7aLPz6vKIcSBGmMbpM70fdwVrMXfLIKjVaymVYTuwQQuzGBNXT685Xl+RxpE4TqSYFhvmnFQjY5sFujE3TmA63v7k0bUO8aoGNhbdHJfOfintfVW1Om/ulXNnxYhoN6eJWaUJYf/xf/w/5L/6P/3fPs/btWLFF4JhCMTcc7w+YpqT7BykXKosk/Gaq2p1REO7jTF0276tTc2D21/vX/i6D167r/RIlLqC0ViElHKzfVc6p+09PliOh9S0rpacatPNVbbbTn9/xYpPwdd/8+v43mtYvTNcPb7i6ukFh6s9oBO52W3SWrNotEVkKcCKCBalTr71lz/+xOtZC9boQdM5By0Kp7SpngsWV+rCGgGlbOVUkaIuzrlojqPvLFVjrj59IrhixR1BrULXB3Iu+OA4Xs9Nf2WD5Fway8phTMV4daHcnPQvdZ1XvvIKX//1rxGjqG67CNNR6ZWzRlbzH9XdctbClpzpt31zfPZszy04SONKsfxZWIu5W4aud1gHuYJxDiuwvzpw+fSK62dX1DIRR9XHWdcyOlpnsusdOcti6ayudzpNy80Z7GPRDoPaPbW44JFiMH2zeRbVwZlei00zC+Jz0dCD2fQhK53MOjVBsSutZcUdhbGWoe+ZjpkcJwyzXk07jTklNRopN46yNVds0CZICAFjhON+z9P3f7Z+6HlYa/n6b3wDGzy5KH3NtHXnrCHFhEWQLAznHTEV3UBzoWZBfKWkjBFIJb8UlWbFlxfGWtV4xkzNhcfvPWF/eU3JERF1qMMYZjakTtNoTT9LbaHf1hqs8KlGJEYEqKRpwnfdovvxPuCD1cDwqHpQi8oKciyLy6uxIKWCNUzHgvP2Jpd1xYo7jrP7Z/z67/2aGp44R+gCVUQ1caIxOnZ2P24ROZuTvlHxpe1NL1ZUafyH4IJvvgqyRIiYlnXX+Jzk6cbUSJrOrmZlf1mjOm4p6570s7Cepm8ZjFFaJKL2rcfDkd1+z+7qihRHpmNs5gcO57wGt1btYNZCO0wKtCwRYwyH3Z6rJxefeN1aivKcSyHHSM2RHCPSMj+ssxo6mSo1FaSW1nnVP6VUQm91EpEKGCGllcK14m6ilEwpma7v2J6fEYaeftvhgiPHTJ2LLVg6oDZo7AegGVxGp/AvChHh0XuPESNLGLg1hnjMpFTIuVKmAs5gRLDN1W92m7XOYYMji+rpPp3utmJF21NyJo6Rx+895XC1Yzwc234hrYlnlhBv6yy+cy24W/OtNIpDJ8PuUz7zpWlMkUKOIylOWIuaeKFNDd+ckqvUFt3Tpg5G2jS8Lq9TcuXeK6dr82LFlwKh73j9669hrOH0fEvXd5SUsXNd1SDGNNqjOpP3Q4fzjq5/8WLOeUfoO22sGEsulRIzOSVS0nU4T+Zqrc8xxyCNqU0ODSIV7wwnZ9vP56bcAay79S3Dn/33f8ru2Q6kYqlcX+x48s5jpsOxHRLBGLccEo012OCWjJ1FIO410BsDff/JIZApJh6+9UETiRsVihsoOSGSNNOquRI5rxO5krVoLLkuQtqatbDLqagxw+pmueIOQkQYj1FF40aNSE7OTgiboQV3q0qutiBl07qfzjm6TaDfdBp4DAzbF6e1iAjv//g9SkxLVlDOFetNcxwUxKGT9ixMU1JziHmCj0HEMpxs2JxuVtewFS8EdUatGAq7yyvG/UGnzhUQQ+i8duDbfuS8xgIYAymqi96c9/b0vcccd4dPvJ40/bf+VyhJteK1ZKUnixZs1hlMCy83KN14NkExdjbvqtRaMEAc1+biiruPWisxZkKvrsfH3ZFaSjMUujHBm11qfdD4Duusmtu9RFROmiI5JkppjZRaCIPDGUOVSui1qVNSaWdV20LCTXtGGHKsTKmQSsWv0/OfibWYu2U47A6UmkkxMR4npVbmpBMwYzBeNXLGumYTrR32MPgWxtqyr5oNrDRh6idBRMgxYa1TAxQxYNSxMk6JWjIYmiZBaS0pZkpWXYNBENFNc960QwiLg9mKFXcNXQj44DAOsJByQkrheH1QjanUFuw9N1jUgEEd9Vp4q7XaRHkJ1Crsr3akSd3KjFFKWYoqOq8tu842irXSsS3d0CNtemIQqtTVB2LFCyFPmYpwuDoyHUawFtcFXOdaEafTX3WQVCdjjGrnpDFEaNtQTulTD4vOK+PEOouzDt95MMJ0OBLHiTgmjG37UcsTr23SLXOwKnpwrXLj8rxixZcBv/kHv4WIIfQdu8sDzx5dqDGW3DQYMepCbq1mogralHROp2gvissnlzx85yHOWwyWkgolZkTqsu5qUYdlyXp+tOg1BD1nOh8Y+p4udKsh1ydgHY3cNgiUXOm3luNewxv90JFjwXUgRbUCmsNDm4wJ3jlcaIswyxJWLCK89f1PFpwDi+5uzv2Q5jq2mH+JHgCtbYvdyGKBboJuuvPDwjj
t1n4anWbFituIV7/6KsYZaiw4dFO6vjxy9fRSKcqlBYILC93MAji7dCM1YNXy+L0X08vNsNYQjxHvD1hjG80tU3IGa+hadk+aCkuwlzPUUpSaliOHQ+L8/pZu030u92fF3UJ32uO8o+YM1mpcjlWrczGyOEdKEXWZbDpOaIZYws1+9ALXcy1uQ0T3EkELwq73qkfNGouj11W9do6V4DSUGGOeM+LSyXRqGvMVK+46vv4b36Tf9pTrkeunV0zHsRmfqCusMa1R0kzs0jgRj4bT+x2lFE7Ph5bt+GJ4/8fv8ca3vtaah5mUkq73RuFMMVNFqf5gwQoG8F2Aqmfa0DscwLpGfybWydwthVQhTbntfkrTMmgMQCnS7M618Cq5FVbStHKNbqKaHPNCY/NnDx9Tc8Y4pUdap9O/0HdavFU9nAqCDxoqbhyEjdMJRHMzm500fecWjcOKFXcJv/OHf4XQ9Vhr6TYdtRSm44jUonSWUqHOWY+mxQEYjKg2QTdTR9eFl9o0gaZ/sy3jTk2LfO/VtKiiVOnWjCm5INI0sCWpjmFKGGAaM37dOFe8AMbrI2lKjFPUZp5V44LZCGs2KDHWUFsR5xvVss4a7oYXCSNWgxSlBRur6waa9tPO0TvabHRBtdw1t0if0iZyWXOzNHhcDVL+5P/zJ2sswYo7DRc8zur57enDpxx3+6W5KMJCtzfGtIGAWc5truUKl1w/jcz1ITx+9yH7iytyzKQxUVNt9Gp97ZK1sBPUmM96NTsxUsk5YkjsL3fkGLn36jnf/p1vf2735zZjnczdNhgNBs4xt8OY4J0ljln5xrVq7IA35En1CL53S9bcnOUjLcx4zp37NIz7Y8vCMtQs+F43Qg0Ml8VYxTl9D7WqXkhKVa2cASNN4OrMEky5YsVdQ0qFUoXN2RZjHBePr4jTRM7q9tfYI8sUO/Sa75aywcGiKfrx937M4fqT9UM/hTZBT1Mi9wnnAyW1CZxUatHuqFRpugSldRqjDRipcDxGAJ4+fPqZ35sVdw85JjUVac985y2SNHDets+yM5ZYczNC0KJLSsU5SylKdwSDfwEd9Zt/+n3u//uvq9Oe9xBQk68WDD7GCZlmHV7jjkgB0YkcRieD6rJsGTYdUPjgJ+9/vjdqxYovGL/37/w+b3zzdZ4+vuDZw2dMR81sW/JFm8O4dbYVcJau9zhnyDGDsSqteQlaci2Vh28/5Gu/2YO1dBsHzagvl8bm8oaSEojGhgBIVm+G3Fhmh0Pknhj6zcvFI3xZsBZztxA1V0ywGGfwvSfHpB1IY24Oac5hnTQmlbp6GWeoRTOu5li5l2n8h+AJnV/eA6YdDKVSYsV3Dqm6eH1QUSsVfO/VMCUVqhRstaSxYM0nW1CvWHHb8PrXX+fXfv/Xscbhg+fisUaG5ClSc0Fn5lrE6R+nE7wh0IvaMOcp4fqwrLWXQT90y7SvlIILASk3zrIYixEw3unBVppuCS3kfOfVfTZnHr798DO/PyvuHqyzxJSb6YjVJqNp9VMVCiy0/JlOWbJqZkTAFLUqt9bgw6eThURoNEqdvJWo9LBSagtzBIyQc206PS30QJQO2hgkUoqaoli02bGO5VbcYRhrODk7JU6RJ+89JqcI6Odf5o+/1cioeUKnB0XheJiwPtD1jr/80x8wHsYXvq6I8OzRE772G1/HiOG4m3BOY0KkCqmq261thnxUgTYxR7Q5mkvm9HxAPiW25MuMlWZ5C2FbTpwUWbQIUgUjmudhLdRU6DZeNThtj8qxLLlyrtlBv0gndMbDt98Da5ouoSrtsk35rLdQb7R4CPSbnv5kUNt15xdKS2OGUvK6MFfcLXSbnlfeeMCw0eDvi0fPGA/HRg27EZHPrrAGi0grvnLFe98o1OmF8+VmGGv55m9/Gx+85gZNqUWHGFwf6E82uBAwzqmG1jk1pgga+Gy9IxfB9Z4P3lqnFCteDFI1+N5Y06zFmxOeNzflkdH/MYvpgX5H86PaMUSgvsiWYMA4o4VZUk2cDw5BKFKXLEfvLDmqS58PnmE7YKzFmNZICU73sSrsrsY1mmDFncZmu+EP/wd/kycPn3H17Jo0Ne2aMTfZb4v5yMzi0jWRY178Eo6748+1VuJxwlARqkYUNKMVwWC9asfrnJ/snT47nEoVTk62bE4GDvsjj997/FneljuDtZi7bRCIMTKNkXiMSOtczBbn1tEcw4Q8aedxzu/QSYCO4mYb2tLc7V4ED9/6gDRG7bIGg+0sJWlXVDdSIWXV6aRYWljsTVe26wNgmcbMFFOjeK1YcXcgIoz7iPWWpx884/piR2mTsZtTbpuce4fvAs7PNGjRvB9nGY8TF08uX/bqxDGSYibFQkoZoQKa7WidY9gOOB/ohg7nA6HvdPN06lTbDz1D7/ngJV00V3x5kWtt1HqdhpUsLfLCNgODunT95xgC4wylqGvrvDTUYfLTD4lSK8erPcaooUlKmXGclFDZ5AS1NURyysoWabqfUipiDKELIOrYd7ge2ZwOjeq5YsXdhCAcrvc8ef8p0/7YmjCCweKDOs8ae2N0N0d3pJjJMWmUFcLbP3j7pa9traW2To0PHaHvCaHDhUDXdzppd8pmmZub3nssViNOWo7x8XrPs0fPPtsbc0ewnqZvJQz9EBo1Sp27XOduXMMQsCAtKFW7H22iB4tegGaA8qJiVtM6orWUm9w4dT3RKZvo4s+p6MQO3eAR0zZtdSUyIuQxIy/Uhl2x4vbAh4CxhmePLrl4fKkxBI3GaK1rpie2ZfY4nNfRdpoix92RcXfUxszLGp+gz4KHb79PnKIeZqNGmCCCQbBG3f1OzgYt4KwFDM46zQLzjs1Jx7s/fPvltXorvryQeU+panxlWUywMK24SvVmm2nUXmsaowM15sqpvJAZV8mFd37wY9KkGadaNAo5l5Ybp+snRm0s1pIppVBzJSfV9NWqAeXGwnSY1AZ9xYo7DOc8zx5dcrjaLbR7aTlyznmc1YJuiSZwLZMuFVJWyc6P//zNxjJ5OVw+uWB3tWeaopqcWEvoOqz3+BBwodGm24TOO4cYC1hCHzi/v8F7z4+++6PP/L7cFazF3C3E3GEMvW9hrbRCSsipUpNqF4wqWjU01TT7Z2HpwFhjb1wuXwDWGoJ3GNf4zFSs1c21ViFl7fQgbWPNFSl1oVPq9wzd0GGAd99853O6QytWfAEw8Pf/0b8LpfLs0TP2VztKbkHGWJzX3Efrleo101sAjruR3cWO42FERPjxd9/8ud7C9bNraikLVea4n9S5slbGw0TJmTSVJjq3OOe142oszlhKjHz/T77/md2SFXcfOtVtjcR6E1kzO0bmWKjmhlqpv3NDwTQzp8tATi96UDSEzlFyIU6JkgslZWWaGG0Whs4rXay5aJZSVZPXpnezRtQ6g/cvpx9fseK24a//0R/y+J3HxHFqWlFHNwSlHlulQLumXaVq9ltOKqVxzvL43Ue888OXn8oBxDGqcUoqWKO5w9ZZQtO++uBwwek+FBxi1WDP9Z7tSY9tDLMn76+mXD8LazF322BUzF1mMw
VDs2duxVanh7PZbUi7ozONhTYp087LbAstn94MXVByQVLTJRSldIJgmXV4SqExqJahzN1a0yxoUYqLIHz/T/7is747K1Z8oTBGePLoGc8eXlBS0gOsaTQW65RaYv0SS4ARUsrkUvU0aQwpvpxb2PM47g/EKeJDUOexGKmptmBmYTomSi0YNAfMeoPxTcdH4c//+++sWtYVL4WUMtOUtUkBKOXDLE0/3zt8MyCRyofiCJSi3yI6rOWNb7zB5mTzAldVFogLTiMIvCP0gRA0BgGnFg4+uPaORF0za13YK4Jooo4Y4phetKe5YsWtRIyRcRwxzimlsQtYe8PQmIMJQOnLtvkqWOvoNj3Dpn+pwPCPwgClZAxCHLUBY6zGVBmx6jZrHaDNzn7watYllVohjfmFBw9fRqxulrcNAnnKiNXiqWI0O0fUCUwqSqsyZpnElVRv9DpStfvhzNJJfdHlMdM69e+tIGwL0rQHgWsbuLGmHQoN3eAJfVAutBXO72+VlrOuyxV3DIf9yNNHWsgZY5T+bDRry3r9uw9usQ9LY0GkHS5FmyPXF9fsLq5/ruvXlhdURRZ3MDvHxVkWC3mhLlMJKRWL8PTx059LD7Hiy400JkLfQbkJAbbe4r1trscajWEwiBUkaRPSOY25ASjNzMu0jvynwbRmpDHgvRqZiMhiruL8bLHept+WZRrge0fJOp3znWMfU6N5raO5FXcX036i326g0fxnnwU9AIrmzTV3VzUqqq14E6oIX/uNbzEeEz/4N9/7ua4vAof9RL8ZcM4yHiZ6EVznNdM0a+M/hABGsE5TR0Dp0W//4C2O++Nndj/uGtbJ3C1ESpk0ZfJi7yzNTrxtZm1DK0n1a1rECd5bvHeqZzC0rsuL5cwtMICVm4nc0qmRppOTxa2PFgIp7X1hdKFa67h+uv/M78uKFV8oBHbXe6bj1GzTA6EP+E4dJEPnWw2na0ELLzUQss5jvccFx3u/IP1Yqr6u1EroAnHKOG81KoRKiZn91UhOqeXeCT4Yvvsv//wzuQ0rvlyoVU0SquihD3uj0XbeNm2OFlauxQncFE5KyQfVz71IIff8dXNUOpi0Sd2sNzUWncRlUdqYNFpnhZI00w4gdIF7r55jqEvm1ooVdxG5FJ2YW6PnvrYGSs7knNqeoedGbfRXjQKQgqWyu9zxm3/11/lb/97f/rkmdPvrfaNx6pHUeUMuBWv0vZmWaWeMurEHZ/HOYo1nGid+/L0ffcZ35G5hLeZuIYzT7A/faC0iLZzV6UKdHST94Fruj46pdRYwF37KrXwZiqWhBR2bJlR1FpprmVTdyK2x1FZcuq7xr0EPjSilJqXCcLpp2T8rVtwdlJgJfaDbDLgQVNzdYgDmbK2cstKVq+oAqELwSit59uhpc5j9OdE2xn7bNcdK10KSC87rJp5yBlNV01qFPnhyzHzl21//7G7Eii8R2ue4/Xc+6NVmuOOcW+iXIkqxrLkVYSILdWrOmXuRCdk84XbeIoZG0zS4oE3GHAumaVKltqmcNThnsLZRNL3HOceDr97ng7fe52J1yVtxh7HkPMpNs2/cH8kpUmKmpEyKkZwSaYrkmCklY61FinD5+JKnD59x/tp9/trf+4OXvv6Tdx9irGF/fWSmSUsqxFgoUYs6ndQLXeeQUskpkXPVrLmYPvN7cpew0ixvI4zGCuSKdi62PdA6/UUNUZx3GAQxN+5dOWYt9IpqwL032EYDexHoJlwopWCa/kCqEDqHbY6ZtQjOGKpAiQXnLLlUfAumtFY1fa99/ZUXci5bseI2Ica80Lw038pRkhoE1ZQp7dArSZoxQ6GihVYthde/9hrf+u1v86//23/B9BLBrDMefO0rvPLV1wjB40xFqurySlH6jE4xDNYVXHAUqZRaOOwnTh/c5+T8hP3VOjVf8RIwWqTRNNvWaEEVp6wRBcGSc9H9qO1Rc8E1NyA1wF7UdfIFmhnzNJB2ADQW8pixQQvCOBZsY4nMTJUsmWosrmtNRFEdX23TiBUr7jKmMeJCaKZ0mZIKUjUaRBpbZA4Cbj7lOgjIFecdOSX2VzvGw4jUSr/pX2qabYxBaAYnCK5z1CzUnJd81Vwyg/e4YJmOkW4IpEnjR1Z8MtZi7hYixQIFTGeRRm80VR0uEcFZLaBMcw1KMaHsFRWgF1EKSgie7Wm/6As+Db/3d/5q08dZSqkze5MpFoxo9hxiKKngO9c0DarhcT6QYiRNE/deOWUY/CpmXXHn0PWB/e6ImYPAUyGnrJTG1miZD6wuWJ1YizAd29ecI/vI7/zN3+PN7/wlx+vDCzU9jLW88rU3+Nbv/5pqlZqulrZGMc0O3qpGorYpYdc5jIHQe85fOeeVr72+FnMrXgrTMdL1HbVWnHUaChyVvmuDFnrWyOI8qRRMFsMfa9T8IPQeI7KwOT4JxmhouAuW8XoiZ2Wn5KSHT+/VzbWUivdO3WOrgK0Y49Ry/TiqScoDNQpaseIuY3e5I3SBmholutSb/YG52X4j2zEYneA1d3JjDFLVcdY5y7AdXriYe/DV13ntm1/h5LwjT1X3vaJynWlM0CtzrO8d3lnSlIljbo7rgvUd3/orv85bf/Gjz+3+3HasxdwthOSKHwJ5yksoqrWGNOXW9QBErZhzzCCVWhtP2hgwFuOg2/QcrvYvPL4+vX9GyoL3BpMqaaay1IKxljyVm1Dy+ee03dPcwoTjeOR6uObZ+09WMeuKO4dpnKiNQinNTrlkpVXqIVUWR9kSK1WqTixaxEiaIiVnQt/zt//9v8u/+Kf/vxcqrr79e7/B2WuvkGJCioMyF2xKv5aqTR5cKzIzSBGGTdDmkFgwld/6g9/hne//ZJ2ar3hheOfZ3jslThEjEKepRXKAc444Jmot6l7n1KnOOYu3Gp+jyQSqv7569OSFPu/WqINzmgquc8ikdBORihioRen+vnPauOxcM3yAHAvOGaYxIlW4vtjh7Er5X3G38eit9zk927bzoaFUzXuTVs2ZxqhSjzxlc6nvgkoHnNc1tRimvGAv/vVvfZVv/u5v0A+efBypLVOyG5yuUW/0v80UabbUHDadRpuMcXHUXPGzsRZztxD9EOhOtuS+aOe/JGrM1GqW4PBxP9GfaLC47zukQjd4cqz4xkeWWnn4zgfE6cWKOamC847pMC7uY9gbO1tjm/GJU3MVrMV3dnEac87iQ8f+euTJ2+9x3K3F3Iq7hfE4IkU0XqCUpUgTgVxkobTcbKAgpZKS5mJRVN9TcuHhW5Gv/No3+OG//eQIDx867r32KjFHOh8IXUc8ZqBqnpAo7U3QRtBsRZ2zFnHGwvn9juvLI85bzl855+Lxxed7o1bcGfSDg6oaNCmCHwZMtVQl3sMkGGfp+0ApehgMvW96bdVvGwx9p7mpLwLT9G9pyuQpN31ci/6uYLzBVAC1VzfOYtpBtNaMoJO9ipBS0kPkihV3FMPJwG/+4e8y7iKuObz65jxurToaa6KIoTQdq+q8C4hpxkRqaqf5cEGbg5+CB2+8ym/99d/jcDgyHqa25m+yskyjdFYRnDMM2
14nclabL7WgWcnB8Nt/46/w/pvvsL/afY536vZiNUC5bWiUKCkqWDUieOfpTwa6oVchOLowrDGEPuiCaV0WEBW7lko8jHzwk/df+NLaqdGOa22GJ6Y5WtYqLTx8DibXzTVOeTFOMa3AC13A9+Fzu0UrVnxReP/Nd8jTSJ6ihhjnSqlCrqpbK2XOtkIPlkX1PFIaBboIOasL7TROL9RoefUbr7E5HTgdtmzPtzjnCINqlwymUWeau18VclKXsoXyWYWw6ek2geFkw2/+we987vdpxR2CRYXYNGOF5qCsGu32QW/UXtXFVdKUqDm3/UIlAaHzvPnnb77QJUup2iTMegg1bo7e0cl3zqoFyrFoLEHTxeVc8MHjg2t6Po+1jr/412vm6Yq7i9e+/hW+/ptfZzjpmhGQTsitczdnMwy1ttziZh4U+o7+pNe9xOrPuxCI48Rh9+kT9De+/TWefXBFHJWKbYxrsVlK15xlQq4Fk+dUGKeEDR7rdJ32m04nhXIztVvx01gnc7cNopM3a8E7ELHUUhh3kZwzkvMiRp+OqeXx1Nbt14OjdSpCPTk7ealLl1IpxWCtxTnRkEdn1SnPWeZ0cNMy7PQhoTxrbcRo3lCl8uyDx5/L7Vmx4ovE7uKaB2+8imm0LWmTBteojjMzpeTa9KdCKqphEAxUdaJ1zab95FOaHtY57r16n+AMfttjg22aBpBO6ZulafWMuenAGmPBCMF7JBXSlNmcDhyvR6SuoeErXhyzI3KdEkXUTVWqUojjQXNIpVRKKZSibpZSdDImxWgh1/c8efx0mVh/Eqy13Hv9ga4Zawjek0tzis16OKxFNHKj86qVM+A6deUTjP5XVD9qjPDk/Sef811aseKLwcnZlq++8RX2F0eMBd87pn1sETkW13kMDrxq5OrMukJ1c97bZuqlhZ/3luvHO+L46TrTfrvl5HzbHNhBqmGqE0ZoDBE0NqRWjLOac2cMOVa2ZxtKyhz3e/pNj7fuxUORv4RYJ3O3EKYVTHGKlJpJJZNiYjpEUszEMWGsjs9Vs1O0+9+maMaqteSP/vyHL3XdOGXVxM1W0I4l/Ns6q+HD1jTtghadJRact216B9Y7vHe8+8N3P6e7s2LFF4PQdfzm3/gdfNdpOHFw+OC1++8cNI2QMWYJMxaapXrrkLrg8J3HdwHvPe//+JPXiQ+OV994BeM8IoY0aQyJTuiNHlhpAcvWYIMFCzllpYG21zAGLJoB5EL4uXKEVnw5YZ3Xrr23LQhc9xyRSkxZM+hq1c+9vXHOMy3jtCTBG8PTtx++UDHnvOPrv/YNjfowavyljZLWqPQaWWDtzVRa3ZwLGEsIFmsMXR9wXeDN7/xwybpbseKuwHnH9nTDK2+8yntvv03a7ej6HrX+D2Agxci4P1KLGnFZo8yp0Gmsju883aZnc6JB39ZZhqHj8buPPvX6X/u1r/PK6+c463HWL6Zgzjmsd43ZBSDkotl2Xa9xOoI0ja1nezao9tsb/uo/+Ouf8127vVh37FsIQUfOoe/ph44c82K2gFHqlHW25c+ZtrFZjGiQsYjBef/S1udSi3ZnvMN5h3d+CYIFliBkFyy+cxoCaxwGvV636THW8xK5sCtW3AoM24G//x/9ESEMWN/CWFPRBocxywbpOr/QWEAn1sO2b4Hi6u7lgxZyaRzZXX6KPqCJ0aWoQskFu0z1rHV0Q9BDrbPEWMipkicNj621ctgdtdFjhFIrpw9OeeWrr3Pv1fuf5+1acYdQqxCGgc3JKcY4nNdO++yeatCcw3ku7b02OYyds+IMKUauL15MCyMiTOOE946w6QibgeFkaJRJu8SCzOHIvvfY4HT6IGCsoz8d8H0HGC4fX67OyivuFKy1fP3XvsZrX32N6Thp6PZ3f4SLCYfHGj23ee8BQ5VCmibyNLG/vKZMCecsznu89+rEbFR3+hf/+rvE6dOncsNJr5psY5asY2VuzVmSKsXxczZkVvdK5xw+2IVRdnK+JU0jVYRhs/mc79ztxUqzvG0w4Lxnc7bFBcv102tqcxxyXukszvnmWgklNTqX04LKBUdOGe/dC3VBPwQR8hTBOqx12tG3TimVzmKdbt5K5XSEft5cdePOqRCC50//v/9idctbcWcQOo8PnuurC1y3JViLPx+IRw1fTTEiUrDOE7yjOp2ElVIbBUzoN4GahVJ1ym0Qfvy9H6kA/RNQSuHy8SX333hVBevNYMI5S/XSpuVOY0paftfc+Km5crweCV0gjRXBYg0Mpx15zfVZ8YJw3rM52XL99FpNDtDP7BzFIbXx7KXFY4TmaGnVgKSKkKb4csYGBkpSh+T/f3t3HmtZth/0/ftba+3hnHOnmrp6np7bzxPv2ebZmMkgiGwzCPMHIo6IcIAIRYoUEiWKcPjDSiKkRIlCgpIgITCYBEGQGWwQQywDxgHZ2M9PfjZ+U7/u193V1TXcuuMZ9t5ryh9r31vVk19VdXVVnarf56m76p57+91T59xda//W+g2mchhXUWGpXE2MgclGRUrxtFaubmpMIzjnTnf5TRkzd1ujEJRaB82kZjqbsrm9QQjxtI9ByplJY3jti1+lbmZsPXmGlCAMgUTpwyBj47oMdKsVi/mc2fYm9da0rBE+MttqON4/uq3n4jtfujU7i03CzSKD0s0yhkjMGedOujuXU/yqtoAw9IEUYbCGzZ0ZRweL08Yp6v00mFtDdVVSFRcHC5bHS4YQsFbKjJ2YSl1Cn8fGI4JxtqR55VIrIMDu5d0771gngm0cvis3qWJMqY2IGZvs2LWv1On4boCUCD5RtRVNWxO9J/pIN1/e+xdFqQfAOssTT18gI1z+8iV2nj7H2SfO0S8HJEnZzZRSV5RyWTj9EDDZILUtReFGgASmzGus64q9K9eI4esHVDFErl26gjFC9Jlmc0pVNyXN01liHksjxvqhNHZfyeMJfgie1XyJrRyuqkjZEfqEq3RpULcnBM/yaMHiaEGKodRu50QiI3ncr7AltV9MCeKMNaXjZMg4Z6huc9YpUNYhV9abEAJh5UmpPHbSAOWkxCCljHiPEcEPiapxWGux0ZTmPyEQo6ZYqvUkUtL0z5zbZrIxGTf0SnfkGCJxPCGfTmtSjIQAPsxJVwJ1MyVZU7oop4SpLL4vG/12a0bwnhg8MZSU/Lq5szXh0muXePITz9FMp0CptYuSykY/pYtyHCJ+HEzerwaW8yVN20JpvzBexxEnFmcFH/UQ4MPoir2GvI+EwwXL1Yq+H8gxksaOXjmA2LIDetLoICcwVUm1PJm5M9248+Pqk6GRZqxJ8H0ZDBuCB0lj/VzppBlDpu+k3FCmgRgGrCkL6M0dGqXW14WnzzPbnNEtutNB4MdvX0O6gdmZM0QSxmSCz5janrZPr9oaP3iWewc459jY2aSZNuXGMyeIgd13vn5NwonVYsVqviIDB/sH1HXDZHPGZGOGrSrEmTKg1Qo5e4y15JzGuUKJbtXjQhwDTwEjfOp3fjs/+3f/+cf34qlHwpknzmCMYX/3ED8MZaZpLOuAZMZhwyCUTq6uKrvuYoQU
Mr6L3OmINwGcMfSMp3whYkwZMQCZ6GP5funmTKxhtSozqwaDrdzYzKFsttxxhopSD1hVOzZ3NmknDTvnt+mWPcEHvA+lZjWdXIelp8Gw6um6MM50FFbzjuW8Y3Nrk0Qmu5p+NWDteFydUqmrG8t1og9UdUV3h7OBr71xma0z21RtS7sxQ2xJ/xcHqUskM3a7FfA+lHUsZqrWQS7ZJU4sXQpUbc3BjcOP4+V8JGgwt4acE1bdQFgNpBixlSkXsI/jYNQynDjGPNbMlMLV4MsAYyNlVt2dylBanYtBTMa4TIyCq/LY5rYEi8aVHGgxjOmXBleV+jlEMDqgVa0pYwx1W3P2iR2ccyWQG9O5NjdalsuOq+/s0ewtOPf8BbIYhr6nakoNaR7nyE2mLe2kYTlf4X2gyRVWhKo1dMvVHc1gXBwtiCGUxiVGWC4WeD8w9B3T2Qbt5hTjLDlAVVVAJgxDOUmPkYSQbKRf9TSTmqp2GN0AVbfh/NMXadqaoRtKndw4QzHn0mRHTobWkzFZcGMzIOMMvg+Ig6pyxPD1a3BuJVbG0+My9mNslAxA8AkjBlcxzkIt87NORoHYOpe04yzUTakfUuph51ypJ5ttzZhtzWgmDUM3cHywKKfQYxOR00Du5IIQuPDUGd56c3ccU5VPh38fHRxSVY5u7pjMNsgu0XeJ2c4U3w1lY0SgbiuG5ZK+6+/oOR9c3+PcxbMsDg852j9g+/w5qqZBxFLVVTkASAnnLH1XTgFXixXeO+q6QqLFmtJBPfSRi89c4NlveI5Lr771MbzC602DuTXkUySEQMrlgg3jBUcu3bxO5u04My54ucywQkrxuTEQh9sbFP4uYxfNPBbDGhPH3VaHiGHoPM7Z0lBh7HDpfaLKmX4BVVP+ItGdULVu6qZm68wG1jnaSYv3nqH3pZNezrja4lpHfxCBzDAMXP3qZba3t5lsz8oJRc432y+PAeBsczoOEk/4oWc6mzDc4YIJsHtll5e+5RN0q56NnU1m2xuklBmGnvnbx1R1zXRzejpXqyzSQjaCq0udqxBZHq+o24oseo2qr+/621fZ2Nm5dQ7waZdJGZuRGGtKLUxblawOUzYWM5BjotlseetLl277e5aTgkiZvzEevqUxQItQVxUpZ1JO2MqViTmUtuvBe8gnteS5fKyxnHrIiQi/+btewNQNb19akEl0y44Uc0kTPpn9e+uGynhxCLA4Xpb7rrHTuOQyIqRqDJVL1CYQlwcEcdhJQ+jLyBoB+s6TYsKvOq69c2cjpY72j3nnjcucf+oCx4cL9q7usnP+LFXTgK1g3OwhpdIIRTJZyqFDn8thRPAWYzORhKtbvNe06A+iwdy6ydAtevxYLBrjycIpxFtuwERKi+Yy46ekXhoRQudxjeOLv/KVO//WaZw9IqVI1lpbhoWPC6MYwVaWFCORXAaau3EunhOMzfgQT2dvKfXQEphtTHnpledZ9T1+HMsRfMQP/jSt8mQBFYTF4fJ0oyLnMix8d/cGzXzObHMDmhpxUup3Qun6GocByAx+3Pzo766GJ4bIMARm21O2dnaQyuC7AJSZj33XEYPHVa40hYiRpm2wzdhV0FW4uowt8L1ncXxnnW7V4+ng+j7LxZJ20pDlZjORLIaqsmXO4ZDGRli2/OxRatoQIRkYup6rb1297e8ZfOCrv/YqL3/qk+PaUhoGZZ8Z+lBqceI4jNgZok9k0mkNTgyh1Bb1ZTM0aR2Oeog5Z/hN3/YUTz4xJWVo3RTvA9jMKkSODgfmB35Mpc8QIQHihH5V1q3FvKOyQpZM3RicKdlcmLLxn+LJvaPHzz1+ucSII8VMH3r2l56+G0jpzjf5rl/e5dlPPE81mZCB4Ae65ZKUYDKbUm1MwBj6rh9rX/M4qzgyrEqmWW0bKmfoFiuqStuhfxAN5tZQv+rLD3sqN5R5vBDNmG4pRpBcAitrLZFUunaN8+H2r+/RLe8s9xkoHYdCGrtVCr73ZDLeR9y4cKdYdk6dMySfMFWZT1eNs37EyNjwQamHj7WGp567iA+BrZ0t5vPluIBl/FA6cJVgaNwBHRe3lCLZmdO6uJPHc850q45+tWIyaZltbpDGWY/O1qSYGIah1NJVFcEMYxB2Z1aLFavjY1745heIS+iGcrpnpMbVGTCIKYt3jJHl8YLF/jHNRsvG1kZJfSGP87cs7k4aUqjHWlU3VE1NCGVzUYyAZMaztzJ/zlmE0kUSgRASMp4aXHr1zdtqdX4r33vSuKHiKke37Ckn4p66rSBnqroEjwLElKkbSwiMp9ClcrucHGrav3p4bW62PPHkJovjrtRVAzZn0pBpgLOtY/tCGccRUx4zozJdH1lJRiy0TUUYAt6XdOQUE5nytZIzwQjLo5sbeBkw2RB8YPiIDYJSTNy4tscLr7zIquvLzDl65kdzuuWSWb9RDhp8oG5qYjYIibqtywBxH0mpPDcBvV4/hAZza6gEURCGWFImM2V3ZQzkrDEYV3KSMxnJMrZBT6SUOLx+cDOf+g6EkGiFcspmyqJsKAt39BFMOb0TU75f3bjy/U3AWBhWHrLoTqh6KL308jle/MRFXv/aMcZZVmOxdxx/Xk8aO5zOdLy1PijncnOZbtYqOCc4O7b7bywpRfzqkJQEN5mwiuWEIKfMMMRy0h2Gu67hsc6QQ0YaS9NO6RYeK4kYLSKJdtrih77M5HKWftnRrzp8P+Cqcl4pwAAAMDFJREFUio2tGVtssX9lzutfeP3evKjqkbeaHzPZfpK6sgzLgZTL9RJDLE23QqZqLVVlx5OzMtB7WJXskuVddDcelzygbKSIKbVyduyaWdWWDKdrTVUbXGXxw5g5EiKM869+2x/4HfzDv/L38XcYUCp1P/R94PBghZHSUCilPJarjN3DKddCOgnkGDu6ArUzhBhLQJRhGDw+AEkYbGB1ePNn/uOqfinpnZGYEyF4Yihzkq1UmMYQhvIchmWHX3UYV9FOW1LK1E09lgGUurqSV60lAB9Eg7k1FMe5bmLLEbmtTmpgSn2CHTuGGVdqFYwRchJCyqzmKw73Du7q+2YodXOUuogcEyGOp362tGAXI6Xg3cdxUReq2hF8wlVl1tWnfvun+bmf+llNt1QPjYsXN/lN33qxzLh5fsrQR0wFfYzMV4GjfU9YJGxd0iSdsziXWR0Egk+nC8xsYjEwdn0tXfdiKifaN2tzEsPi+HReYylsEI6uHXM0n9Ov7m7G29U3r3LxqYtcv3KVYTyZEyc0uWF6YaecWGCwVUmNPntxh3414L2nbSusrXjji69z5c0rH/XlVI+RK197mwvPPUPOgliLSWVdylbGwcNCzkLKYCo7RmJgrBCD5+guOtTZcb0JIREjp3NVbWXHkvFSP2eMEMYuy0JEjFC5ihTi6ZqVcykfUOphtFwOfP7zl3nlk+dK7WcsgdvJRmJKJYDLKY8jcEowlzIESupxP3j6owEfAg9iEofvAtYZphsTlgcD1WZLCIlmYkh9IKYE05Zu0eGXHfODI+pJw+bOJvWkZbLREofI0cHhHaVkP040mFtDzlp8KE1
IMOUeUVxVOkU6IedSVFqO5MvuYwzlhlMkE+6m+QngrCH4SI4lgSadNFyhpF4iGQlAbU7/UrGVoWocrsr4PlK3jq2drZMZsko9cNYKzz63w2o50C3LX4quKYukM0LTWrYvGOKZ0lihjOAomyp2ksgTwSKk8Wf+pC4oJxhCwEkmOUufe/xxWYDJEC00wREZCMBycXfX5YnVsmP/6nX2ru+P8WFJ+TzOwO51NrY32Ty/zaTd4Dh0+C7TzhrOTDbZ2Jxy9c1dDeTUXcuAbSr8yuNqy0kvrjB2WQYZT+Q8YiGmRDudMN3aYHknA8PhXTeySC6jNgxYY8abW8AkxBoky82GLGN9kHGWFNI4YifoYqQeasul58uXrrM9a8kZZlWFlZJOnDlpaFLmlXbbHctrmbQwpKnlaHfxoJ8+BwcH/Lt/8/nxtN6yfXaHdmvG5uYGC7PChVLTWjfbpBxZHM6JMbA4npfRVykwPzjm9S+8RvB3t9n5qNNgbg2JgcmsJfiSKpJCwjV2HJZ6MkKA8UQ6j4Xp0A2RZjbhwnNPcbS7T7+6syYHOeexRi+VQE7k9Oa1dMo0paYujcPKbRmREHw5gbOVIWdwTcWnf+d38Ll/+dl7/dIodcdSyuzvLZk0ZeDqSWrWSdDFuNOfEuMQ1vLfhJhwIviQ8amcUPdDJPlEqiKL43JqZ8hkI8ScyO9Zh5bceefKD5NT5q2vvUMzaU9PKqT8C4D50TGLozmudjT1jO3nzmKomE5ntBsNX/uSplaqu5Az0XtcXZVTaVfq4XIu2SEulZNqaw2JkgIZQhwbLyRe+rZXePvVN5gfHN/2RmPOpYlPSYEeL9QMIQaq1hK6knoZ8/hcyGNqZS6jCoyMReTlhPC7v+97+Nf/6Oc+zldJqY9kdT2yur4AC84Iwkld87gTkcda1auJ2AMReEjGsuWxjg8jxBTZ293F7u+zf/U62+fO0sxaQgKTyun5uYtngDI6ZLno2btywNuvvUEIGsh9GA3m1pAZc/1dVYGBQJktYpIwboCOnbsMWUqDlJTH1tEp8+wrLxBffJpXf+WLLI9uf9em5GqXHZSqsqWhSeA0rWW5GHDO4ERw7TiiIGVyLB2JxJZUGzK0k2m5ydQdUfWA5QxXrhxxYbslUII2OKlJGE/b8rtTWE6aoMSYGXxg1QwMVxKrzr/vRzrCWNj68YsxjR0FZby/vTlr6CQwDT7g+wPmv37AxYsXmLrEL/7zL3C4d3xfnqN6tDz90jPk5PFdArFYZ8FkjLFk8jgPrtzQiYU0piWX5SjTtDXf9F3fyvHeEV/4t7962zXVMSSMAVNmDzB0ZcxAaSCUESndnEXGJlxjiqcZgznT2JJZomuQWicRQsyMK8v7fbQEj3tOjDCdTU6bt5QdHyGESJgvWR4vqJuW6faM7Z1tclUyzCrjiD7QHS249NobRA3kfkMazK0hMYKkkxo5R64oQVUlWCtl9pszZTckZYbVgDiBWAalCkLdtnzzd/8mvvBvf+22U1xijDRWECkFqX4o3cROAkdj5XQcgqst3WIghbJ4YgArmHHwybd/76d59Ve+yPXL1z/W10qp27Fcen7p85cR4MlzU6gclYN6Um4AUwYnsSyi4jg86hEy84VnufTkkuHywG1ub5R705NNnfFY7uS03piy4ZOl/N1wY2+Ps09UGsipuzbb2aSq3Gl9tlCanuASJlNSHa0gGbwPhBDIOZFjPA30uuOedjLl5W/7RnbfucbBtb3f8HumWAJCa0vHPRkHk+cwpp2VUBFr7VhLZxCkdLtsHDFmjAC2NA7qF9r8RKl7zVrL5pnN0w1RbmnuJVJO7MUI3g8c7vbM9w/ZmG2xeXaG1IFZW/HW7q4GcrdBg7k189wrL1A1NUM/kIInpYS1jmzGo2wp6SwxJawVQh/Jkok+kGMahzB68lDSUp5+6Tn80PHGbXSvyymXBdvK2HGvNHVI4zDKm3lplM6a44KaEQy27KDmSPCexcESndaqHiZxPD27dK2cVlsLdWtOG2g5U07lxDoW8/csLg/B7r4Zi5ROrqrMuHaOLeCzPUm9HL/WlpO8L/7qGw/sOav15iqHqyvEWMQkUgzkGDF1Nd6sZXJI5FA62g2DJ3pPTJEUAiYYopFx5E0i+sTO+bNfN5jLuaxFOZWTaGtMGUNggAzOlcyRMESsM1TOEkNCTBkLcto0LJQTdluV8Qkx6EBipe6Vqqmom7p0f761T4KUE3MZx2XlMXUsp8TBwR5Hx/s8+/QmV9+cc/XK/oP8I6wNDebWzObOJu1sAkbolz0peHKIZAxiSw51CoEQIz4m/DCQcyKF0rikWwZSSAQfSDHifbiDuW+J4CMuC+IsrnakCCnGkkZpbKmlozRAQWScfZcx1pZB5pVBgNWwupkCptRDKEZYLT7ouO3h3CXc3N4otXK5lE+U+9qS4lxGlFACO2uQVIYqY8FJA9xZAwqlAF78ppc588R5fD9grLBaDmXnIwRKA9exbjrEciKXIilEYgikFFkuPUgm9IGYSkMt6yztbEq3+I1HFuQcSTFibAVwOpTcVqbMzxpvGPMtrdyNtRjrsM6VTdAYCb3nyeef5snnn+Tt196+D6+aUo++2daU2eZsbH40dpwdGxeJcPOUbuyCnvNYDmQM1ggxenZ3j3USwW3SYG7NxAzGVVjrcU1FCrbM4LCGfjWUDpKmjCSIJdJiWA1kEqHzdP1QLqpxdEHV1NjbHBCcQqBqJ2WXJY6NIGIZCgtl0bTO4od42jbaVhbjLLZy4+KaESuYlPnM7/kM//T//icf46ul1ONBRMZW6yVwO51Vd3pYXm6qZTy5y+Niap3wxIUNLl+68aCeulpjYgx1W5fUXaCZlFoYUmJYduM8RkgxMPQecmRxY0k0pdFBjnkcZ2OpGoexBucqbPUb35pYZ3BVRUDGU7pYOjYj2LFVuzEl7Z8s+CGNYwsEVztiKLV21gnL4xXb55x2yVPqHhER2klzM1NkDOQolyjva5gg5Xos161ha7vi8GDJavWQFQA+xDSYWzuZZlKRUgNiCCaQUipplDkRh0AC/OBZHK9IKdAvBkwl77rhs5XDiKGZtrSt4+kXnsBJ5NKbe+X+7wOGir/15Tf4xs98upwASknpzDmVgeGMQ4spO6MlxctgnSupOM6MWZiJ5fGS2Afqur6fL5xSj6y6rZhttgxdPF0r83j6dtrdlvGEDsEaoZk5JrWlrm5vM0ep97LWIKm0RvcrT7fq6JcDcfD4MJSfw5joFj0h+jGLw+DqcXOvKWnM1tkxu6PUXi8Pj09P1tIHrEXHe4eEISDWYS1ARkwZnVPmyqUy33SIlI1Le5piaayDnDCSqWoh9J6h68fOmEqpj8pVlrptSWPn85NYTk6bc439G8YPoXzeiFA5YbOtuPSaZovcCQ3m1owBYh/xK8/x3jExloLyFCNxCIQYiCGeplGmlKgmFVXtyi6JKXd3tnLj4FWh73omGzMkJ773915AMPx/P/tl/PDunUo/BPwwULctIuB9KRo/SZfMlFlyzpXdVTGlhqcMRhZEAr7riUPCbA
nX3r52n189pR5N585OOLNTc/16f5rGku1JkcJJ8XnZ/RSBc+canrjQYsUwX+qJhLo7tjIMXc/l16/QrRaIGTcRjCkZG3kcD2DBiSNXlqqpS03beCpXxgOcdJo0VM7w4jc+T86Jc9s1r716mf391bu+7/J4SfCe6WQKORJTIvhhnLtVhoR7X9L/m2kFuXThcnVVNjMspOA5vLEij+thThrMKXUvbJ3ZOq2TO+15InDSbn1cjsbPj1uNAnUtPPlEy9V3jul7XZfuhAZza6RuG85ePEu3XLB7eZd+1SEWSBmxFlNZnBl3OMaFyThHVddUjSujBKTsUp4MFXaVxS86QIgJrl3r+PSnLnL27IyrV949pKRfrlgcHNI+PSPlRNXW+G4oA8PHbkXGlKvXVg7EIZSumjEGhEi/KnO16ibwpc998T6/gko9mp58cpPZpkMks3fDk8aByjlnrDPImGLZtoYnzrVsbFSQEilF3nh990E/fbWm9q8dcPbiAltBkxtsZQg+lZlyOTOseuJQTotN5ainDa6yY3p+GjcXZeyCKdjaEparMoDcOY4Wgd/8XS/wuV++xI3dd+/UX3n9Et/wHTvkbHFjvdxqsSpjRCLUE4OxFlc1OFeVgFEyvh8gDSyPx0ZHznL97Wsc7R/d/xdQqUfMufMzNmY1Q+C0g2XOJ7XbwMl94viYtcLWGcdGW9HUFmvkA0/j1W9Mg7k1Yp0lISwOl1TOYjYmp52AxJRahZNZcDll2o1JmUWXwDgZh6aWRVOMYK2BWArSU4rklNnamXC0v+A3f88n+Mf/4Jff9f2rpsY4h60MeMrcOJfwfSClTF1ZJpsTYgChom4bYoyEvieFnhQjIUSqpmLoh7HGQSn1UaWYccATZxqeONuy7BPLeSinDrVhZ8dRcdLtsnTlDCHhfeLocPV1//+V+iApQz/4Mqqm8yyPekKITDengJByQqxhtrGBq2tCucPDOUMMnKZYCaUEgJSIvtTJxFBO3Kwz/Lbf9Un+4d/97On3bTem1JOGFAO2rskJmtkEjCHHCBgmWy3OOayrMGLGjcdADB1xCMQEduzwujha4nutz1HqozBGeOaZLc4/scHld1YcHo3X1NgR+uZpHOycramM4cL5BmtMSdUOZdzP5cvawfJOaTC3RsQIk9mEo/6ImALdsi/plCEy3Zoh1hB9oG4rNs9ukYE4BLJJp/UzpRodkEwk4xdLfNefpmHZHOkHwXf9+77/xs4WGztbLI/mzLa3yFloZ46qrcsOi7PUbQUITVOxmneEvkMoDVFsZXEJ6rrCOTcON1ZKfVSZzDBEXCyn760RpjuupLAZgTTWt6ZcamxjJqZM30ei7oKquzTbmrKxs1Fmi04MwVc0TUvdTJgfrWgllU6VfmBYduSxA0Ikn7YjF4SUI66xDMsVftWd1naePVuTQmm2dUpg54nznH/uSeZHcza3N3FNi7GGarsm50w11uSlnKkbSz8f6FdLhEjysTTlimVNNGJwlX1QL6FSj4zJpOLsmSmVCM8/NcFfaAmpjP2RnEhJEAdWhKYpnc1TLN3WvS/r05tv7D0Uo37WjQZza8QYw9aFHWxV0Xcdvh/wfSD6UpOQyVRVjR88q+MVKZbmKCeDguWW5GUz1irM949ORxO0E0POkW6V+Owvvfmu77119gzPvPIyKZU0ldB1NBszxFpsclSNRXIZ4GqdMN9f4oeOfrViOmuoKksWgWwQY0uxuWgwp9S9kGJp+RzGlOeU8+m8uVvbQJ+0eU4p40PiK1+5Tqcdw9RdErHsnDvLZDojpUS37MdfO4auK7NQY5kt6rub6xGU9az8Rqgbh+8jh7sHGCMYY2gay6QSus5z6Z2rp9/z4vPP8NSLz+GDJ/gwbi6CtC2SHcYYxAjRJ3zv6eeRbr5g6Drqpho3N/Lp14lB1yKl7oEXXjpPSpmu82PqNFRGaKwgUjZMThrsRR/JORNjyRKJMXE89xxqpshd0WBujeQMYUi00xYxlradMAye4COr4xWLo2PCMBBDIIZA8LHkJRshp5s3dSKCayrCaoUffBkfYISNWUMcIpeuHL+rJeyFZy/y0qe+mRgSOUd8D8v5sow2aKcYEUKfEEIZjxAjOQV81zH0no3NCSFEjDH4UC7g6eaUb//e7+TnfvJfPrDXU6lHxVe/ussrL59nOiszt066h2XyeKpxy1ZnznSrwGtv7nN89P4TeKVu19D17F29UVL7c6ZfBWL0xBBpJjVDV4ZyG+ewdanlTrFkisQYy+PWEH3GdwuCD7jKltO1qmwuHh9HXn+1BHPPfeNLPPXKi0QfMaF0rPQ+0HdlDELVlNICvxgY+oGcAmEYWM2XpJBwzuH7UGYtitBMG5q2oWoWD/iVVGr9BR9ONxNLV9l8uvSIyGmX5cy4Po29FjJwfDzw6qu7BC2/uSsazK2R4AOHNw6QnAlDoKodQ+9ZLTrCEGkmLbZyBO8RyaWWLQR870mUmriqrjHOMvQDi72jkr8chdnU0VTgY9kpudVTLz+PdY5MJPSxDF4VKbV23oNziMDQDeVjEsOqx/twOuIqJ1h1PX6IVNtTmmlL9JraotS9sFx6vvyV6zStgxY2KsPO5rQ0RkqRlMHiWXSB3b1A30WW2i1MfURD1zE/OCrZ+31g6HsObhyRc2bz3A7WVUxmFcaWDcXoe1bLFf2yL3WeQmlOIpm9K8djDXhiOnNMW0PfR65fm5fTY4EnX3yOqq7IMVM1FnA4ZxEghkA9SWQ8IXhSGOgWHSGWmXZiDSkl/DCwWnRlHILZZvPMJpPp9EG/lEqtvcuXD5GZY1MM1grmlqYn+ZYNxZxLvW0YUyy75ZKvvbXQQO4j0GBujfTLFb/ys7/EC9/4Es3UsZpH+mXHtTf2sJUw2drAVjXOlQWumjakFAltz/K4FHjnXAYKz2YV8+sJ5ywbE0vjhOODjlUfuPz2zeLT6eaMuqnHluaJqqkQA0bAWME4SCmS4kAY/Lg72487L6VbUbfsWBytWB6vSt1c7QhDZmNLF1Cl7pVVH1j1AQ7hAHib5WmDiZN1VEsR1L105Y3LVE3F89/wLKsYyEPmaPcA1xhsbYkpQRLatiGOzUo2tjepasv8YFGyOGKimtjSGKUyTBpLWwndMtD1nsuXDwDYPneGqrFYaxExVG1TOjOTsUbKEPGQxvE3nmHweB8QI/gh4JzFVZa+g6EfaF2LdTKORNCNRaU+qvlxz5c+dxkR2DozYWtSl0XHgKstrYPFMpISDFgWPrK8fnia/q/ungZza+boxj6XX7N843d8AzlGjvdX9P2cioopM/puiRdDO61YHnQY69g+u4mrLEd7x6QQaGdTcvLUtWVjw2El0608/RC4fn3+rgtrY2eT6eaUlKCd1BhjShF7TpCg7z3JB4L35JToO0866a45RIw1DF0Z3BpjoN1saGdVeXyptTpKfVzy6b+U+vhc/uoltjZa2umEoxv7iEsEnxjmA1sXdxArtE3FEDx9X2pp2klL8JHl4QJrAzEk6tqwteEgQ9cHvI9cu3p8uh5tnd0m9J4YMq5yVE1FzhXGG
[base64-encoded PNG image data elided]", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "# visualizing\n", + "import matplotlib.pyplot as plt\n", + "rgb_preds_grid = make_grid(preds['rgb'], nrow=4).permute(1, 2, 0).cpu().numpy() / 255.\n", + "plt.figure(figsize=(15, 15))\n", + "plt.imshow(rgb_preds_grid[::4,::4])" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.12" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/visualize/ca_body/requirements.txt b/visualize/ca_body/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..292f6db5ae09201412070f663f4560ba2ee53359 --- /dev/null +++ b/visualize/ca_body/requirements.txt @@ -0,0 +1,4 @@ +torch>=2.0.0 +pytorch3d +numpy +torchvision diff --git a/visualize/ca_body/utils/geom.py b/visualize/ca_body/utils/geom.py new file mode 100644 index 0000000000000000000000000000000000000000..7e50c917ea4bb1aa3043888b421b3efe6612e700 --- /dev/null +++ b/visualize/ca_body/utils/geom.py @@ -0,0 +1,659 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +from typing import Optional +import numpy as np +import torch as th +import torch.nn.functional as F +import torch.nn as nn + +from sklearn.neighbors import KDTree + +import logging + +logger = logging.getLogger(__name__) + +# NOTE: we need pytorch3d primarily for UV rasterization things +from pytorch3d.renderer.mesh.rasterize_meshes import rasterize_meshes +from pytorch3d.structures import Meshes +from typing import Union, Optional, Tuple + + +def make_uv_face_index( + vt: th.Tensor, + vti: th.Tensor, + uv_shape: Union[Tuple[int, int], int], + flip_uv: bool = True, + device: Optional[Union[str, th.device]] = None, +): + """Compute a UV-space face index map identifying which mesh face contains each + texel. For texels with no assigned triangle, the index will be -1.""" + + if isinstance(uv_shape, int): + uv_shape = (uv_shape, uv_shape) + + if device is not None: + if isinstance(device, str): + dev = th.device(device) + else: + dev = device + assert dev.type == "cuda" + else: + dev = th.device("cuda") + + vt = 1.0 - vt.clone() + + if flip_uv: + vt = vt.clone() + vt[:, 1] = 1 - vt[:, 1] + vt_pix = 2.0 * vt.to(dev) - 1.0 + vt_pix = th.cat([vt_pix, th.ones_like(vt_pix[:, 0:1])], dim=1) + meshes = Meshes(vt_pix[np.newaxis], vti[np.newaxis].to(dev)) + with th.no_grad(): + face_index, _, _, _ = rasterize_meshes( + meshes, uv_shape, faces_per_pixel=1, z_clip_value=0.0, bin_size=0 + ) + face_index = face_index[0, ..., 0] + return face_index + + +def make_uv_vert_index( + vt: th.Tensor, + vi: th.Tensor, + vti: th.Tensor, + uv_shape: Union[Tuple[int, int], int], + flip_uv: bool = True, +): + """Compute a UV-space vertex index map identifying which mesh vertices + comprise the triangle containing each texel. 
For texels with no assigned + triangle, all indices will be -1. + """ + face_index_map = make_uv_face_index(vt, vti, uv_shape, flip_uv).to(vi.device) + vert_index_map = vi[face_index_map.clamp(min=0)] + vert_index_map[face_index_map < 0] = -1 + return vert_index_map.long() + + +def bary_coords(points: th.Tensor, triangles: th.Tensor, eps: float = 1.0e-6): + """Computes barycentric coordinates for a set of 2D query points given + coordinates for the 3 vertices of the enclosing triangle for each point.""" + x = points[:, 0] - triangles[2, :, 0] + x1 = triangles[0, :, 0] - triangles[2, :, 0] + x2 = triangles[1, :, 0] - triangles[2, :, 0] + y = points[:, 1] - triangles[2, :, 1] + y1 = triangles[0, :, 1] - triangles[2, :, 1] + y2 = triangles[1, :, 1] - triangles[2, :, 1] + denom = y2 * x1 - y1 * x2 + n0 = y2 * x - x2 * y + n1 = x1 * y - y1 * x + + # Small epsilon to prevent divide-by-zero error. + denom = th.where(denom >= 0, denom.clamp(min=eps), denom.clamp(max=-eps)) + + bary_0 = n0 / denom + bary_1 = n1 / denom + bary_2 = 1.0 - bary_0 - bary_1 + + return th.stack((bary_0, bary_1, bary_2)) + + +def make_uv_barys( + vt: th.Tensor, + vti: th.Tensor, + uv_shape: Union[Tuple[int, int], int], + flip_uv: bool = True, +): + """Compute a UV-space barycentric map where each texel contains barycentric + coordinates for that texel within its enclosing UV triangle. For texels + with no assigned triangle, all 3 barycentric coordinates will be 0. + """ + if isinstance(uv_shape, int): + uv_shape = (uv_shape, uv_shape) + + if flip_uv: + # Flip here because texture coordinates in some of our topo files are + # stored in OpenGL convention with Y=0 on the bottom of the texture + # unlike numpy/torch arrays/tensors. + vt = vt.clone() + vt[:, 1] = 1 - vt[:, 1] + + face_index_map = make_uv_face_index(vt, vti, uv_shape, flip_uv=False).to(vt.device) + vti_map = vti.long()[face_index_map.clamp(min=0)] + uv_tri_uvs = vt[vti_map].permute(2, 0, 1, 3) + + uv_grid = th.meshgrid( + th.linspace(0.5, uv_shape[0] - 0.5, uv_shape[0]) / uv_shape[0], + th.linspace(0.5, uv_shape[1] - 0.5, uv_shape[1]) / uv_shape[1], + ) + uv_grid = th.stack(uv_grid[::-1], dim=2).to(uv_tri_uvs) + + bary_map = bary_coords(uv_grid.view(-1, 2), uv_tri_uvs.view(3, -1, 2)) + bary_map = bary_map.permute(1, 0).view(uv_shape[0], uv_shape[1], 3) + bary_map[face_index_map < 0] = 0 + return face_index_map, bary_map + + +def index_image_impaint( + index_image: th.Tensor, + bary_image: Optional[th.Tensor] = None, + distance_threshold=100.0, +): + # Build a mask of texels that carry a valid (non -1) index. + if len(index_image.shape) == 3: + valid_index = (index_image != -1).any(dim=-1) + elif len(index_image.shape) == 2: + valid_index = index_image != -1 + else: + raise ValueError("`index_image` should be a [H,W] or [H,W,C] image") + + invalid_index = ~valid_index + + device = index_image.device + + valid_ij = th.stack(th.where(valid_index), dim=-1) + invalid_ij = th.stack(th.where(invalid_index), dim=-1) + lookup_valid = KDTree(valid_ij.cpu().numpy()) + + dists, idxs = lookup_valid.query(invalid_ij.cpu()) + + # TODO: try average?
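+ # The query pairs each invalid texel with its single nearest valid texel + # (hence the [..., 0] below); values are copied from that neighbour only + # when it lies within `distance_threshold`.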
+ idxs = th.as_tensor(idxs, device=device)[..., 0] + dists = th.as_tensor(dists, device=device)[..., 0] + + dist_mask = dists < distance_threshold + + invalid_border = th.zeros_like(invalid_index) + invalid_border[invalid_index] = dist_mask + + invalid_src_ij = valid_ij[idxs][dist_mask] + invalid_dst_ij = invalid_ij[dist_mask] + + index_image_imp = index_image.clone() + + index_image_imp[invalid_dst_ij[:, 0], invalid_dst_ij[:, 1]] = index_image[ + invalid_src_ij[:, 0], invalid_src_ij[:, 1] + ] + + if bary_image is not None: + bary_image_imp = bary_image.clone() + + bary_image_imp[invalid_dst_ij[:, 0], invalid_dst_ij[:, 1]] = bary_image[ + invalid_src_ij[:, 0], invalid_src_ij[:, 1] + ] + + return index_image_imp, bary_image_imp + return index_image_imp + + +class GeometryModule(nn.Module): + def __init__( + self, + vi, + vt, + vti, + v2uv, + uv_size, + flip_uv=False, + impaint=False, + impaint_threshold=100.0, + ): + super().__init__() + + self.register_buffer("vi", th.as_tensor(vi)) + self.register_buffer("vt", th.as_tensor(vt)) + self.register_buffer("vti", th.as_tensor(vti)) + self.register_buffer("v2uv", th.as_tensor(v2uv, dtype=th.int64)) + + # TODO: should we just pass topology here? + self.n_verts = v2uv.shape[0] + + self.uv_size = uv_size + + # TODO: can't we just index face_index? + index_image = make_uv_vert_index( + self.vt, self.vi, self.vti, uv_shape=uv_size, flip_uv=flip_uv + ).cpu() + face_index, bary_image = make_uv_barys( + self.vt, self.vti, uv_shape=uv_size, flip_uv=flip_uv + ) + if impaint: + if uv_size >= 1024: + logger.info( + "impainting index image might take a while for sizes >= 1024" + ) + + index_image, bary_image = index_image_impaint( + index_image, bary_image, impaint_threshold + ) + # TODO: we can avoid doing this 2x + face_index = index_image_impaint( + face_index, distance_threshold=impaint_threshold + ) + + self.register_buffer("index_image", index_image.cpu()) + self.register_buffer("bary_image", bary_image.cpu()) + self.register_buffer("face_index_image", face_index.cpu()) + + def render_index_images(self, uv_size, flip_uv=False, impaint=False): + index_image = make_uv_vert_index( + self.vt, self.vi, self.vti, uv_shape=uv_size, flip_uv=flip_uv + ) + face_image, bary_image = make_uv_barys( + self.vt, self.vti, uv_shape=uv_size, flip_uv=flip_uv + ) + + if impaint: + index_image, bary_image = index_image_impaint( + index_image, + bary_image, + ) + + return index_image, face_image, bary_image + + def vn(self, verts): + return vert_normals(verts, self.vi[np.newaxis].to(th.long)) + + def to_uv(self, values): + return values_to_uv(values, self.index_image, self.bary_image) + + def from_uv(self, values_uv): + # TODO: we need to sample this + return sample_uv(values_uv, self.vt, self.v2uv.to(th.long)) + + +def sample_uv( + values_uv, + uv_coords, + v2uv: Optional[th.Tensor] = None, + mode: str = "bilinear", + align_corners: bool = True, + flip_uvs: bool = False, +): + batch_size = values_uv.shape[0] + + if flip_uvs: + uv_coords = uv_coords.clone() + uv_coords[:, 1] = 1.0 - uv_coords[:, 1] + + uv_coords_norm = (uv_coords * 2.0 - 1.0)[np.newaxis, :, np.newaxis].expand( + batch_size, -1, -1, -1 + ) + values = ( + F.grid_sample(values_uv, uv_coords_norm, align_corners=align_corners, mode=mode) + .squeeze(-1) + .permute((0, 2, 1)) + ) + + if v2uv is not None: + values_duplicate = values[:, v2uv] + values = values_duplicate.mean(2) + + return values + + +def values_to_uv(values, index_img, bary_img): + uv_size = index_img.shape[0] + index_mask = th.all(index_img != -1, 
dim=-1) + idxs_flat = index_img[index_mask].to(th.int64) + bary_flat = bary_img[index_mask].to(th.float32) + # NOTE: here we assume `values` is [B, n_verts, C] + values_flat = th.sum(values[:, idxs_flat].permute(0, 3, 1, 2) * bary_flat, dim=-1) + values_uv = th.zeros( + values.shape[0], + values.shape[-1], + uv_size, + uv_size, + dtype=values.dtype, + device=values.device, + ) + values_uv[:, :, index_mask] = values_flat + return values_uv + + +def face_normals(v, vi, eps: float = 1e-5): + pts = v[:, vi] + v0 = pts[:, :, 1] - pts[:, :, 0] + v1 = pts[:, :, 2] - pts[:, :, 0] + n = th.cross(v0, v1, dim=-1) + norm = th.norm(n, dim=-1, keepdim=True) + norm[norm < eps] = 1 + n /= norm + return n + + +def vert_normals(v, vi, eps: float = 1.0e-5): + fnorms = face_normals(v, vi) + fnorms = fnorms[:, :, None].expand(-1, -1, 3, -1).reshape(fnorms.shape[0], -1, 3) + vi_flat = vi.view(1, -1).expand(v.shape[0], -1) + vnorms = th.zeros_like(v) + for j in range(3): + vnorms[..., j].scatter_add_(1, vi_flat, fnorms[..., j]) + norm = th.norm(vnorms, dim=-1, keepdim=True) + norm[norm < eps] = 1 + vnorms /= norm + return vnorms + + +def compute_view_cos(verts, faces, camera_pos): + vn = F.normalize(vert_normals(verts, faces), dim=-1) + v2c = F.normalize(verts - camera_pos[:, np.newaxis], dim=-1) + return th.einsum("bnd,bnd->bn", vn, v2c) + + +def compute_tbn(geom, vt, vi, vti): + """Computes tangent, bitangent, and normal vectors given a mesh. + Args: + geom: [N, n_verts, 3] th.Tensor + Vertex positions. + vt: [n_uv_coords, 2] th.Tensor + UV coordinates. + vi: [..., 3] th.Tensor + Face vertex indices. + vti: [..., 3] th.Tensor + Face UV indices. + Returns: + [..., 3] th.Tensors for T, B, N. + """ + + v0 = geom[:, vi[..., 0]] + v1 = geom[:, vi[..., 1]] + v2 = geom[:, vi[..., 2]] + vt0 = vt[vti[..., 0]] + vt1 = vt[vti[..., 1]] + vt2 = vt[vti[..., 2]] + + v01 = v1 - v0 + v02 = v2 - v0 + vt01 = vt1 - vt0 + vt02 = vt2 - vt0 + f = 1.0 / ( + vt01[None, ..., 0] * vt02[None, ..., 1] + - vt01[None, ..., 1] * vt02[None, ..., 0] + ) + tangent = f[..., None] * th.stack( + [ + v01[..., 0] * vt02[None, ..., 1] - v02[..., 0] * vt01[None, ..., 1], + v01[..., 1] * vt02[None, ..., 1] - v02[..., 1] * vt01[None, ..., 1], + v01[..., 2] * vt02[None, ..., 1] - v02[..., 2] * vt01[None, ..., 1], + ], + dim=-1, + ) + tangent = F.normalize(tangent, dim=-1) + normal = F.normalize(th.cross(v01, v02, dim=3), dim=-1) + bitangent = F.normalize(th.cross(tangent, normal, dim=3), dim=-1) + + return tangent, bitangent, normal + + +def compute_v2uv(n_verts, vi, vti, n_max=4): + """Computes mapping from vertex indices to texture indices.
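+ A vertex can map to several texture locations (along UV seams), so each + vertex stores up to `n_max` texture indices; unused slots repeat the first index.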
+ + Args: + vi: [F, 3], triangles + vti: [F, 3], texture triangles + n_max: int, max number of texture locations + + Returns: + [n_verts, n_max], texture indices + """ + v2uv_dict = {} + for i_v, i_uv in zip(vi.reshape(-1), vti.reshape(-1)): + v2uv_dict.setdefault(i_v, set()).add(i_uv) + assert len(v2uv_dict) == n_verts + v2uv = np.zeros((n_verts, n_max), dtype=np.int32) + for i in range(n_verts): + vals = sorted(list(v2uv_dict[i])) + v2uv[i, :] = vals[0] + v2uv[i, : len(vals)] = np.array(vals) + return v2uv + + +def compute_neighbours(n_verts, vi, n_max_values=10): + """Computes first-ring neighbours given vertices and faces.""" + n_vi = vi.shape[0] + + adj = {i: set() for i in range(n_verts)} + for i in range(n_vi): + for idx in vi[i]: + adj[idx] |= set(vi[i]) - set([idx]) + + nbs_idxs = np.tile(np.arange(n_verts)[:, np.newaxis], (1, n_max_values)) + nbs_weights = np.zeros((n_verts, n_max_values), dtype=np.float32) + + for idx in range(n_verts): + n_values = min(len(adj[idx]), n_max_values) + nbs_idxs[idx, :n_values] = np.array(list(adj[idx]))[:n_values] + nbs_weights[idx, :n_values] = -1.0 / n_values + + return nbs_idxs, nbs_weights + + +def make_postex(v, idxim, barim): + return ( + barim[None, :, :, 0, None] * v[:, idxim[:, :, 0]] + + barim[None, :, :, 1, None] * v[:, idxim[:, :, 1]] + + barim[None, :, :, 2, None] * v[:, idxim[:, :, 2]] + ).permute(0, 3, 1, 2) + + +def matrix_to_axisangle(r): + theta = th.arccos(0.5 * (r[..., 0, 0] + r[..., 1, 1] + r[..., 2, 2] - 1.0))[..., None] + vec = ( + 0.5 + * th.stack( + [ + r[..., 2, 1] - r[..., 1, 2], + r[..., 0, 2] - r[..., 2, 0], + r[..., 1, 0] - r[..., 0, 1], + ], + dim=-1, + ) + / th.sin(theta) + ) + return theta, vec + + +def axisangle_to_matrix(rvec): + theta = th.sqrt(1e-5 + th.sum(rvec**2, dim=-1)) + rvec = rvec / theta[..., None] + costh = th.cos(theta) + sinth = th.sin(theta) + return th.stack( + ( + th.stack( + ( + rvec[..., 0] ** 2 + (1.0 - rvec[..., 0] ** 2) * costh, + rvec[..., 0] * rvec[..., 1] * (1.0 - costh) - rvec[..., 2] * sinth, + rvec[..., 0] * rvec[..., 2] * (1.0 - costh) + rvec[..., 1] * sinth, + ), + dim=-1, + ), + th.stack( + ( + rvec[..., 0] * rvec[..., 1] * (1.0 - costh) + rvec[..., 2] * sinth, + rvec[..., 1] ** 2 + (1.0 - rvec[..., 1] ** 2) * costh, + rvec[..., 1] * rvec[..., 2] * (1.0 - costh) - rvec[..., 0] * sinth, + ), + dim=-1, + ), + th.stack( + ( + rvec[..., 0] * rvec[..., 2] * (1.0 - costh) - rvec[..., 1] * sinth, + rvec[..., 1] * rvec[..., 2] * (1.0 - costh) + rvec[..., 0] * sinth, + rvec[..., 2] ** 2 + (1.0 - rvec[..., 2] ** 2) * costh, + ), + dim=-1, + ), + ), + dim=-2, + ) + + +def rotation_interp(r0, r1, alpha): + r0a = r0.view(-1, 3, 3) + r1a = r1.view(-1, 3, 3) + r = th.bmm(r0a.permute(0, 2, 1), r1a).view_as(r0) + + theta, rvec = matrix_to_axisangle(r) + rvec = rvec * (alpha * theta) + + r = axisangle_to_matrix(rvec) + return th.bmm(r0a, r.view(-1, 3, 3)).view_as(r0) + + +def convert_camera_parameters(Rt, K): + R = Rt[:, :3, :3] + t = -R.permute(0, 2, 1).bmm(Rt[:, :3, 3].unsqueeze(2)).squeeze(2) + return dict( + campos=t, + camrot=R, + focal=K[:, :2, :2], + princpt=K[:, :2, 2], + ) + + +def project_points_multi(p, Rt, K, normalize=False, size=None): + """Project a set of 3D points into multiple cameras with a pinhole model.
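+ Each point is mapped into camera space (p_cam = p @ R^T + t), projected by + the intrinsics K, and perspective-divided by its depth.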
+ Args: + p: [B, N, 3], input 3D points in world coordinates + Rt: [B, NC, 3, 4], extrinsics (where NC is the number of cameras to project to) + K: [B, NC, 3, 3], intrinsics + normalize: bool, whether to normalize coordinates to [-1.0, 1.0] + Returns: + tuple: + - [B, NC, N, 2] - projected points + - [B, NC, N] - their depths + """ + B, N = p.shape[:2] + NC = Rt.shape[1] + + Rt = Rt.reshape(B * NC, 3, 4) + K = K.reshape(B * NC, 3, 3) + + # [B, N, 3] -> [B * NC, N, 3] + p = p[:, np.newaxis].expand(-1, NC, -1, -1).reshape(B * NC, -1, 3) + p_cam = p @ Rt[:, :3, :3].mT + Rt[:, :3, 3][:, np.newaxis] + p_pix = p_cam @ K.mT + p_depth = p_pix[:, :, 2:] + p_pix = (p_pix[..., :2] / p_depth).reshape(B, NC, N, 2) + p_depth = p_depth.reshape(B, NC, N) + + if normalize: + assert size is not None + h, w = size + p_pix = ( + 2.0 * p_pix / th.as_tensor([w, h], dtype=th.float32, device=p.device) - 1.0 + ) + return p_pix, p_depth + +def xyz2normals(xyz: th.Tensor, eps: float = 1e-8) -> th.Tensor: + """Convert XYZ image to normal image + + Args: + xyz: th.Tensor + [B, 3, H, W] XYZ image + + Returns: + th.Tensor: [B, 3, H, W] image of normals + """ + + nrml = th.zeros_like(xyz) + xyz = th.cat((xyz[:, :, :1, :] * 0, xyz[:, :, :, :], xyz[:, :, :1, :] * 0), dim=2) + xyz = th.cat((xyz[:, :, :, :1] * 0, xyz[:, :, :, :], xyz[:, :, :, :1] * 0), dim=3) + U = (xyz[:, :, 2:, 1:-1] - xyz[:, :, :-2, 1:-1]) / -2 + V = (xyz[:, :, 1:-1, 2:] - xyz[:, :, 1:-1, :-2]) / -2 + + nrml[:, 0, ...] = U[:, 1, ...] * V[:, 2, ...] - U[:, 2, ...] * V[:, 1, ...] + nrml[:, 1, ...] = U[:, 2, ...] * V[:, 0, ...] - U[:, 0, ...] * V[:, 2, ...] + nrml[:, 2, ...] = U[:, 0, ...] * V[:, 1, ...] - U[:, 1, ...] * V[:, 0, ...] + veclen = th.norm(nrml, dim=1, keepdim=True).clamp(min=eps) + return nrml / veclen + + +# pyre-fixme[2]: Parameter must be annotated. +def depth2xyz(depth, focal, princpt) -> th.Tensor: + """Convert depth image to XYZ image using camera intrinsics + + Args: + depth: th.Tensor + [B, 1, H, W] depth image + + focal: th.Tensor + [B, 2, 2] camera focal lengths + + princpt: th.Tensor + [B, 2] camera principal points + + Returns: + th.Tensor: [B, 3, H, W] XYZ image + """ + + b, h, w = depth.shape[0], depth.shape[2], depth.shape[3] + ix = ( + th.arange(w, device=depth.device).float()[None, None, :] - princpt[:, None, None, 0] + ) / focal[:, None, None, 0, 0] + iy = ( + th.arange(h, device=depth.device).float()[None, :, None] - princpt[:, None, None, 1] + ) / focal[:, None, None, 1, 1] + xyz = th.zeros((b, 3, h, w), device=depth.device) + xyz[:, 0, ...] = depth[:, 0, :, :] * ix + xyz[:, 1, ...] = depth[:, 0, :, :] * iy + xyz[:, 2, ...] = depth[:, 0, :, :] + return xyz + + +# pyre-fixme[2]: Parameter must be annotated. +def depth2normals(depth, focal, princpt) -> th.Tensor: + """Convert depth image to normal image using camera intrinsics + + Args: + depth: th.Tensor + [B, 1, H, W] depth image + + focal: th.Tensor + [B, 2, 2] camera focal lengths + + princpt: th.Tensor + [B, 2] camera principal points + + Returns: + th.Tensor: [B, 3, H, W] normal image + """ + + return xyz2normals(depth2xyz(depth, focal, princpt)) + + +def depth_discontuity_mask( + depth: th.Tensor, threshold: float = 40.0, kscale: float = 4.0, pool_ksize: int = 3 +) -> th.Tensor: + device = depth.device + + with th.no_grad(): + # TODO: pass the kernel?
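+ # The kernel below is a pair of 3x3 Sobel filters (x- and y-derivatives); + # texels whose depth-gradient norm exceeds `threshold` are marked as + # discontinuities, and the average-pool then dilates the mask.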
+ kernel = th.as_tensor( + [ + [[[-1, 0, 1], [-2, 0, 2], [-1, 0, 1]]], + [[[-1, -2, -1], [0, 0, 0], [1, 2, 1]]], + ], + dtype=th.float32, + device=device, + ) + + disc_mask = (th.norm(F.conv2d(depth, kernel, bias=None, padding=1), dim=1) > threshold)[ + :, np.newaxis + ] + disc_mask = ( + F.avg_pool2d(disc_mask.float(), pool_ksize, stride=1, padding=pool_ksize // 2) > 0.0 + ) + + return disc_mask diff --git a/visualize/ca_body/utils/geom_body.py b/visualize/ca_body/utils/geom_body.py new file mode 100644 index 0000000000000000000000000000000000000000..f6c36109e7f12f4900d9effd86d38e52f9c14d90 --- /dev/null +++ b/visualize/ca_body/utils/geom_body.py @@ -0,0 +1,702 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +import logging +from logging import Logger + +from typing import Any, Dict, Optional, Tuple, Union + +import igl + +import numpy as np +import torch as th + +import torch.nn as nn + +import torch.nn.functional as F + +from visualize.ca_body.utils.geom import ( + index_image_impaint, + make_uv_barys, + make_uv_vert_index, +) + +from trimesh import Trimesh +from trimesh.triangles import points_to_barycentric + +logger: Logger = logging.getLogger(__name__) + + +def face_normals_v2(v: th.Tensor, vi: th.Tensor, eps: float = 1e-5) -> th.Tensor: + pts = v[:, vi] + v0 = pts[:, :, 1] - pts[:, :, 0] + v1 = pts[:, :, 2] - pts[:, :, 0] + n = th.cross(v0, v1, dim=-1) + norm = th.norm(n, dim=-1, keepdim=True) + norm[norm < eps] = 1 + n /= norm + return n + + +def vert_normals_v2(v: th.Tensor, vi: th.Tensor, eps: float = 1.0e-5) -> th.Tensor: + fnorms = face_normals_v2(v, vi) + fnorms = fnorms[:, :, None].expand(-1, -1, 3, -1).reshape(fnorms.shape[0], -1, 3) + vi_flat = vi.view(1, -1).expand(v.shape[0], -1) + vnorms = th.zeros_like(v) + for j in range(3): + vnorms[..., j].scatter_add_(1, vi_flat, fnorms[..., j]) + norm = th.norm(vnorms, dim=-1, keepdim=True) + norm[norm < eps] = 1 + vnorms /= norm + return vnorms + + +def compute_neighbours( + n_verts: int, vi: th.Tensor, n_max_values: int = 10 +) -> Tuple[th.Tensor, th.Tensor]: + """Computes first-ring neighbours given vertices and faces.""" + n_vi = vi.shape[0] + + adj = {i: set() for i in range(n_verts)} + for i in range(n_vi): + for idx in vi[i]: + adj[idx] |= set(vi[i]) - {idx} + + nbs_idxs = np.tile(np.arange(n_verts)[:, np.newaxis], (1, n_max_values)) + nbs_weights = np.zeros((n_verts, n_max_values), dtype=np.float32) + + for idx in range(n_verts): + n_values = min(len(adj[idx]), n_max_values) + nbs_idxs[idx, :n_values] = np.array(list(adj[idx]))[:n_values] + nbs_weights[idx, :n_values] = -1.0 / n_values + + return nbs_idxs, nbs_weights + + +def compute_v2uv(n_verts: int, vi: th.Tensor, vti: th.Tensor, n_max: int = 4) -> th.Tensor: + """Computes mapping from vertex indices to texture indices. 
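+ A vertex may correspond to multiple texture locations along UV seams; up to + `n_max` of them are kept per vertex, and unused slots repeat the first index.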
+ + Args: + vi: [F, 3], triangles + vti: [F, 3], texture triangles + n_max: int, max number of texture locations + + Returns: + [n_verts, n_max], texture indices + """ + v2uv_dict = {} + for i_v, i_uv in zip(vi.reshape(-1), vti.reshape(-1)): + v2uv_dict.setdefault(i_v, set()).add(i_uv) + assert len(v2uv_dict) == n_verts + v2uv = np.zeros((n_verts, n_max), dtype=np.int32) + for i in range(n_verts): + vals = sorted(v2uv_dict[i]) + v2uv[i, :] = vals[0] + v2uv[i, : len(vals)] = np.array(vals) + return v2uv + + +def values_to_uv(values: th.Tensor, index_img: th.Tensor, bary_img: th.Tensor) -> th.Tensor: + uv_size = index_img.shape[0] + index_mask = th.all(index_img != -1, dim=-1) + idxs_flat = index_img[index_mask].to(th.int64) + bary_flat = bary_img[index_mask].to(th.float32) + # NOTE: here we assume `values` is [B, n_verts, C] + values_flat = th.sum(values[:, idxs_flat].permute(0, 3, 1, 2) * bary_flat, dim=-1) + values_uv = th.zeros( + values.shape[0], + values.shape[-1], + uv_size, + uv_size, + dtype=values.dtype, + device=values.device, + ) + values_uv[:, :, index_mask] = values_flat + return values_uv + + +def sample_uv( + values_uv: th.Tensor, + uv_coords: th.Tensor, + v2uv: Optional[th.Tensor] = None, + mode: str = "bilinear", + align_corners: bool = False, + flip_uvs: bool = False, +) -> th.Tensor: + batch_size = values_uv.shape[0] + + if flip_uvs: + uv_coords = uv_coords.clone() + uv_coords[:, 1] = 1.0 - uv_coords[:, 1] + + uv_coords_norm = (uv_coords * 2.0 - 1.0)[np.newaxis, :, np.newaxis].expand( + batch_size, -1, -1, -1 + ) + values = ( + F.grid_sample(values_uv, uv_coords_norm, align_corners=align_corners, mode=mode) + .squeeze(-1) + .permute((0, 2, 1)) + ) + + if v2uv is not None: + values_duplicate = values[:, v2uv] + values = values_duplicate.mean(2) + + # if return_var: + # values_var = values_duplicate.var(2) + # return values, values_var + + return values + + +def compute_tbn_uv( + tri_xyz: th.Tensor, tri_uv: th.Tensor, eps: float = 1e-5 +) -> Tuple[th.Tensor, th.Tensor, th.Tensor]: + """Compute tangents, bitangents, normals.
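+ The normal is taken from the triangle edges, the tangent follows the + direction of increasing u in UV space, and the bitangent completes the + frame as n x t.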
+ + Args: + tri_xyz: [B,N,3,3] vertex coordinates + tri_uv: [N, 3, 2] texture coordinates + + Returns: + tangents, bitangents, normals + """ + + tri_uv = tri_uv[np.newaxis] + + v01 = tri_xyz[:, :, 1] - tri_xyz[:, :, 0] + v02 = tri_xyz[:, :, 2] - tri_xyz[:, :, 0] + + normals = th.cross(v01, v02, dim=-1) + normals = normals / th.norm(normals, dim=-1, keepdim=True).clamp(min=eps) + + vt01 = tri_uv[:, :, 1] - tri_uv[:, :, 0] + vt02 = tri_uv[:, :, 2] - tri_uv[:, :, 0] + + f = th.tensor([1.0], device=tri_xyz.device) / ( + vt01[..., 0] * vt02[..., 1] - vt01[..., 1] * vt02[..., 0] + ) + + tangents = f[..., np.newaxis] * ( + v01 * vt02[..., 1][..., np.newaxis] - v02 * vt01[..., 1][..., np.newaxis] + ) + tangents = tangents / th.norm(tangents, dim=-1, keepdim=True).clamp(min=eps) + + bitangents = th.cross(normals, tangents, dim=-1) + bitangents = bitangents / th.norm(bitangents, dim=-1, keepdim=True).clamp(min=eps) + return tangents, bitangents, normals + + +class GeometryModule(nn.Module): + """This module encapsulates uv correspondences and vertex images.""" + + def __init__( + self, + vi: th.Tensor, + vt: th.Tensor, + vti: th.Tensor, + v2uv: th.Tensor, + uv_size: int, + flip_uv: bool = False, + impaint: bool = False, + impaint_threshold: float = 100.0, + device=None, + ) -> None: + super().__init__() + + self.register_buffer("vi", th.as_tensor(vi)) + self.register_buffer("vt", th.as_tensor(vt)) + self.register_buffer("vti", th.as_tensor(vti)) + self.register_buffer("v2uv", th.as_tensor(v2uv)) + + self.uv_size: int = uv_size + + index_image = make_uv_vert_index( + self.vt, + self.vi, + self.vti, + uv_shape=uv_size, + flip_uv=flip_uv, + ).cpu() + face_index, bary_image = make_uv_barys(self.vt, self.vti, uv_shape=uv_size, flip_uv=flip_uv) + if impaint: + # TODO: have an option to pre-compute this?
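+ # Impainting fills texels that lie outside every UV triangle with data + # from the nearest valid texel, so UV-space sampling near chart borders + # does not read uninitialized values.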
+ assert isinstance(uv_size, int) + if uv_size >= 1024: + logger.info("impainting index image might take a while for sizes >= 1024") + + index_image, bary_image = index_image_impaint( + index_image, bary_image, impaint_threshold + ) + + self.register_buffer("index_image", index_image.cpu()) + self.register_buffer("bary_image", bary_image.cpu()) + self.register_buffer("face_index_image", face_index.cpu()) + + def render_index_images( + self, uv_size: Union[Tuple[int, int], int], flip_uv: bool = False, impaint: bool = False + ) -> Tuple[th.Tensor, th.Tensor]: + index_image = make_uv_vert_index( + self.vt, self.vi, self.vti, uv_shape=uv_size, flip_uv=flip_uv + ) + _, bary_image = make_uv_barys(self.vt, self.vti, uv_shape=uv_size, flip_uv=flip_uv) + + if impaint: + index_image, bary_image = index_image_impaint( + index_image, + bary_image, + ) + + return index_image, bary_image + + def vn(self, verts: th.Tensor) -> th.Tensor: + return vert_normals_v2(verts, self.vi[np.newaxis].to(th.long)) + + def to_uv(self, values: th.Tensor) -> th.Tensor: + return values_to_uv(values, self.index_image, self.bary_image) + + def from_uv(self, values_uv: th.Tensor) -> th.Tensor: + # TODO: we need to sample this + return sample_uv(values_uv, self.vt, self.v2uv.to(th.long)) + + +def compute_view_cos(verts: th.Tensor, faces: th.Tensor, camera_pos: th.Tensor) -> th.Tensor: + vn = F.normalize(vert_normals_v2(verts, faces), dim=-1) + v2c = F.normalize(verts - camera_pos[:, np.newaxis], dim=-1) + return th.einsum("bnd,bnd->bn", vn, v2c) + + +def interpolate_values_mesh( + src_values: th.Tensor, src_faces: th.Tensor, idxs: th.Tensor, weights: th.Tensor +) -> th.Tensor: + """Interpolate values on the mesh.""" + assert src_faces.dtype == th.long, "index should be torch.long" + assert len(src_values.shape) in [2, 3], "supporting [N, F] and [B, N, F] only" + + if len(src_values.shape) == 2: + return (src_values[src_faces[idxs]] * weights[..., np.newaxis]).sum(dim=1) + else: # len(src_values.shape) == 3 + return (src_values[:, src_faces[idxs]] * weights[np.newaxis, ..., np.newaxis]).sum(dim=2) + + +def depth_discontuity_mask( + depth: th.Tensor, threshold: float = 40.0, kscale: float = 4.0, pool_ksize: int = 3 +) -> th.Tensor: + device = depth.device + + with th.no_grad(): + # TODO: pass the kernel?
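+ # Same Sobel-based discontinuity test as in geom.py: threshold the + # depth-gradient magnitude, then dilate the mask with average pooling.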
+ kernel = th.as_tensor( + [ + [[[-1, 0, 1], [-2, 0, 2], [-1, 0, 1]]], + [[[-1, -2, -1], [0, 0, 0], [1, 2, 1]]], + ], + dtype=th.float32, + device=device, + ) + + disc_mask = (th.norm(F.conv2d(depth, kernel, bias=None, padding=1), dim=1) > threshold)[ + :, np.newaxis + ] + disc_mask = ( + F.avg_pool2d(disc_mask.float(), pool_ksize, stride=1, padding=pool_ksize // 2) > 0.0 + ) + + return disc_mask + + +def convert_camera_parameters(Rt: th.Tensor, K: th.Tensor) -> Dict[str, th.Tensor]: + R = Rt[:, :3, :3] + t = -R.permute(0, 2, 1).bmm(Rt[:, :3, 3].unsqueeze(2)).squeeze(2) + return { + "campos": t, + "camrot": R, + "focal": K[:, :2, :2], + "princpt": K[:, :2, 2], + } + + +def closest_point(mesh: Trimesh, points: np.ndarray) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: + v = mesh.vertices + vi = mesh.faces + # pyre-ignore + dist, face_idxs, p = igl.point_mesh_squared_distance(points, v, vi) + return p, dist, face_idxs + + +def closest_point_barycentrics( + v: np.ndarray, vi: np.ndarray, points: np.ndarray +) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]: + """Given a 3D mesh and a set of query points, return closest point barycentrics + Args: + v: np.array (float) + [N, 3] mesh vertices + vi: np.array (int) + [N, 3] mesh triangle indices + points: np.array (float) + [M, 3] query points + Returns: + Tuple[approx, barys, interp_idxs, face_idxs] + approx: [M, 3] approximated (closest) points on the mesh + barys: [M, 3] barycentric weights that produce "approx" + interp_idxs: [M, 3] vertex indices for barycentric interpolation + face_idxs: [M] face indices for barycentric interpolation. interp_idxs = vi[face_idxs] + """ + mesh = Trimesh(vertices=v, faces=vi) + p, _, face_idxs = closest_point(mesh, points) + barys = points_to_barycentric(mesh.triangles[face_idxs], p) + b0, b1, b2 = np.split(barys, 3, axis=1) + + interp_idxs = vi[face_idxs] + v0 = v[interp_idxs[:, 0]] + v1 = v[interp_idxs[:, 1]] + v2 = v[interp_idxs[:, 2]] + approx = b0 * v0 + b1 * v1 + b2 * v2 + return approx, barys, interp_idxs, face_idxs + + +def make_closest_uv_barys( + vt: th.Tensor, + vti: th.Tensor, + uv_shape: Union[Tuple[int, int], int], + flip_uv: bool = True, + return_approx_dist: bool = False, +) -> Union[Tuple[th.Tensor, th.Tensor], Tuple[th.Tensor, th.Tensor, th.Tensor]]: + """Compute a UV-space barycentric map where each texel contains barycentric + coordinates for the closest point on a UV triangle. + Args: + vt: th.Tensor + Texture coordinates. Shape = [n_texcoords, 2] + vti: th.Tensor + Face texture coordinate indices. Shape = [n_faces, 3] + uv_shape: Tuple[int, int] or int + Shape of the texture map. (HxW) + flip_uv: bool + Whether or not to flip UV coordinates along the V axis (OpenGL -> numpy/pytorch convention). + return_approx_dist: bool + Whether or not to include the distance to the nearest point. + Returns: + th.Tensor: index_img: Face index image, shape [uv_shape[0], uv_shape[1]] + th.Tensor: Barycentric coordinate map, shape [uv_shape[0], uv_shape[1], 3] + """ + + if isinstance(uv_shape, int): + uv_shape = (uv_shape, uv_shape) + + if flip_uv: + # Flip here because texture coordinates in some of our topo files are + # stored in OpenGL convention with Y=0 on the bottom of the texture + # unlike numpy/torch arrays/tensors. + vt = vt.clone() + vt[:, 1] = 1 - vt[:, 1] + + # Texel to UV mapping (as per OpenGL linear filtering) + # https://www.khronos.org/registry/OpenGL/specs/gl/glspec46.core.pdf + # Sect.
8.14, page 261 + # uv=(0.5,0.5)/w is at the center of texel [0,0] + # uv=(w-0.5, w-0.5)/w is the center of texel [w-1,w-1] + # texel = floor(u*w - 0.5) + # u = (texel+0.5)/w + uv_grid = th.meshgrid( + th.linspace(0.5, uv_shape[0] - 1 + 0.5, uv_shape[0]) / uv_shape[0], + th.linspace(0.5, uv_shape[1] - 1 + 0.5, uv_shape[1]) / uv_shape[1], + ) # HxW, v,u + uv_grid = th.stack(uv_grid[::-1], dim=2) # HxW, u, v + + uv = uv_grid.reshape(-1, 2).data.to("cpu").numpy() + vth = np.hstack((vt, vt[:, 0:1] * 0 + 1)) + uvh = np.hstack((uv, uv[:, 0:1] * 0 + 1)) + approx, barys, interp_idxs, face_idxs = closest_point_barycentrics(vth, vti, uvh) + index_img = th.from_numpy(face_idxs.reshape(uv_shape[0], uv_shape[1])).long() + bary_img = th.from_numpy(barys.reshape(uv_shape[0], uv_shape[1], 3)).float() + + if return_approx_dist: + dist = np.linalg.norm(approx - uvh, axis=1) + dist = th.from_numpy(dist.reshape(uv_shape[0], uv_shape[1])).float() + return index_img, bary_img, dist + else: + return index_img, bary_img + + +def compute_tbn( + geom: th.Tensor, vt: th.Tensor, vi: th.Tensor, vti: th.Tensor +) -> Tuple[th.Tensor, th.Tensor, th.Tensor]: + """Computes tangent, bitangent, and normal vectors given a mesh. + Args: + geom: [N, n_verts, 3] th.Tensor + Vertex positions. + vt: [n_uv_coords, 2] th.Tensor + UV coordinates. + vi: [..., 3] th.Tensor + Face vertex indices. + vti: [..., 3] th.Tensor + Face UV indices. + Returns: + [..., 3] th.Tensors for T, B, N. + """ + + v0 = geom[:, vi[..., 0]] + v1 = geom[:, vi[..., 1]] + v2 = geom[:, vi[..., 2]] + vt0 = vt[vti[..., 0]] + vt1 = vt[vti[..., 1]] + vt2 = vt[vti[..., 2]] + + v01 = v1 - v0 + v02 = v2 - v0 + vt01 = vt1 - vt0 + vt02 = vt2 - vt0 + f = th.tensor([1.0], device=geom.device) / ( + vt01[None, ..., 0] * vt02[None, ..., 1] - vt01[None, ..., 1] * vt02[None, ..., 0] + ) + tangent = f[..., None] * th.stack( + [ + v01[..., 0] * vt02[None, ..., 1] - v02[..., 0] * vt01[None, ..., 1], + v01[..., 1] * vt02[None, ..., 1] - v02[..., 1] * vt01[None, ..., 1], + v01[..., 2] * vt02[None, ..., 1] - v02[..., 2] * vt01[None, ..., 1], + ], + dim=-1, + ) + tangent = F.normalize(tangent, dim=-1) + normal = F.normalize(th.cross(v01, v02, dim=3), dim=-1) + bitangent = F.normalize(th.cross(tangent, normal, dim=3), dim=-1) + + return tangent, bitangent, normal + + +def make_postex(v: th.Tensor, idxim: th.Tensor, barim: th.Tensor) -> th.Tensor: + return ( + barim[None, :, :, 0, None] * v[:, idxim[:, :, 0]] + + barim[None, :, :, 1, None] * v[:, idxim[:, :, 1]] + + barim[None, :, :, 2, None] * v[:, idxim[:, :, 2]] + ).permute( + 0, 3, 1, 2 + ) # B x 3 x H x W + + +def acos_safe_th(x: th.Tensor, eps: float = 1e-4) -> th.Tensor: + slope = th.arccos(th.as_tensor(1 - eps)) / th.as_tensor(eps) + # TODO: stop doing this allocation once sparse gradients with NaNs (like in + # th.where) are handled differently. 
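+ # For |x| <= 1 - eps the exact th.acos is used; beyond that the curve is + # continued linearly with slope acos(1 - eps) / eps, so values and + # gradients stay finite as |x| approaches 1.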
+ buf = th.empty_like(x) + good = abs(x) <= 1 - eps + bad = ~good + sign = th.sign(x.data[bad]) + buf[good] = th.acos(x[good]) + buf[bad] = th.acos(sign * (1 - eps)) - slope * sign * (abs(x[bad]) - 1 + eps) + return buf + + +def invRodrigues(R: th.Tensor, eps: float = 1e-8) -> th.Tensor: + """Computes the Rodrigues vectors r from the rotation matrices `R`""" + + # t = trace(R) + # theta = rotational angle + # [omega]_x = (R-R^T)/2 + # r = theta/sin(theta)*omega + assert R.shape[-2:] == (3, 3) + + t = R[..., 0, 0] + R[..., 1, 1] + R[..., 2, 2] + theta = acos_safe_th((t - 1) / 2) + omega = ( + th.stack( + ( + R[..., 2, 1] - R[..., 1, 2], + R[..., 0, 2] - R[..., 2, 0], + R[..., 1, 0] - R[..., 0, 1], + ), + -1, + ) + / 2 + ) + + # Edge Case 1: t >= 3 - eps + inv_sinc = theta / th.sin(theta) + inv_sinc_taylor_expansion = ( + 1 + + (1.0 / 6.0) * th.pow(theta, 2) + + (7.0 / 360.0) * th.pow(theta, 4) + + (31.0 / 15120.0) * th.pow(theta, 6) + ) + + # Edge Case 2: t <= -1 + eps + # From: https://math.stackexchange.com/questions/83874/efficient-and-accurate-numerical + # -implementation-of-the-inverse-rodrigues-rotatio + a = th.diagonal(R, 0, -2, -1).argmax(dim=-1) + b = (a + 1) % 3 + c = (a + 2) % 3 + + s = th.sqrt(R[..., a, a] - R[..., b, b] - R[..., c, c] + 1 + 1e-4) + v = th.zeros_like(omega) + v[..., a] = s / 2 + v[..., b] = (R[..., b, a] + R[..., a, b]) / (2 * s) + v[..., c] = (R[..., c, a] + R[..., a, c]) / (2 * s) + norm = th.norm(v, dim=-1, keepdim=True).to(v.dtype).clamp(min=eps) + pi_vnorm = np.pi * (v / norm) + + # use taylor expansion when R is close to an identity matrix (trace(R) ~= 3) + r = th.where( + t[:, None] > (3 - 1e-3), + inv_sinc_taylor_expansion[..., None] * omega, + th.where(t[:, None] < -1 + 1e-3, pi_vnorm, inv_sinc[..., None] * omega), + ) + + return r + + +def EulerXYZ_to_matrix(xyz: th.Tensor) -> th.Tensor: + # R = Rz(φ)Ry(θ)Rx(ψ) = [ + # cos θ cos φ sin ψ sin θ cos φ − cos ψ sin φ cos ψ sin θ cos φ + sin ψ sin φ + # cos θ sin φ sin ψ sin θ sin φ + cos ψ cos φ cos ψ sin θ sin φ − sin ψ cos φ + # − sin θ sin ψ cos θ cos ψ cos θ + # ] + ( + x, + y, + z, + ) = ( + xyz[..., 0:1], + xyz[..., 1:2], + xyz[..., 2:3], + ) + sinx, cosx = th.sin(x), th.cos(x) + siny, cosy = th.sin(y), th.cos(y) + sinz, cosz = th.sin(z), th.cos(z) + + r1 = th.cat( + ( + cosy * cosz, + sinx * siny * cosz + - cosx * sinz, # th.sin(x) * th.sin(y) * th.cos(z) - th.cos(x) * th.sin(z), + cosx * siny * cosz + + sinx * sinz, # th.cos(x) * th.sin(y) * th.cos(z) + th.sin(x) * th.sin(z) + ), + -1, + ) # [..., 3] + r3 = th.cat( + ( + -siny, # -th.sin(y), + sinx * cosy, # th.sin(x) * th.cos(y), + cosx * cosy, # th.cos(x) * th.cos(y) + ), + -1, + ) # [..., 3] + r2 = th.cross(r3, r1, dim=-1) + + R = th.cat((r1.unsqueeze(-2), r2.unsqueeze(-2), r3.unsqueeze(-2)), -2) + return R + + +def axisangle_to_matrix(rvec: th.Tensor) -> th.Tensor: + theta = th.sqrt(1e-5 + th.sum(th.pow(rvec, 2), dim=-1)) + rvec = rvec / theta[..., None] + costh = th.cos(theta) + sinth = th.sin(theta) + return th.stack( + ( + th.stack( + ( + th.pow(rvec[..., 0], 2) + (1.0 - th.pow(rvec[..., 0], 2)) * costh, + rvec[..., 0] * rvec[..., 1] * (1.0 - costh) - rvec[..., 2] * sinth, + rvec[..., 0] * rvec[..., 2] * (1.0 - costh) + rvec[..., 1] * sinth, + ), + dim=-1, + ), + th.stack( + ( + rvec[..., 0] * rvec[..., 1] * (1.0 - costh) + rvec[..., 2] * sinth, + th.pow(rvec[..., 1], 2) + (1.0 - th.pow(rvec[..., 1], 2)) * costh, + rvec[..., 1] * rvec[..., 2] * (1.0 - costh) - rvec[..., 0] * sinth, + ), + dim=-1, + ), + th.stack( + ( + rvec[..., 0] *
rvec[..., 2] * (1.0 - costh) - rvec[..., 1] * sinth, + rvec[..., 1] * rvec[..., 2] * (1.0 - costh) + rvec[..., 0] * sinth, + th.pow(rvec[..., 2], 2) + (1.0 - th.pow(rvec[..., 2], 2)) * costh, + ), + dim=-1, + ), + ), + dim=-2, + ) + + +def compute_view_cond_tbnrefl( + geom: th.Tensor, campos: th.Tensor, geo_fn: GeometryModule +) -> th.Tensor: + B = int(geom.shape[0]) + S = geo_fn.uv_size + device = geom.device + + # TODO: this can be pre-computed, or we can assume no invalid pixels? + mask = (geo_fn.index_image != -1).any(dim=-1) + idxs = geo_fn.index_image[mask] + tri_uv = geo_fn.vt[geo_fn.v2uv[idxs, 0].to(th.long)] + + tri_xyz = geom[:, idxs] + + t, b, n = compute_tbn_uv(tri_xyz, tri_uv) + + tbn_rot = th.stack((t, -b, n), dim=-2) + + tbn_rot_uv = th.zeros( + (B, S, S, 3, 3), + dtype=th.float32, + device=device, + ) + tbn_rot_uv[:, mask] = tbn_rot + view = F.normalize(campos[:, np.newaxis] - geom, dim=-1) + v_uv = geo_fn.to_uv(values=view) + tbn_uv = th.einsum("bhwij,bjhw->bihw", tbn_rot_uv, v_uv) + + # reflectance vector + n_uv = th.zeros((B, 3, S, S), dtype=th.float32, device=device) + n_uv[..., mask] = n.permute(0, 2, 1) + n_dot_v = (v_uv * n_uv).sum(dim=1, keepdim=True) + + r_uv = 2.0 * n_uv * n_dot_v - v_uv + + return th.cat([tbn_uv, r_uv], dim=1) + + +def get_barys_for_uvs( + topology: Dict[str, Any], uv_correspondences: np.ndarray +) -> Tuple[np.ndarray, np.ndarray]: + """ + Given a topology along with uv correspondences for the topology (e.g. keypoint correspondences in uv space), + this function will produce a tuple with the bary coordinates for each uv correspondence along with the vertex index. + + Parameters: + ---------- + topology: Input mesh that contains vertices, faces and texture coordinates info. + uv_correspondences: N X 2 uv locations that describe the uv correspondence to the topology + + Returns: + ------- + bary: (N X 3 float) + For each uv correspondence returns the bary coordinates for the uv pixel + triangles: (N X 3 int) + For each uv correspondence returns the face (i.e. vertices of the faces) for that pixel. + """ + vi: np.ndarray = topology["vi"] + vt: np.ndarray = topology["vt"] + vti: np.ndarray = topology["vti"] + + # # No up-down flip here + # Here we pad the texture coordinates and correspondences with a 0 + vth = np.hstack((vt[:, :2], vt[:, :1] * 0)) + kp_uv_h = np.hstack((uv_correspondences, uv_correspondences[:, :1] * 0)) + + _, kp_barys, _, face_indices = closest_point_barycentrics(vth, vti, kp_uv_h) + + kp_verts = vi[face_indices] + + return kp_barys, kp_verts diff --git a/visualize/ca_body/utils/image.py b/visualize/ca_body/utils/image.py new file mode 100644 index 0000000000000000000000000000000000000000..2951d57a574696e852da8233ad1331f01b3a7b74 --- /dev/null +++ b/visualize/ca_body/utils/image.py @@ -0,0 +1,977 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +import warnings +from typing import Dict, Final, List, Optional, overload, Sequence, Tuple, Union + +import cv2 +import numpy as np +import torch as th +import torch.nn.functional as thf + + +Color = Tuple[np.uint8, np.uint8, np.uint8] + +__DEFAULT_WB_SCALE: np.ndarray = np.array([1.05, 0.95, 1.45], dtype=np.float32) + + +@overload +def linear2srgb(img: th.Tensor, gamma: float = 2.4) -> th.Tensor: + ... + + +@overload +def linear2srgb(img: np.ndarray, gamma: float = 2.4) -> np.ndarray: + ...
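+# Both branches below implement the standard sRGB transfer function: +# f(x) = 12.92 * x for x <= 0.0031308, else 1.055 * x^(1/gamma) - 0.055. +# Illustrative usage (values shown approximately): +# linear2srgb(np.array([0.0, 0.5, 1.0])) # -> approx. array([0.0, 0.7354, 1.0])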
+ + +def linear2srgb( + img: Union[th.Tensor, np.ndarray], gamma: float = 2.4 +) -> Union[th.Tensor, np.ndarray]: + if isinstance(img, th.Tensor): + # Note: The following combines the linear and exponential parts of the sRGB curve without + # causing NaN values or gradients for negative inputs (where the curve would be linear). + linear_part = img * 12.92 # linear part of sRGB curve + exp_part = 1.055 * th.pow(th.clamp(img, min=0.0031308), 1 / gamma) - 0.055 + return th.where(img <= 0.0031308, linear_part, exp_part) + else: + linear_part = img * 12.92 + exp_part = 1.055 * (np.maximum(img, 0.0031308) ** (1 / gamma)) - 0.055 + return np.where(img <= 0.0031308, linear_part, exp_part) + + +@overload +def linear2color_corr(img: th.Tensor, dim: int = -1) -> th.Tensor: + ... + + +@overload +def linear2color_corr(img: np.ndarray, dim: int = -1) -> np.ndarray: + ... + + +def linear2color_corr( + img: Union[th.Tensor, np.ndarray], dim: int = -1 +) -> Union[th.Tensor, np.ndarray]: + """Applies ad-hoc 'color correction' to a linear RGB Mugsy image along + color channel `dim` and returns the gamma-corrected result.""" + + if dim == -1: + dim = len(img.shape) - 1 + + gamma = 2.0 + black = 3.0 / 255.0 + color_scale = [1.4, 1.1, 1.6] + + assert img.shape[dim] == 3 + if dim == -1: + dim = len(img.shape) - 1 + if isinstance(img, th.Tensor): + scale = th.FloatTensor(color_scale).view([3 if i == dim else 1 for i in range(img.dim())]) + img = img * scale.to(img) / 1.1 + return th.clamp( + (((1.0 / (1 - black)) * 0.95 * th.clamp(img - black, 0, 2)).pow(1.0 / gamma)) + - 15.0 / 255.0, + 0, + 2, + ) + else: + scale = np.array(color_scale).reshape([3 if i == dim else 1 for i in range(img.ndim)]) + img = img * scale / 1.1 + return np.clip( + (((1.0 / (1 - black)) * 0.95 * np.clip(img - black, 0, 2)) ** (1.0 / gamma)) + - 15.0 / 255.0, + 0, + 2, + ) + + +def linear2displayBatch( + val: th.Tensor, + gamma: float = 1.5, + wbscale: np.ndarray = __DEFAULT_WB_SCALE, + black: float = 5.0 / 255.0, + mode: str = "srgb", +) -> th.Tensor: + scaling: th.Tensor = th.from_numpy(wbscale).to(val.device) + val = val.float() / 255.0 * scaling[None, :, None, None] - black + if mode == "srgb": + val = linear2srgb(val, gamma=gamma) + else: + val = val ** th.tensor(1.0 / gamma) + return th.clamp(val, 0, 1) * 255.0 + + +def linear2color_corr_inv(img: th.Tensor, dim: int) -> th.Tensor: + """Inverse of linear2color_corr. + Removes ad-hoc 'color correction' from a gamma-corrected RGB Mugsy image + along color channel `dim` and returns the linear RGB result.""" + + gamma = 2.0 + black = 3.0 / 255.0 + color_scale = [1.4, 1.1, 1.6] + + assert img.shape[dim] == 3 + if dim == -1: + dim = len(img.shape) - 1 + scale = th.FloatTensor(color_scale).view([3 if i == dim else 1 for i in range(img.dim())]) + + img = (img + 15.0 / 255.0).pow(gamma) / (0.95 / (1 - black)) + black + + return th.clamp(img / (scale.to(img) / 1.1), 0, 1) + + +DEFAULT_CCM: List[List[float]] = [[1, 0, 0], [0, 1, 0], [0, 0, 1]] +DEFAULT_DC_OFFSET: List[float] = [0, 0, 0] +DEFAULT_GAMMA: float = 1.0 + + +@overload +def mapped2linear( + img: th.Tensor, + dim: int = -1, + ccm: Union[List[List[float]], th.Tensor, np.ndarray] = DEFAULT_CCM, + dc_offset: Union[List[float], th.Tensor, np.ndarray] = DEFAULT_DC_OFFSET, + gamma: float = DEFAULT_GAMMA, +) -> th.Tensor: + ... 
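+# The implementation following these overloads inverts the camera encoding in +# three steps: subtract the black level `dc_offset`, undo the encoding `gamma`, +# and apply the 3x3 color-correction matrix `ccm`, clamping to [0, 1]; pixels +# that were saturated on input are forced back to 1.0.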
+ + +@overload +def mapped2linear( + img: np.ndarray, + dim: int = -1, + ccm: Union[List[List[float]], th.Tensor, np.ndarray] = DEFAULT_CCM, + dc_offset: Union[List[float], th.Tensor, np.ndarray] = DEFAULT_DC_OFFSET, + gamma: float = DEFAULT_GAMMA, +) -> np.ndarray: + ... + + +def mapped2linear( + img: Union[th.Tensor, np.ndarray], + dim: int = -1, + ccm: Union[List[List[float]], th.Tensor, np.ndarray] = DEFAULT_CCM, + dc_offset: Union[List[float], th.Tensor, np.ndarray] = DEFAULT_DC_OFFSET, + gamma: float = DEFAULT_GAMMA, +) -> Union[th.Tensor, np.ndarray]: + """Maps a previously-characterized camera color space into a linear + color space. IMPORTANT: This function assumes RGB channel order, + not BGR. + + The characterization is specified by `ccm`, `dc_offset`, and `gamma`. + The dimension index of the color channel is specified with `dim` (de- + fault is -1 i.e. last dimension.) + + The function accepts both [0, 255] integer and [0, 1] float formats. + However, the return value is always floating point in [0, 1]-range. + + FIXME(swirajaya) - + This is a reimplementation of `RGBMapping::map_to_lin_rgb` in + `//arvr/projects/codec_avatar/calibration/colorcal:colorspace`. To + figure out a C++ / Py binding solution that works for both DGX and + PROD, as well as `np.ndarray` and `th.Tensor`. + + Args: + @param img the image in RGB, as th.Tensor or np.ndarray + @param dim dimension of color channel + @param ccm 3x3 color correction matrix + @param dc_offset camera black level/dc offset + @param gamma encoding gamma + + Returns: + @return the corrected image as float th.Tensor or np.ndarray + """ + + assert img.shape[dim] == 3 + if dim == -1: + dim = len(img.shape) - 1 + + ndim: int = img.dim() if th.is_tensor(img) else img.ndim + pixel_shape: List[int] = [3 if i == dim else 1 for i in range(ndim)] + + # Summation indices for CCM matrix multiplication + # e.g. [sum_j] CCM_ij * Img_kljnpq -> ImgCorr_klinpq if say, dim == 2 + ein_ccm: List[int] = [0, 1] + ein_inp: List[int] = [1 if i == dim else i + 2 for i in range(ndim)] + ein_out: List[int] = [0 if i == dim else i + 2 for i in range(ndim)] + + EPS: float = 1e-7 + if isinstance(img, th.Tensor): + if th.is_floating_point(img): + input_saturated = img > (1.0 - EPS) + imgf = img.double() + else: + input_saturated = img == 255 + imgf = img.double() / 255.0 + dc_offset = th.DoubleTensor(dc_offset).view(pixel_shape).to(img.device) + img_linear = th.clamp( + imgf - dc_offset, + min=EPS, + ).pow(1.0 / gamma) + img_corr = th.clamp( # CCM * img_linear + th.einsum(th.DoubleTensor(ccm).to(img.device), ein_ccm, img_linear, ein_inp, ein_out), + min=0.0, + max=1.0, + ) + img_corr = th.where(input_saturated, 1.0, img_corr) + else: + if np.issubdtype(img.dtype, np.floating): + input_saturated = img > (1.0 - EPS) + imgf = img.astype(float) + else: + input_saturated = img == 255 + imgf = img.astype(float) / 255.0 + dc_offset = np.array(dc_offset).reshape(pixel_shape) + img_linear = np.clip(imgf - dc_offset, a_min=EPS, a_max=None) ** (1.0 / gamma) + img_corr: np.ndarray = np.clip( # CCM * img_linear + np.einsum(np.array(ccm), ein_ccm, img_linear, ein_inp, ein_out), + a_min=0.0, + a_max=1.0, + ) + img_corr: np.ndarray = np.where(input_saturated, 1.0, img_corr) + + return img_corr + + +@overload +def mapped2srgb( + img: th.Tensor, + dim: int = -1, + ccm: Union[List[List[float]], th.Tensor, np.ndarray] = DEFAULT_CCM, + dc_offset: Union[List[float], th.Tensor, np.ndarray] = DEFAULT_DC_OFFSET, + gamma: float = DEFAULT_GAMMA, +) -> th.Tensor: + ... 
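+# mapped2srgb composes the two conversions above: camera space to linear RGB +# via mapped2linear, then linear RGB to sRGB via linear2srgb with the standard +# sRGB gamma of 2.4.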
+ + +@overload +def mapped2srgb( + img: np.ndarray, + dim: int = -1, + ccm: Union[List[List[float]], th.Tensor, np.ndarray] = DEFAULT_CCM, + dc_offset: Union[List[float], th.Tensor, np.ndarray] = DEFAULT_DC_OFFSET, + gamma: float = DEFAULT_GAMMA, +) -> np.ndarray: + ... + + +def mapped2srgb( + img: Union[th.Tensor, np.ndarray], + dim: int = -1, + ccm: Union[List[List[float]], th.Tensor, np.ndarray] = DEFAULT_CCM, + dc_offset: Union[List[float], th.Tensor, np.ndarray] = DEFAULT_DC_OFFSET, + gamma: float = DEFAULT_GAMMA, +) -> Union[th.Tensor, np.ndarray]: + """Maps a previously-characterized camera color space into sRGB + color space (assuming mapped to Rec709). IMPORTANT: This function + assumes RGB channel order, not BGR. + + The characterization is specified by `ccm`, `dc_offset`, and `gamma`. + The dimension index of the color channel is specified with `dim` + (default is -1 i.e. last dimension.) + """ + # Note: The redundant if-statement below is due to a Pyre bug. + # Currently Pyre fails to handle arguments into overloaded functions that are typed + # as a union of the overloaded method parameter types. + if isinstance(img, th.Tensor): + return linear2srgb(mapped2linear(img, dim, ccm, dc_offset, gamma), gamma=2.4) + else: + return linear2srgb(mapped2linear(img, dim, ccm, dc_offset, gamma), gamma=2.4) + + +@overload +def srgb2linear(img: th.Tensor, gamma: float = 2.4) -> th.Tensor: + ... + + +@overload +def srgb2linear(img: np.ndarray, gamma: float = 2.4) -> np.ndarray: + ... + + +def srgb2linear( + img: Union[th.Tensor, np.ndarray], gamma: float = 2.4 +) -> Union[th.Tensor, np.ndarray]: + linear_part = img / 12.92 # linear part of sRGB curve + if isinstance(img, th.Tensor): + # Note: The following combines the linear and exponential parts of the sRGB curve without + # causing NaN values or gradients for negative inputs (where the curve would be linear). + exp_part = th.pow((th.clamp(img, min=0.04045) + 0.055) / 1.055, gamma) + return th.where(img <= 0.04045, linear_part, exp_part) + else: + exp_part = ((np.maximum(img, 0.04045) + 0.055) / 1.055) ** gamma + return np.where(img <= 0.04045, linear_part, exp_part) + + +def scale_diff_image(diff_img: th.Tensor) -> th.Tensor: + """Takes a difference image and returns a new version scaled s.t.
its values + are remapped from [-IMG_MAX, IMG_MAX] -> [0, IMG_MAX] where IMG_MAX is + either 1 or 255 depending on the range of the input.""" + + mval = abs(diff_img).max().item() + pix_range = (0, 128 if mval > 1 else 0.5, 255 if mval > 1 else 1) + return (pix_range[1] * (diff_img / mval) + pix_range[1]).clamp(pix_range[0], pix_range[2]) + + +class LaplacianTexture(th.nn.Module): + def __init__( + self, n_levels: int, n_channels: int = 3, init_scalar: Optional[float] = None + ) -> None: + super().__init__() + self.n_levels = n_levels + self.n_channels = n_channels + if init_scalar is not None: + init_scalar = init_scalar / n_levels + + pyr_texs = [] + for level in range(n_levels): + if init_scalar is not None: + pyr_texs.append( + th.nn.Parameter(init_scalar * th.ones(1, n_channels, 2**level, 2**level)) + ) + else: + pyr_texs.append(th.nn.Parameter(th.zeros(1, n_channels, 2**level, 2**level))) + + self.pyr_texs = th.nn.ParameterList(pyr_texs) + + def forward(self) -> th.Tensor: + tex = self.pyr_texs[0] + for level in range(1, self.n_levels): + tex = ( + thf.interpolate(tex, scale_factor=2, mode="bilinear", align_corners=False) + + self.pyr_texs[level] + ) + return tex + + def init_from_tex(self, tex: th.Tensor) -> None: + ds = [tex] + for level in range(1, self.n_levels): + ds.append(thf.avg_pool2d(tex, 2**level)) + ds = ds[::-1] + + self.pyr_texs[0].data[:] = ds[0].data + for level in range(1, self.n_levels): + self.pyr_texs[level].data[:] = ds[level].data - thf.interpolate( + ds[level - 1].data, + scale_factor=2, + mode="bilinear", + align_corners=False, + ) + + def render_grad(self) -> th.Tensor: + gtex = self.pyr_texs[0].grad + for level in range(1, self.n_levels): + gtex = ( + thf.interpolate(gtex, scale_factor=2, mode="bilinear", align_corners=False) + + self.pyr_texs[level].grad + ) + return gtex + + +morph_cache: Dict[Tuple[int, th.device], th.Tensor] = {} + + +def dilate(x: th.Tensor, ks: int) -> th.Tensor: + assert (ks % 2) == 1 + orig_dtype = x.dtype + + if x.dtype in [th.bool, th.int64, th.int32]: + x = x.float() + if x.dim() == 3: + x = x[:, None] + + if (ks, x.device) in morph_cache: + w = morph_cache[(ks, x.device)] + else: + w = th.ones(1, 1, ks, ks, device=x.device) + morph_cache[(ks, x.device)] = w + + return (thf.conv2d(x, w, padding=ks // 2) > 0).to(dtype=orig_dtype) + + +def erode(x: th.Tensor, ks: int) -> th.Tensor: + if x.dtype is th.bool: + flip_x = ~x + else: + flip_x = 1 - x + + flip_out = dilate(flip_x, ks) + + if flip_out.dtype is th.bool: + return ~flip_out + else: + return 1 - flip_out + + +def smoothstep(e0: np.ndarray, e1: np.ndarray, x: np.ndarray) -> np.ndarray: + t = np.clip(((x - e0) / (e1 - e0)), 0, 1) + return t * t * (3.0 - 2.0 * t) + + +def smootherstep(e0: np.ndarray, e1: np.ndarray, x: np.ndarray) -> np.ndarray: + t = np.clip(((x - e0) / (e1 - e0)), 0, 1) + return (t**3) * (t * (t * 6 - 15) + 10) + + +def tensor2rgbjet( + tensor: th.Tensor, x_max: Optional[float] = None, x_min: Optional[float] = None +) -> np.ndarray: + """Converts a tensor to a uint8 image Numpy array with `cv2.COLORMAP_JET` applied. + + Args: + tensor: Input tensor to be converted. + + x_max: The output color will be normalized as (x-x_min)/(x_max-x_min)*255. + x_max = tensor.max() if None is given. + + x_min: The output color will be normalized as (x-x_min)/(x_max-x_min)*255. + x_min = tensor.min() if None is given.
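+ Example (illustrative; `err` stands for any HxW tensor): + >>> heatmap = tensor2rgbjet(err, x_min=0.0, x_max=1.0) # HxWx3 uint8, JET-colored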
+ """ + return cv2.applyColorMap(tensor2rgb(tensor, x_max=x_max, x_min=x_min), cv2.COLORMAP_JET) + + +def tensor2rgb( + tensor: th.Tensor, x_max: Optional[float] = None, x_min: Optional[float] = None +) -> np.ndarray: + """Converts a tensor to an uint8 image Numpy array. + + Args: + tensor: Input tensor to be converted. + + x_max: The output color will be normalized as (x-x_min)/(x_max-x_min)*255. + x_max = tensor.max() if None is given. + + x_min: The output color will be normalized as (x-x_min)/(x_max-x_min)*255. + x_min = tensor.min() if None is given. + """ + x = tensor.data.cpu().numpy() + if x_min is None: + x_min = x.min() + if x_max is None: + x_max = x.max() + + gain = 255 / np.clip(x_max - x_min, 1e-3, None) + x = (x - x_min) * gain + x = x.clip(0.0, 255.0) + x = x.astype(np.uint8) + return x + + +def tensor2image( + tensor: th.Tensor, + x_max: Optional[float] = 1.0, + x_min: Optional[float] = 0.0, + mode: str = "rgb", + mask: Optional[th.Tensor] = None, + label: Optional[str] = None, +) -> np.ndarray: + """Converts a tensor to an image. + + Args: + tensor: Input tensor to be converted. + The shape of the tensor should be CxHxW or HxW. The channels are assumed to be in RGB format. + + x_max: The output color will be normalized as (x-x_min)/(x_max-x_min)*255. + x_max = tensor.max() if None is explicitly given. + + x_min: The output color will be normalized as (x-x_min)/(x_max-x_min)*255. + x_min = tensor.min() if None is explicitly given. + + mode: Can be `rgb` or `jet`. If `jet` is given, cv2.COLORMAP_JET would be applied. + + mask: Optional mask to be applied to the input tensor. + + label: Optional text to be added to the output image. + """ + tensor = tensor.detach() + + # Apply mask + if mask is not None: + tensor = tensor * mask + + if len(tensor.size()) == 2: + tensor = tensor[None] + + # Make three channel image + assert len(tensor.size()) == 3, tensor.size() + n_channels = tensor.shape[0] + if n_channels == 1: + tensor = tensor.repeat(3, 1, 1) + elif n_channels != 3: + raise ValueError(f"Unsupported number of channels {n_channels}.") + + # Convert to display format + img = tensor.permute(1, 2, 0) + + if mode == "rgb": + img = tensor2rgb(img, x_max=x_max, x_min=x_min) + elif mode == "jet": + # `cv2.applyColorMap` assumes input format in BGR + img[:, :, :3] = img[:, :, [2, 1, 0]] + img = tensor2rgbjet(img, x_max=x_max, x_min=x_min) + # convert back to rgb + img[:, :, :3] = img[:, :, [2, 1, 0]] + else: + raise ValueError(f"Unsupported mode {mode}.") + + if label is not None: + img = add_label_centered(img, label) + + return img + + +def add_label_centered( + img: np.ndarray, + text: str, + font_scale: float = 1.0, + thickness: int = 2, + alignment: str = "top", + color: Tuple[int, int, int] = (0, 255, 0), +) -> np.ndarray: + """Adds label to an image + + Args: + img: Input image. + + text: Text to be added on the image. + + font_scale: The scale of the font. + + thickness: Thinkness of the lines. + + alignment: Can be `top` or `buttom`. The alignment of the text. + + color: The color of the text. Assumes the same color space as `img`. 
+ """ + font = cv2.FONT_HERSHEY_SIMPLEX + textsize = cv2.getTextSize(text, font, font_scale, thickness=thickness)[0] + img = img.astype(np.uint8).copy() + + if alignment == "top": + cv2.putText( + img, + text, + ((img.shape[1] - textsize[0]) // 2, 50), + font, + font_scale, + color, + thickness=thickness, + lineType=cv2.LINE_AA, + ) + elif alignment == "bottom": + cv2.putText( + img, + text, + ((img.shape[1] - textsize[0]) // 2, img.shape[0] - textsize[1]), + font, + font_scale, + color, + thickness=thickness, + lineType=cv2.LINE_AA, + ) + else: + raise ValueError("Unknown text alignment") + + return img + + +def get_color_map(name: str = "COLORMAP_JET") -> np.ndarray: + """Return a 256 x 3 array representing a color map from OpenCV.""" + color_map = np.arange(256, dtype=np.uint8).reshape(1, 256) + color_map = cv2.applyColorMap(color_map, getattr(cv2, name)) + return color_map[0, :, ::-1].copy() + + +def feature2rgb(x: Union[th.Tensor, np.ndarray], scale: int = -1) -> np.ndarray: + # expect 3 dim tensor + b = (x[::3].sum(0)).data.cpu().numpy()[:, :, None] + g = (x[1::3].sum(0)).data.cpu().numpy()[:, :, None] + r = (x[2::3].sum(0)).data.cpu().numpy()[:, :, None] + rgb = np.concatenate((b, g, r), axis=2) + rgb_norm = (rgb - rgb.min()) / (rgb.max() - rgb.min()) + rgb_norm = (rgb_norm * 255).astype(np.uint8) + if scale != -1: + rgb_norm = cv2.resize(rgb_norm, None, fx=scale, fy=scale, interpolation=cv2.INTER_CUBIC) + return rgb_norm + + +def kpts2delta(kpts: th.Tensor, size: Sequence[int]) -> th.Tensor: + # kpts: B x N x 2 + # Return: B x N x H x W x 2, 2D vectors from each grid location to kpts. + h, w = size + grid = th.meshgrid( + th.arange(h, dtype=kpts.dtype, device=kpts.device), + th.arange(w, dtype=kpts.dtype, device=kpts.device), + indexing="xy", + ) + delta = kpts.unflatten(-1, (1, 1, 2)) - th.stack(grid, dim=-1).unflatten(0, (1, 1, h)) + return delta + + +def kpts2heatmap(kpts: th.Tensor, size: Sequence[int], sigma: int = 7) -> th.Tensor: + # kpts: B x N x 2 + dist = kpts2delta(kpts, size).square().sum(-1) + heatmap = th.exp(-dist / (2 * sigma**2)) + return heatmap + + +def make_image_grid( + data: Union[th.Tensor, Dict[str, th.Tensor]], + keys_to_draw: Optional[List[str]] = None, + scale_factor: Optional[float] = None, + draw_labels: bool = True, + grid_size: Optional[Tuple[int, int]] = None, +) -> np.ndarray: + """Arranges a tensor of images (or a dict with labeled image tensors) into + a grid. + + Params: + data: Either a single image tensor [N, {1, 3}, H, W] containing images to + arrange in a grid layout, or a dict with tensors of the same shape. + If a dict is given, assume each entry in the dict is a batch of + images, and form a grid where each cell contains one sample from + each entry in the dict. Images should be in the range [0, 255]. + + keys_to_draw: Select which keys in the dict should be included in each + grid cell. If none are given, draw all keys. + + scale_factor: Optional scale factor applied to each image. + + draw_labels: Whether or not to draw the keys on each image. + + grid_size: Optionally specify the size of the resulting grid. + """ + + if isinstance(data, th.Tensor): + data = {"": data} + keys_to_draw = [""] + + if keys_to_draw is None: + keys_to_draw = list(data.keys()) + + n_cells = data[keys_to_draw[0]].shape[0] + img_h = data[keys_to_draw[0]].shape[2] + img_w = data[keys_to_draw[0]].shape[3] + + # Resize all images to match the shape of the first image, and convert + # Greyscale -> RGB. 
+
+
+def make_image_grid(
+    data: Union[th.Tensor, Dict[str, th.Tensor]],
+    keys_to_draw: Optional[List[str]] = None,
+    scale_factor: Optional[float] = None,
+    draw_labels: bool = True,
+    grid_size: Optional[Tuple[int, int]] = None,
+) -> np.ndarray:
+    """Arranges a tensor of images (or a dict with labeled image tensors) into
+    a grid.
+
+    Args:
+        data: Either a single image tensor [N, {1, 3}, H, W] containing images to
+            arrange in a grid layout, or a dict with tensors of the same shape.
+            If a dict is given, assume each entry in the dict is a batch of
+            images, and form a grid where each cell contains one sample from
+            each entry in the dict. Images should be in the range [0, 255].
+
+        keys_to_draw: Select which keys in the dict should be included in each
+            grid cell. If none are given, draw all keys.
+
+        scale_factor: Optional scale factor applied to each image.
+
+        draw_labels: Whether or not to draw the keys on each image.
+
+        grid_size: Optionally specify the size of the resulting grid.
+    """
+
+    if isinstance(data, th.Tensor):
+        data = {"": data}
+        keys_to_draw = [""]
+
+    if keys_to_draw is None:
+        keys_to_draw = list(data.keys())
+
+    n_cells = data[keys_to_draw[0]].shape[0]
+    img_h = data[keys_to_draw[0]].shape[2]
+    img_w = data[keys_to_draw[0]].shape[3]
+
+    # Resize all images to match the shape of the first image, and convert
+    # Greyscale -> RGB.
+    for key in keys_to_draw:
+        if data[key].shape[1] == 1:
+            data[key] = data[key].expand(-1, 3, -1, -1)
+        elif data[key].shape[1] != 3:
+            raise ValueError(
+                f"Image data must all be of shape [N, {{1, 3}}, H, W]. Got shape {data[key].shape}."
+            )
+
+        data[key] = data[key].clamp(min=0, max=255)
+        if data[key].shape[2] != img_h or data[key].shape[3] != img_w:
+            data[key] = thf.interpolate(data[key], size=(img_h, img_w), mode="area")
+
+        if scale_factor is not None:
+            data[key] = thf.interpolate(data[key], scale_factor=scale_factor, mode="area")
+
+    # Make an image for each grid cell by labeling and concatenating a sample
+    # from each key in the data.
+    cell_imgs = []
+    for i in range(n_cells):
+        imgs = [data[key][i].byte().cpu().numpy().transpose(1, 2, 0) for key in keys_to_draw]
+        imgs = [np.ascontiguousarray(img) for img in imgs]
+        if draw_labels:
+            for img, label in zip(imgs, keys_to_draw):
+                cv2.putText(
+                    img, label, (31, 31), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 0, 0), 2, cv2.LINE_AA
+                )
+                cv2.putText(
+                    img,
+                    label,
+                    (30, 30),
+                    cv2.FONT_HERSHEY_SIMPLEX,
+                    0.75,
+                    (255, 255, 255),
+                    2,
+                    cv2.LINE_AA,
+                )
+        cell_imgs.append(np.concatenate(imgs, axis=1))
+
+    cell_h, cell_w = cell_imgs[0].shape[:2]
+
+    # Find the most-square grid layout.
+    if grid_size is not None:
+        gh, gw = grid_size
+        if gh * gw < n_cells:
+            raise ValueError(
+                f"Requested grid size ({gh}, {gw}) (H, W) cannot hold {n_cells} images."
+            )
+    else:
+        best_diff = np.inf
+        best_side = np.inf
+        best_leftover = np.inf
+        gw = 0
+        for gh_ in range(1, n_cells + 1):
+            for gw_ in range(1, n_cells + 1):
+                if gh_ * gw_ < n_cells:
+                    continue
+
+                h = gh_ * cell_h
+                w = gw_ * cell_w
+                diff = abs(h - w)
+                max_side = max(gh_, gw_)
+                leftover = gh_ * gw_ - n_cells
+
+                if diff <= best_diff and max_side <= best_side and leftover <= best_leftover:
+                    gh = gh_
+                    gw = gw_
+                    best_diff = diff
+                    best_side = max_side
+                    best_leftover = leftover
+
+    # Put the images into the grid.
+    img = np.zeros((gh * cell_h, gw * cell_w, 3), dtype=np.uint8)
+    for i in range(n_cells):
+        gr = i // gw
+        gc = i % gw
+        img[gr * cell_h : (gr + 1) * cell_h, gc * cell_w : (gc + 1) * cell_w] = cell_imgs[i]
+
+    return img
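+
+
+# Editor's sketch (not part of the original API): arranging a small batch of
+# images into a near-square grid; values are expected in [0, 255].
+def _demo_make_image_grid() -> None:
+    imgs = th.rand(5, 3, 32, 32) * 255
+    grid = make_image_grid(imgs, draw_labels=False)
+    assert grid.dtype == np.uint8 and grid.shape[-1] == 3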
+
+
+def make_image_grid_batched(
+    data: Dict[str, th.Tensor],
+    max_row_hight: Optional[int] = None,
+    draw_labels: bool = True,
+    input_is_in_0_1: bool = False,
+) -> np.ndarray:
+    """A simpler version of `make_image_grid` that works on the whole batch at once.
+
+    Use case: a dict containing diagnostic output. All tensors in the dict have a
+    shape of [N, {1, 3}, H, W] where N coincides for all entries. The goal is to
+    arrange the images into a grid so that each column corresponds to a key, and
+    each row corresponds to an index in the batch.
+
+    Example:
+        Data:
+            dict = {"A": A, "B": B, "C": C}
+
+        Grid:
+            | A[0] | B[0] | C[0] |
+            | A[1] | B[1] | C[1] |
+            | A[2] | B[2] | C[2] |
+
+    The grid is arranged such that:
+        - Each row corresponds to an index in the batch.
+        - Each column corresponds to a key in the dict.
+        - For each row, images are resized so that the vertical edge matches the
+          largest image.
+
+    Args:
+        data (Dict[str, th.Tensor]): Diagnostic data.
+        max_row_hight (int): The maximum allowed height of a row.
+        draw_labels (bool): Whether the keys should be drawn as labels.
+        input_is_in_0_1 (bool): If True, input data is assumed to be in the range
+            0..1, otherwise in the range 0..255.
+    """
+    data_list = list(data.values())
+    keys_to_draw = data.keys()
+
+    if not all(x.ndim == 4 and (x.shape[1] == 1 or x.shape[1] == 3) for x in data_list):
+        raise ValueError(
+            f"Image data must all be of shape [N, {{1, 3}}, H, W]. Got shapes {[x.shape for x in data_list]}."
+        )
+
+    if not all(x.shape[0] == data_list[0].shape[0] for x in data_list):
+        raise ValueError("Batch sizes must be the same.")
+
+    data_list = resize_to_match(data_list, edge="vertical", max_size=max_row_hight)
+
+    if not all(x.shape[2] == data_list[0].shape[2] for x in data_list):
+        raise ValueError("Heights must be the same.")
+
+    with th.no_grad():
+        # Make all images contain 3 channels
+        data_list = [x.expand(-1, 3, -1, -1) if x.shape[1] == 1 else x for x in data_list]
+
+        # Convert to byte
+        scale = 255.0 if input_is_in_0_1 else 1.0
+        data_list = [x.mul(scale).round().clamp(min=0, max=255).byte() for x in data_list]
+
+        # Convert to numpy and make it BHWC
+        data_list = [x.cpu().numpy().transpose(0, 2, 3, 1) for x in data_list]
+
+    rows = []
+    # Iterate by key
+    for j, label in zip(range(len(data_list)), keys_to_draw):
+        col = []
+        # Iterate by batch index
+        for i in range(data_list[0].shape[0]):
+            img = np.ascontiguousarray(data_list[j][i])
+            if draw_labels:
+                cv2.putText(
+                    img, label, (31, 31), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 0, 0), 2, cv2.LINE_AA
+                )
+                cv2.putText(
+                    img,
+                    label,
+                    (30, 30),
+                    cv2.FONT_HERSHEY_SIMPLEX,
+                    0.75,
+                    (255, 255, 255),
+                    2,
+                    cv2.LINE_AA,
+                )
+            col.append(img)
+        rows.append(np.concatenate(col, axis=0))
+    return np.concatenate(rows, axis=1)
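+
+
+# Editor's sketch (not part of the original API): one column per key, one row
+# per batch index; inputs here are in [0, 1], hence `input_is_in_0_1=True`.
+def _demo_make_image_grid_batched() -> None:
+    data = {"rgb": th.rand(2, 3, 16, 16), "mask": th.rand(2, 1, 16, 16)}
+    grid = make_image_grid_batched(data, draw_labels=False, input_is_in_0_1=True)
+    assert grid.shape == (2 * 16, 2 * 16, 3)  # rows: batch, cols: keys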
+
+
+def resize_to_match(
+    tensors: List[th.Tensor],
+    edge: str = "long",
+    mode: str = "nearest",
+    max_size: Optional[int] = None,
+) -> List[th.Tensor]:
+    """Resizes a list of image tensors s.t. a chosen edge ("long", "short",
+    "vertical", or "horizontal") matches that edge on the largest image in
+    the list."""
+
+    assert edge in {"short", "long", "vertical", "horizontal"}
+    max_shape = [max(x) for x in zip(*[t.shape for t in tensors])]
+
+    resized_tensors = []
+    for tensor in tensors:
+        if edge == "long":
+            edge_idx = np.argmax(tensor.shape[-2:])
+        elif edge == "short":
+            edge_idx = np.argmin(tensor.shape[-2:])
+        elif edge == "vertical":
+            edge_idx = 0
+        else:  # edge == "horizontal":
+            edge_idx = 1
+
+        target_size = max_shape[-2:][edge_idx]
+        if max_size is not None:
+            target_size = min(max_size, max_shape[-2:][edge_idx])
+
+        if tensor.shape[-2:][edge_idx] != target_size:
+            ratio = target_size / tensor.shape[-2:][edge_idx]
+            tensor = thf.interpolate(
+                tensor,
+                scale_factor=ratio,
+                align_corners=False if mode in ["bilinear", "bicubic"] else None,
+                recompute_scale_factor=True,
+                mode=mode,
+            )
+        resized_tensors.append(tensor)
+    return resized_tensors
+
+
+def draw_text(
+    canvas: th.Tensor,
+    text: Union[str, List[str]],
+    loc: Tuple[float, float],
+    font: int = cv2.FONT_HERSHEY_SIMPLEX,
+    scale: float = 2,
+    color: Tuple[float, float, float] = (0, 0, 0),
+    thickness: int = 3,
+) -> th.Tensor:
+    """Helper used by Rosetta to draw text on tensors using OpenCV."""
+    device = canvas.device
+    canvas_new = canvas.cpu().numpy().transpose(0, 2, 3, 1)
+    for i in range(canvas_new.shape[0]):
+        image = canvas_new[i].copy()
+        if isinstance(text, list):
+            cv2.putText(image, text[i], loc, font, scale, color, thickness)
+        else:
+            cv2.putText(image, text, loc, font, scale, color, thickness)
+        canvas_new[i] = image
+    canvas_tensor = th.ByteTensor(canvas_new.transpose(0, 3, 1, 2)).to(device)
+    return canvas_tensor
+
+
+# TODO(T153410551): Deprecate this function
+def visualize_scalar_image(
+    img: np.ndarray,
+    min_val: float,
+    val_range: float,
+    color_map: int = cv2.COLORMAP_JET,
+    convert_to_rgb: bool = True,
+) -> np.ndarray:
+    """
+    Visualizes a scalar image using the specified color map.
+    """
+    scaled_img = (img.astype(np.float32) - min_val) / val_range
+    vis = cv2.applyColorMap((scaled_img * 255).clip(0, 255).astype(np.uint8), color_map)
+    if convert_to_rgb:
+        vis = cv2.cvtColor(vis, cv2.COLOR_BGR2RGB)
+    return vis
+
+
+def process_depth_image(
+    depth_img: np.ndarray, depth_min: float, depth_max: float, depth_err_range: float
+) -> Tuple[np.ndarray, np.ndarray]:
+    """
+    Process the depth image within the range for visualization.
+    """
+    valid_pixels = np.logical_and(depth_img > 0, depth_img <= depth_max)
+    new_depth_img = np.zeros_like(depth_img)
+    new_depth_img[valid_pixels] = depth_img[valid_pixels]
+    err_image = np.abs(new_depth_img - depth_img).astype(np.float32) / depth_err_range
+    return new_depth_img, err_image
+
+
+def draw_keypoints(img: np.ndarray, kpt: np.ndarray, kpt_w: float) -> np.ndarray:
+    """
+    Draw keypoints on the given image.
+    """
+    x, y = kpt[:, 0], kpt[:, 1]
+    w = kpt[:, 2] * kpt_w
+    col = np.array([-255.0, 255.0, -255.0]) * w[:, np.newaxis]
+    pts = np.column_stack((x.astype(np.int32), y.astype(np.int32)))
+    for pt, c in zip(pts, col):
+        cv2.circle(img, tuple(pt), 2, tuple(c), -1)
+
+    return img
+ """ + return tensor.permute(0, 2, 3, 1).detach().cpu().numpy() + + +def draw_keypoints_with_color( + image: np.ndarray, keypoints_uvw: np.ndarray, color: Color +) -> np.ndarray: + """Renders keypoints onto a given image with particular color. + Supports overlaps. + """ + assert len(image.shape) == 3 + assert image.shape[-1] == 3 + coords = keypoints_uvw[:, :2].astype(np.int32) + tmp_img = np.zeros(image.shape, dtype=np.float32) + for uv in coords: + cv2.circle(tmp_img, tuple(uv), 2, color, -1) + return (image + tmp_img).clip(0.0, 255.0).astype(np.uint8) + + +def draw_contour(img: np.ndarray, contour_corrs: np.ndarray) -> np.ndarray: + """ + Draw Contour on given image. + """ + for corr in contour_corrs: + mesh_uv = corr[1:3] + seg_uv = corr[3:] + + x, y = int(mesh_uv[0] + 0.5), int(mesh_uv[1] + 0.5) + cv2.circle(img, (x, y), 1, (255, 0, 0), -1) + + cv2.line( + img, + (int(mesh_uv[0]), int(mesh_uv[1])), + (int(seg_uv[0]), int(seg_uv[1])), + (-255, -255, 255), + 1, + ) + + return img diff --git a/visualize/ca_body/utils/lbs.py b/visualize/ca_body/utils/lbs.py new file mode 100644 index 0000000000000000000000000000000000000000..4764f54afe7d8e231d565a188944857e19f2c943 --- /dev/null +++ b/visualize/ca_body/utils/lbs.py @@ -0,0 +1,828 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +import json +import numpy as np +import re + +import torch +import torch as th +import torch.nn as nn +import torch.nn.functional as F + +from typing import Dict, Any + +from visualize.ca_body.utils.quaternion import Quaternion + +from pytorch3d.transforms import matrix_to_euler_angles + + +from typing import Optional, Tuple + +import logging + +logger = logging.getLogger(__name__) + + +class ParameterTransform(nn.Module): + def __init__(self, lbs_cfg_dict: Dict[str, Any]): + super().__init__() + + # self.pose_names = list(lbs_cfg_dict["joint_names"]) + self.channel_names = list(lbs_cfg_dict["channel_names"]) + transform_offsets = torch.FloatTensor(lbs_cfg_dict["transform_offsets"]) + transform = torch.FloatTensor(lbs_cfg_dict["transform"]) + self.limits = lbs_cfg_dict["limits"] + + self.nr_scaling_params = lbs_cfg_dict["nr_scaling_params"] + self.nr_position_params = lbs_cfg_dict["nr_position_params"] + self.nr_total_params = self.nr_scaling_params + self.nr_position_params + + self.register_buffer("transform_offsets", transform_offsets) + self.register_buffer("transform", transform) + + def forward(self, pose: th.Tensor) -> th.Tensor: + """ + :param pose: raw pose inputs, shape (batch_size, len(pose_names)) + :return: skeleton parameters, shape (batch_size, len(channel_names)*nr_skeleton_joints) + """ + return self.transform.mm(pose.t()).t() + self.transform_offsets + + +class LinearBlendSkinning(nn.Module): + def __init__( + self, + model_json: Dict[str, Any], + lbs_config_dict: Dict[str, Any], + num_max_skin_joints: int =8, + scale_path: str =None, + ): + super().__init__() + + model = model_json + self.param_transform = ParameterTransform(lbs_config_dict) + + self.joint_names = [] + + nr_joints = len(model["Skeleton"]["Bones"]) + joint_parents = torch.zeros((nr_joints, 1), dtype=torch.int64) + joint_rotation = torch.zeros((nr_joints, 4), dtype=torch.float32) + joint_offset = torch.zeros((nr_joints, 3), dtype=torch.float32) + for idx, bone in enumerate(model["Skeleton"]["Bones"]): + self.joint_names.append(bone["Name"]) + if bone["Parent"] > nr_joints: + 
+
+
+class LinearBlendSkinning(nn.Module):
+    def __init__(
+        self,
+        model_json: Dict[str, Any],
+        lbs_config_dict: Dict[str, Any],
+        num_max_skin_joints: int = 8,
+        scale_path: Optional[str] = None,
+    ):
+        super().__init__()
+
+        model = model_json
+        self.param_transform = ParameterTransform(lbs_config_dict)
+
+        self.joint_names = []
+
+        nr_joints = len(model["Skeleton"]["Bones"])
+        joint_parents = torch.zeros((nr_joints, 1), dtype=torch.int64)
+        joint_rotation = torch.zeros((nr_joints, 4), dtype=torch.float32)
+        joint_offset = torch.zeros((nr_joints, 3), dtype=torch.float32)
+        for idx, bone in enumerate(model["Skeleton"]["Bones"]):
+            self.joint_names.append(bone["Name"])
+            if bone["Parent"] > nr_joints:
+                joint_parents[idx] = -1
+            else:
+                joint_parents[idx] = bone["Parent"]
+            joint_rotation[idx, :] = torch.FloatTensor(bone["PreRotation"])
+            joint_offset[idx, :] = torch.FloatTensor(bone["TranslationOffset"])
+
+        skin_model = model["SkinnedModel"]
+        mesh_vertices = torch.FloatTensor(skin_model["RestPositions"])
+        mesh_normals = torch.FloatTensor(skin_model["RestVertexNormals"])
+
+        weights = torch.FloatTensor([e[1] for e in skin_model["SkinningWeights"]])
+        indices = torch.LongTensor([e[0] for e in skin_model["SkinningWeights"]])
+        offsets = torch.LongTensor(skin_model["SkinningOffsets"])
+
+        nr_vertices = len(offsets) - 1
+        skin_weights = torch.zeros((nr_vertices, num_max_skin_joints), dtype=torch.float32)
+        skin_indices = torch.zeros((nr_vertices, num_max_skin_joints), dtype=torch.int64)
+
+        offset_right = offsets[1:]
+        for offset in range(num_max_skin_joints):
+            offset_left = offsets[:-1] + offset
+            skin_weights[offset_left < offset_right, offset] = weights[
+                offset_left[offset_left < offset_right]
+            ]
+            skin_indices[offset_left < offset_right, offset] = indices[
+                offset_left[offset_left < offset_right]
+            ]
+
+        mesh_faces = torch.IntTensor(skin_model["Faces"]["Indices"]).view(-1, 3)
+        mesh_texture_faces = torch.IntTensor(skin_model["Faces"]["TextureIndices"]).view(-1, 3)
+        mesh_texture_coords = torch.FloatTensor(skin_model["TextureCoordinates"]).view(-1, 2)
+
+        # zero_pose = torch.zeros((1, len(self.param_transform.pose_names)), dtype=torch.float32)
+        zero_pose = torch.zeros((1, self.param_transform.nr_total_params), dtype=torch.float32)
+        bind_state = solve_skeleton_state(
+            self.param_transform(zero_pose), joint_offset, joint_rotation, joint_parents
+        )
+
+        # self.register_buffer('mesh_vertices', mesh_vertices)  # we want to train on rest pose
+        # self.mesh_vertices = nn.Parameter(mesh_vertices, requires_grad=optimize_mesh)
+        self.register_buffer("mesh_vertices", mesh_vertices)
+
+        self.register_buffer("joint_parents", joint_parents)
+        self.register_buffer("joint_rotation", joint_rotation)
+        self.register_buffer("joint_offset", joint_offset)
+        self.register_buffer("mesh_normals", mesh_normals)
+        self.register_buffer("mesh_faces", mesh_faces)
+        self.register_buffer("mesh_texture_faces", mesh_texture_faces)
+        self.register_buffer("mesh_texture_coords", mesh_texture_coords)
+        self.register_buffer("skin_weights", skin_weights)
+        self.register_buffer("skin_indices", skin_indices)
+        self.register_buffer("bind_state", bind_state)
+        self.register_buffer("rest_vertices", mesh_vertices)
+
+        # pre-compute joint weights
+        self.register_buffer("joints_weights", self.compute_joints_weights())
+
+        if scale_path is not None:
+            scale = np.loadtxt(scale_path).astype(np.float32)[np.newaxis]
+            scale = scale[:, 0, :] if len(scale.shape) == 3 else scale
+            self.register_buffer("scale", torch.tensor(scale))
+
+    @property
+    def num_verts(self):
+        return self.mesh_vertices.size(0)
+
+    @property
+    def num_joints(self):
+        return self.joint_offset.size(0)
+
+    @property
+    def num_params(self):
+        return self.skin_weights.shape[-1]
+
+    def compute_rigid_transforms(
+        self, global_pose: th.Tensor, local_pose: th.Tensor, scale: th.Tensor
+    ):
+        """Returns rigid transforms."""
+        params = torch.cat([global_pose, local_pose, scale], axis=-1)
+        params = self.param_transform(params)
+        return solve_skeleton_state(
+            params, self.joint_offset, self.joint_rotation, self.joint_parents
+        )
+
+    def compute_rigid_transforms_matrix(
+        self, global_pose: th.Tensor, local_pose: th.Tensor, scale: th.Tensor
+    ):
+        params = torch.cat([global_pose, local_pose, scale], axis=-1)
+        params = self.param_transform(params)
+        states = solve_skeleton_state(
+            params, self.joint_offset, self.joint_rotation, self.joint_parents
+        )
+        return states_to_matrix(self.bind_state, states)
+
+    def compute_joints_weights(self, drop_empty=False):
+        """Compute weights per joint given flattened weights-indices."""
+        idxs_verts = torch.arange(self.num_verts)[:, np.newaxis].expand(-1, self.num_params)
+        weights_joints = torch.zeros(
+            (self.num_joints, self.num_verts),
+            dtype=torch.float32,
+            device=self.skin_weights.device,
+        )
+        weights_joints[self.skin_indices, idxs_verts] = self.skin_weights
+
+        if drop_empty:
+            weights_joints = weights_joints[weights_joints.sum(axis=-1).abs() > 0]
+
+        return weights_joints
+
+    def compute_root_rigid_transform(self, poses: th.Tensor) -> Tuple[th.Tensor, th.Tensor]:
+        """Get a transform of the root joint."""
+        # NOTE: `nr_total_params` lives on the parameter transform, not on this
+        # module itself.
+        scales = torch.zeros(
+            (poses.shape[0], self.param_transform.nr_total_params - poses.shape[1]),
+            dtype=poses.dtype,
+            device=poses.device,
+        )
+        params = torch.cat((poses, scales), 1)
+        states = solve_skeleton_state(
+            self.param_transform(params),
+            self.joint_offset,
+            self.joint_rotation,
+            self.joint_parents,
+        )
+        mat = states_to_matrix(self.bind_state, states)
+        return mat[:, 1, :, 3], mat[:, 1, :, :3]
+
+    def compute_relative_rigid_transforms(
+        self, global_pose: th.Tensor, local_pose: th.Tensor, scale: th.Tensor
+    ):
+        params = torch.cat([global_pose, local_pose, scale], axis=-1)
+        params = self.param_transform(params)
+
+        batch_size = params.shape[0]
+
+        joint_offset = self.joint_offset
+        joint_rotation = self.joint_rotation
+
+        # batch processing for parameters
+        jp = params.view((batch_size, -1, 7))
+        lt = jp[:, :, 0:3] + joint_offset.unsqueeze(0)
+        lr = Quaternion.batchMul(joint_rotation.unsqueeze(0), Quaternion.batchFromXYZ(jp[:, :, 3:6]))
+        return torch.cat([lt, lr], axis=-1)
+
+    def skinning(self, bind_state: th.Tensor, vertices: th.Tensor, target_states: th.Tensor):
+        """
+        Apply skinning to a set of states.
+
+        Args:
+            bind_state: 1 x nr_joint x 8 bind state
+            vertices: 1 x nr_vertices x 3 vertices
+            target_states: batch_size x nr_joint x 8 current states
+
+        Returns:
+            batch_size x nr_vertices x 3 skinned vertices
+        """
+        assert target_states.size()[1:] == bind_state.size()[1:]
+
+        mat = states_to_matrix(bind_state, target_states)
+
+        # apply skinning to vertices
+        vs = torch.matmul(
+            mat[:, self.skin_indices],
+            torch.cat((vertices, torch.ones_like(vertices[:, :, 0]).unsqueeze(2)), dim=2)
+            .unsqueeze(2)
+            .unsqueeze(4),
+        )
+        ws = self.skin_weights.unsqueeze(2).unsqueeze(3)
+        res = (vs * ws).sum(dim=2).squeeze(3)
+
+        return res
+
+    def unpose(self, poses: th.Tensor, scales: th.Tensor, verts: th.Tensor):
+        """
+        :param poses: 100 (tx ty tz rx ry rz) params in blueman
+        :param scales: 29 (s) params in blueman
+        :return:
+        """
+        # check shape of poses and scales
+        params = torch.cat((poses, scales), 1)
+        states = solve_skeleton_state(
+            self.param_transform(params),
+            self.joint_offset,
+            self.joint_rotation,
+            self.joint_parents,
+        )
+
+        return self.unskinning(self.bind_state, states, verts)
+
+    def unskinning(self, bind_state: th.Tensor, target_states: th.Tensor, verts: th.Tensor):
+        """Apply inverse skinning to a set of states.
+
+        Args:
+            bind_state: [B, NJ, 8] - bind state
+            target_states: [B, NJ, 8] - current states
+            verts: [B, V, 3] - vertices
+
+        Returns:
+            batch_size x nr_vertices x 3 unskinned vertices
+        """
+        assert target_states.size()[1:] == bind_state.size()[1:]
+
+        mat = states_to_matrix(bind_state, target_states)
+
+        ws = self.skin_weights[None, :, :, None, None]
+        sum_mat = (mat[:, self.skin_indices] * ws).sum(dim=2)
+
+        sum_mat4x4 = torch.cat((sum_mat, torch.zeros_like(sum_mat[:, :, :1, :])), dim=2)
+        sum_mat4x4[:, :, 3, 3] = 1.0
+
+        verts_4d = torch.cat((verts, torch.ones_like(verts[:, :, :1])), dim=2).unsqueeze(3)
+
+        resmesh = []
+        for i in range(sum_mat.shape[0]):
+            newmat = sum_mat4x4[i, :, :, :].contiguous()
+            invnewmat = newmat.inverse()
+            tmpvets = invnewmat.matmul(verts_4d[i])
+            resmesh.append(tmpvets.unsqueeze(0))
+        resmesh = torch.cat(resmesh)
+
+        return resmesh.squeeze(3)[..., :3].contiguous()
+
+    def forward(
+        self, poses: th.Tensor, scales: th.Tensor, verts_unposed: Optional[th.Tensor] = None
+    ) -> th.Tensor:
+        """
+        Args:
+            poses: [B, NP] - pose parameters
+            scales: [B, NS] - additional scaling params
+            verts_unposed: [B, N, 3] - unposed vertices
+
+        Returns:
+            [B, N, 3] - posed vertices
+        """
+        params = torch.cat((poses, scales), 1)
+        params_transformed = self.param_transform(params)
+        states = solve_skeleton_state(
+            params_transformed,
+            self.joint_offset,
+            self.joint_rotation,
+            self.joint_parents,
+        )
+        if verts_unposed is None:
+            mesh = self.skinning(self.bind_state, self.mesh_vertices.unsqueeze(0), states)
+        else:
+            mesh = self.skinning(self.bind_state, verts_unposed, states)
+        return mesh
+
+
+def solve_skeleton_state(
+    param: th.Tensor, joint_offset: th.Tensor, joint_rotation: th.Tensor, joint_parents: th.Tensor
+):
+    """
+    :param param: batch_size x (7*nr_skeleton_joints) ParamTransform outputs.
+    :return: batch_size x nr_skeleton_joints x 8 skeleton states;
+        8 stands for 3 translation + 4 rotation (quat) + 1 scale
+    """
+    batch_size = param.shape[0]
+    # batch processing for parameters
+    jp = param.view((batch_size, -1, 7))
+    lt = jp[:, :, 0:3] + joint_offset.unsqueeze(0)
+    lr = Quaternion.batchMul(joint_rotation.unsqueeze(0), Quaternion.batchFromXYZ(jp[:, :, 3:6]))
+    ls = torch.pow(
+        torch.tensor([2.0], dtype=torch.float32, device=param.device),
+        jp[:, :, 6].unsqueeze(2),
+    )
+
+    state = []
+    for index, parent in enumerate(joint_parents):
+        if int(parent) != -1:
+            gr = Quaternion.batchMul(state[parent][:, :, 3:7], lr[:, index, :].unsqueeze(1))
+            gt = (
+                Quaternion.batchRot(
+                    state[parent][:, :, 3:7],
+                    lt[:, index, :].unsqueeze(1) * state[parent][:, :, 7].unsqueeze(2),
+                )
+                + state[parent][:, :, 0:3]
+            )
+            gs = state[parent][:, :, 7].unsqueeze(2) * ls[:, index, :].unsqueeze(1)
+            state.append(torch.cat((gt, gr, gs), dim=2))
+        else:
+            state.append(
+                torch.cat((lt[:, index, :], lr[:, index, :], ls[:, index, :]), dim=1).view(
+                    (batch_size, 1, 8)
+                )
+            )
+
+    return torch.cat(state, dim=1)
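+
+
+# Editor's note (illustrative): each row of the state returned above packs
+# [tx, ty, tz, qx, qy, qz, qw, s], so e.g. `states[:, j, 3:7]` is the global
+# rotation quaternion of joint j and `states[:, j, 7]` its global scale.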
+
+
+def states_to_matrix(bind_state: th.Tensor, target_states: th.Tensor, return_transform: bool = False):
+    # multiply bind inverse with states
+    br = Quaternion.batchInvert(bind_state[:, :, 3:7])
+    bs = bind_state[:, :, 7].unsqueeze(2).reciprocal()
+    bt = Quaternion.batchRot(br, -bind_state[:, :, 0:3]) * bs
+
+    # applying rotation
+    tr = Quaternion.batchMul(target_states[:, :, 3:7], br)
+    # applying scaling
+    ts = target_states[:, :, 7].unsqueeze(2) * bs
+    # applying transformation
+    tt = (
+        Quaternion.batchRot(target_states[:, :, 3:7], bt * target_states[:, :, 7].unsqueeze(2))
+        + target_states[:, :, 0:3]
+    )
+
+    # convert to matrices
+    twx = 2.0 * tr[:, :, 0] * tr[:, :, 3]
+    twy = 2.0 * tr[:, :, 1] * tr[:, :, 3]
+    twz = 2.0 * tr[:, :, 2] * tr[:, :, 3]
+    txx = 2.0 * tr[:, :, 0] * tr[:, :, 0]
+    txy = 2.0 * tr[:, :, 1] * tr[:, :, 0]
+    txz = 2.0 * tr[:, :, 2] * tr[:, :, 0]
+    tyy = 2.0 * tr[:, :, 1] * tr[:, :, 1]
+    tyz = 2.0 * tr[:, :, 2] * tr[:, :, 1]
+    tzz = 2.0 * tr[:, :, 2] * tr[:, :, 2]
+    mat = torch.stack(
+        (
+            torch.stack((1.0 - (tyy + tzz), txy + twz, txz - twy), dim=2) * ts,
+            torch.stack((txy - twz, 1.0 - (txx + tzz), tyz + twx), dim=2) * ts,
+            torch.stack((txz + twy, tyz - twx, 1.0 - (txx + tyy)), dim=2) * ts,
+            tt,
+        ),
+        dim=3,
+    )
+    if return_transform:
+        return mat, (tr, tt, ts)
+    return mat
+
+
+def get_influence_map(
+    transform_raw: th.Tensor, pose_length=None, num_params_per_joint=7, eps=1.0e-6
+):
+    num_joints = transform_raw.shape[0] // num_params_per_joint
+    num_params = transform_raw.shape[-1]
+
+    if pose_length is None:
+        pose_length = num_params
+    assert pose_length <= num_params
+
+    transform_raw = transform_raw.reshape((num_joints, num_params_per_joint, num_params))
+
+    return [
+        torch.where(torch.abs(transform_raw[i, :, :pose_length]) > eps)[1].tolist()
+        for i in range(num_joints)
+    ]
+
+
+def compute_weights_joints_slow(lbs_weights, lbs_indices, num_joints):
+    num_verts = lbs_weights.shape[0]
+    weights_joints = torch.zeros((num_joints, num_verts), dtype=torch.float32)
+    for i in range(num_verts):
+        idx = lbs_indices[i, :]
+        weights_joints[idx, i] = lbs_weights[i, :]
+    return weights_joints
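+
+
+# Editor's sketch (not part of the original API): `get_influence_map` on a toy
+# transform with one joint (7 channels) and two pose parameters, where only
+# parameter 0 drives the joint.
+def _demo_get_influence_map() -> None:
+    transform = th.zeros(7, 2)
+    transform[3, 0] = 1.0  # channel rx driven by parameter 0
+    assert get_influence_map(transform) == [[0]]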
"limits": [minVal, maxVal], + "weight": weightVal, + } + limits.append(limit) + else: + if parameterIndex is None: + logger.info(f"Unknown parameterIndex : {fullname}\n {line} {paramNames} ") + continue + limit = { + "type": "LimitMinMaxParameter", + "str": fullname, + "parameterIndex": parameterIndex, + "limits": [minVal, maxVal], + "weight": weightVal, + } + limits.append(limit) + # continue the remaining file + continue + + # check for parameterset definitions and ignore + if line.find("parameterset") != -1: + continue + + # use regex to parse definition + r = re.search("(\w+).(\w+)\s*=\s*(.*)", line) + if r is None: + continue + + if len(r.groups()) != 3: + logger.info("Failed to parse parameter configuration line :\n " + line) + continue + + # find joint name and parameter + jointIndex = findJointIndex(r.groups()[0]) + if jointIndex is None: + logger.info( + "Unknown joint name " + + r.groups()[0] + + " in parameter configuration line :\n " + + line + ) + continue + + # find matching channel name + channelIndex = channelNames.index(r.groups()[1]) + if channelIndex is None: + logger.info( + "Unknown joint channel name " + + r.groups()[1] + + " in parameter configuration line :\n " + + line + ) + continue + + valueIndex = jointIndex * 7 + channelIndex + + # parse parameters + parameterList = r.groups()[2].split("+") + for parameterPair in parameterList: + parameterPair = parameterPair.strip() + + r = re.search("\s*([+-]?[0-9]*\.?[0-9]*)\s\*\s(\w+)\s*", parameterPair) + if r is None or len(r.groups()) != 2: + logger.info( + "Malformed parameter description " + + parameterPair + + " in parameter configuration line :\n " + + line + ) + continue + + val = float(r.groups()[0]) + parameter = r.groups()[1] + + # check if parameter exists + parameterIndex = findParameterIndex(parameter) + if parameterIndex is None: + # no, create new parameter entry + parameterIndex = len(paramNames) + paramNames.append(parameter) + transform_triplets.append((valueIndex, parameterIndex, val)) + + # set (dense) parameter_transformation matrix + transform = np.zeros((len(channelNames) * len(joint_names), len(paramNames)), dtype=np.float32) + for i, j, v in transform_triplets: + transform[i, j] = v + + outputs = { + "model_param_names": paramNames, + "joint_names": joint_names, + "channel_names": channelNames, + "limits": limits, + "transform": transform, + "transform_offsets": np.zeros((1, len(channelNames) * len(joint_names)), dtype=np.float32), + } + # set number of scales automatically + if nr_scaling_params is None: + outputs.update(nr_scaling_params=len([s for s in paramNames if s.startswith("scale")])) + outputs.update(nr_position_params=len(paramNames) - outputs["nr_scaling_params"]) + + return outputs + + +def compute_normalized_pose_quat(lbs, local_pose, scale): + """Computes a normalized representation of the pose in quaternion space. + This is a delta between the per-joint local transformation and the bind state. + + Returns: + [B, NJ, 4] - normalized rotations + """ + B = local_pose.shape[0] + global_pose_zero = th.zeros((B, 6), dtype=th.float32, device=local_pose.device) + params = lbs.param_transform(th.cat([global_pose_zero, local_pose, scale], axis=-1)) + params = params.reshape(B, -1, 7) + # applying rotation + # TODO: what is this? 
+
+
+def compute_normalized_pose_quat(lbs, local_pose, scale):
+    """Computes a normalized representation of the pose in quaternion space.
+    This is a delta between the per-joint local transformation and the bind state.
+
+    Returns:
+        [B, NJ, 4] - normalized rotations
+    """
+    B = local_pose.shape[0]
+    global_pose_zero = th.zeros((B, 6), dtype=th.float32, device=local_pose.device)
+    params = lbs.param_transform(th.cat([global_pose_zero, local_pose, scale], axis=-1))
+    params = params.reshape(B, -1, 7)
+    # applying rotation
+    # TODO: what is this?
+    rot_quat = Quaternion.batchMul(lbs.joint_rotation[np.newaxis], Quaternion.batchFromXYZ(params[:, :, 3:6]))
+    # removing the bind state
+    bind_rot_quat = Quaternion.batchInvert(lbs.bind_state[:, :, 3:7])
+    return Quaternion.batchMul(rot_quat, bind_rot_quat)
+
+
+def compute_root_transform_cuda(lbs_fn, poses, verts=None):
+    # NOTE: verts is not really necessary,
+    # NOTE: should be used in conjunction with LBSCuda
+    B = poses.shape[0]
+
+    # NOTE: scales are zero (!)
+    _, _, _, state_t, state_r, state_s = lbs_fn(poses, vertices=verts)
+
+    bind_r = lbs_fn.joint_state_r_zero[np.newaxis, 1].expand(B, -1, -1)
+    bind_t = lbs_fn.joint_state_t_zero[np.newaxis, 1].expand(B, -1)
+
+    R_root = th.matmul(state_r[:, 1], bind_r)
+    t_root = (
+        th.matmul(state_r[:, 1], (bind_t * state_s[:, 1])[..., np.newaxis])[..., 0] + state_t[:, 1]
+    )
+
+    return R_root, t_root
+
+
+# def compute_joints_weights(lbs_fn: LinearBlendSkinningCuda, drop_empty: bool = False) -> th.Tensor:
+#     device = lbs_fn.skin_indices.device
+#     idxs_verts = th.arange(lbs_fn.nr_vertices)[:, np.newaxis].to(device)
+#     weights_joints = th.zeros(
+#         (lbs_fn.nr_joints, lbs_fn.nr_vertices),
+#         dtype=th.float32,
+#         device=lbs_fn.skin_indices.device,
+#     )
+#     weights_joints[lbs_fn.skin_indices, idxs_verts] = lbs_fn.skin_weights
+#     if drop_empty:
+#         weights_joints = weights_joints[weights_joints.sum(axis=-1).abs() > 0]
+#     return weights_joints
+
+
+# def compute_pose_regions(lbs_fn: LinearBlendSkinningCuda) -> np.ndarray:
+#     """Computes pose regions given a linear blend skinning function.
+
+#     Returns:
+#         np.ndarray of boolean masks of shape [nr_params, n_rvertices]
+#     """
+
+#     weights = compute_joints_weights(lbs_fn).cpu().numpy()
+
+#     n_pos = lbs_fn.nr_position_params
+
+#     param_masks = np.zeros((n_pos, weights.shape[-1]))
+
+#     children = {j: [] for j in range(lbs_fn.nr_joints)}
+#     parents = {j: None for j in range(lbs_fn.nr_joints)}
+#     prec = {j: [] for j in range(lbs_fn.nr_joints)}
+#     for j in range(lbs_fn.nr_joints):
+#         parent_index = int(lbs_fn.joint_parents[j])
+#         if parent_index == -1:
+#             continue
+#         children[parent_index].append(j)
+#         parents[j] = parent_index
+#         prec[j] = [parent_index, int(lbs_fn.joint_parents[parent_index])]
+
+#     # get parameters for each joint
+#     # j_to_p = get_influence_map(lbs_fn.param_transform.transform, n_pos)
+#     j_to_p = get_influence_map(lbs_fn.param_transform, n_pos)
+
+#     # get all the joints
+#     p_to_j = [[] for i in range(n_pos)]
+#     for j, pidx in enumerate(j_to_p):
+#         for p in pidx:
+#             if j not in p_to_j[p]:
+#                 p_to_j[p].append(j)
+
+#     for p, jidx in enumerate(p_to_j):
+#         param_masks[p] = weights[jidx].sum(axis=0)
+#         if not np.any(param_masks[p]):
+#             assert len(jidx) == 1
+#             jidx_c = children[jidx[0]][:]
+#             for jc in jidx_c[:]:
+#                 jidx_c += children[jc]
+#             param_masks[p] = weights[jidx_c].sum(axis=0)
+#     return param_masks > 0.0
+
+
+def compute_pose_regions_legacy(lbs_fn) -> np.ndarray:
+    """Computes pose regions given a linear blend skinning function."""
+    weights = lbs_fn.joints_weights.cpu().numpy()
+
+    n_pos = lbs_fn.param_transform.nr_position_params
+
+    param_masks = np.zeros((n_pos, lbs_fn.joints_weights.shape[-1]))
+
+    children = {j: [] for j in range(lbs_fn.num_joints)}
+    parents = {j: None for j in range(lbs_fn.num_joints)}
+    prec = {j: [] for j in range(lbs_fn.num_joints)}
+    for j in range(lbs_fn.num_joints):
+        parent_index = int(lbs_fn.joint_parents[j, 0])
+        if parent_index == -1:
+            continue
+        children[parent_index].append(j)
+        parents[j] = parent_index
+        prec[j] = [parent_index, int(lbs_fn.joint_parents[parent_index, 0])]
+
+    # get parameters for each joint
+    j_to_p = get_influence_map(lbs_fn.param_transform.transform, n_pos)
+
+    # get all the joints
+    p_to_j = [[] for i in range(n_pos)]
+    for j, pidx in enumerate(j_to_p):
+        for p in pidx:
+            if j not in p_to_j[p]:
+                p_to_j[p].append(j)
+
+    for p, jidx in enumerate(p_to_j):
+        param_masks[p] = weights[jidx].sum(axis=0)
+        if not np.any(param_masks[p]):
+            assert len(jidx) == 1
+            jidx_c = children[jidx[0]][:]
+            for jc in jidx_c[:]:
+                jidx_c += children[jc]
+            param_masks[p] = weights[jidx_c].sum(axis=0)
+    return param_masks > 0.0
+
+
+def compute_pose_mask_uv(lbs_fn, geo_fn, uv_size, ksize=25):
+    device = geo_fn.index_image.device
+    # NOTE: `compute_pose_regions` only exists commented-out above; the legacy
+    # variant is the one actually defined in this file.
+    pose_regions = compute_pose_regions_legacy(lbs_fn)
+    pose_regions = (
+        th.as_tensor(pose_regions[6:], dtype=th.float32).permute(1, 0)[np.newaxis].to(device)
+    )
+    pose_regions_uv = geo_fn.to_uv(pose_regions)
+    pose_regions_uv = F.max_pool2d(pose_regions_uv, ksize, 1, padding=ksize // 2)
+    pose_cond_mask = (F.interpolate(pose_regions_uv, size=(uv_size, uv_size)) > 0.1).to(th.int32)
+    return pose_cond_mask
+
+
+def parent_chain(joint_parents, idx, depth):
+    if depth == 0 or idx == 0:
+        return []
+    parent_idx = int(joint_parents[idx])
+    return [parent_idx] + parent_chain(joint_parents, parent_idx, depth - 1)
+
+
+def joint_connectivity(nr_joints, joint_parents, chain_depth=2, pad_ancestors=False):
+    children = {j: [] for j in range(nr_joints)}
+    parents = {j: None for j in range(nr_joints)}
+    ancestors = {j: [] for j in range(nr_joints)}
+    for j in range(nr_joints):
+        parent_index = int(joint_parents[j])
+        ancestors[j] = parent_chain(joint_parents, j, depth=chain_depth)
+        if pad_ancestors:
+            # adding itself
+            ancestors[j] += [j] * (chain_depth - len(ancestors[j]))
+
+        if parent_index == -1:
+            continue
+        children[parent_index].append(j)
+        parents[j] = parent_index
+
+    return {
+        'children': children,
+        'parents': parents,
+        'ancestors': ancestors,
+    }
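+
+
+# Editor's sketch (not part of the original API): `joint_connectivity` on a
+# tiny four-joint chain; joint 0 is treated as the root (parent -1).
+def _demo_joint_connectivity() -> None:
+    parents = [-1, 0, 1, 2]  # 0 -> 1 -> 2 -> 3
+    info = joint_connectivity(4, parents, chain_depth=2)
+    assert info["children"][1] == [2]
+    assert info["ancestors"][3] == [2, 1]  # up to `chain_depth` ancestors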
+
+
+# TODO: merge this with LinearBlendSkinning?
+class LBSModule(nn.Module):
+    def __init__(
+        self, lbs_model_json, lbs_config_dict, lbs_template_verts, lbs_scale, global_scaling
+    ):
+        super().__init__()
+        self.lbs_fn = LinearBlendSkinning(lbs_model_json, lbs_config_dict)
+
+        self.register_buffer("lbs_scale", th.as_tensor(lbs_scale, dtype=th.float32))
+        self.register_buffer(
+            "lbs_template_verts", th.as_tensor(lbs_template_verts, dtype=th.float32)
+        )
+        self.register_buffer("global_scaling", th.as_tensor(global_scaling))
+
+    def pose(self, verts_unposed, motion, template: Optional[th.Tensor] = None):
+        scale = self.lbs_scale.expand(motion.shape[0], -1)
+        if template is None:
+            template = self.lbs_template_verts
+        return self.lbs_fn(motion, scale, verts_unposed + template) * self.global_scaling
+
+    def unpose(self, verts, motion):
+        B = motion.shape[0]
+        scale = self.lbs_scale.expand(B, -1)
+        return (
+            self.lbs_fn.unpose(motion, scale, verts / self.global_scaling) - self.lbs_template_verts
+        )
+
+    def template_pose(self, motion):
+        B = motion.shape[0]
+        scale = self.lbs_scale.expand(B, -1)
+        verts = self.lbs_template_verts[np.newaxis].expand(B, -1, -1)
+        return self.lbs_fn(motion, scale, verts) * self.global_scaling[np.newaxis]
diff --git a/visualize/ca_body/utils/module_loader.py b/visualize/ca_body/utils/module_loader.py
new file mode 100644
index 0000000000000000000000000000000000000000..750fde207b03c7dccd6dd29c0421f0cbadd8349b
--- /dev/null
+++ b/visualize/ca_body/utils/module_loader.py
@@ -0,0 +1,279 @@
+"""
+Copyright (c) Meta Platforms, Inc. and affiliates.
+All rights reserved.
+This source code is licensed under the license found in the
+LICENSE file in the root directory of this source tree.
+"""
+
+import copy
+import importlib
+import inspect
+import logging
+from dataclasses import dataclass, field
+from typing import Any, Dict, Optional
+
+from attrdict import AttrDict
+
+from torch import nn
+
+
+logger: logging.Logger = logging.getLogger(__name__)
+
+
+def load_module(
+    module_name: str, class_name: Optional[str] = None, silent: bool = False
+):
+    """
+    Load a module or class given the module/class name.
+
+    Example:
+    .. code-block:: python
+
+        eye_geo = load_module("path.to.module", "ClassName")
+
+    Args:
+        module_name: str
+            The full path of the module relative to the root directory. Ex: ``utils.module_loader``
+
+        class_name: str
+            The name of the class within the module to load.
+
+        silent: bool
+            If set to True, return None instead of raising an exception if module/class is missing
+
+    Returns:
+        object:
+            The loaded module or class object.
+    """
+    try:
+        module = importlib.import_module(f"visualize.{module_name}")
+        if class_name:
+            return getattr(module, class_name)
+        else:
+            return module
+    except ModuleNotFoundError as e:
+        if silent:
+            return None
+        logger.error(f"Module not found: {module_name}", exc_info=True)
+        raise
+    except AttributeError as e:
+        if silent:
+            return None
+        logger.error(
+            f"Can not locate class: {class_name} in {module_name}.", exc_info=True
+        )
+        raise
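+
+
+# Editor's sketch of `load_module` usage; note that the module path is
+# resolved relative to the `visualize` package (the function prepends
+# "visualize." internally):
+#
+#   utils = load_module("ca_body.utils.module_loader")
+#   LBS = load_module("ca_body.utils.lbs", "LinearBlendSkinning")
+#   missing = load_module("does.not.exist", silent=True)  # -> None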
+ """ + mod_config_dict = dict(mod_config) + mod_args = mod_config_dict.pop("args", {}) + mod_args.update({k: v for k, v in kwargs.items() if k not in mod_args.keys()}) + mod_class = load_module(**mod_config_dict) + return mod_class(*args, **mod_args) + + +def get_full_name(mod: object) -> str: + """ + Returns a name of an object in a form .. + """ + mod_class = mod.__class__ + return f"{mod_class.__module__}.{mod_class.__qualname__}" + + +# pyre-fixme[3]: Return type must be annotated. +def load_class(class_name: str): + """ + Load a class given the full class name. + + Example: + .. code-block:: python + + class_instance = load_class("module.path.ClassName") + + Args: + class_name: txt + The full class name including the full path of the module relative to the root directory. + Returns: + A class + """ + # This is a false-positive, pyre doesn't understand rsplit(..., 1) can only have 1-2 elements + # pyre-fixme[6]: In call `load_module`, for 1st positional only parameter expected `bool` but got `str`. + return load_module(*class_name.rsplit(".", 1)) + + +@dataclass(frozen=True) +class ObjectSpec: + """ + Args: + class_name: str + The full class name including the full path of the module relative to + the root directory or just the name of the class within the module to + load when module name is also provided. + + module_name: str + The full path of the module relative to the root directory. Ex: ``utils.module_loader`` + + kwargs: dict + Keyword arguments for initializing the object. + """ + + class_name: str + module_name: Optional[str] = None + kwargs: Dict[str, Any] = field(default_factory=dict) + + +# pyre-fixme[3]: Return type must be annotated. +def load_object(spec: ObjectSpec, **kwargs: Any): + """ + Instantiate an object given the class name and initialization arguments. + + Example: + .. code-block:: python + + my_model = load_object(ObjectSpec(**my_model_config), in_channels=3) + + Args: + spec: ObjectSpec + An ObjectSpec object that specifies the class name and init arguments. + + kwargs: dict + Additional keyword arguments for initialization. + + Returns: + An object + """ + if spec.module_name is None: + object_class = load_class(spec.class_name) + else: + object_class = load_module(spec.module_name, spec.class_name) + + # Debug message for overriding the object spec + for key in kwargs: + if key in spec.kwargs: + logger.debug(f"Overriding {key} as {kwargs[key]} in {spec}.") + + return object_class(**{**spec.kwargs, **kwargs}) + + +# From DaaT merge. Fix here T145981161 +# pyre-fixme[2]: parameter must be annotated. +# pyre-fixme[3]: Return type must be annotated. +def load_from_config(config: AttrDict, **kwargs): + """Instantiate an object given a config and arguments.""" + assert "class_name" in config and "module_name" not in config + config = copy.deepcopy(config) + class_name = config.pop("class_name") + object_class = load_class(class_name) + return object_class(**config, **kwargs) + + +# From DaaT merge. Fix here T145981161 +# pyre-fixme[2]: parameter must be annotated. +# pyre-fixme[3]: Return type must be annotated. +def forward_parameter_names(module): + """Get the names arguments of the forward pass for the module. + + Args: + module: a class with `forward()` method + """ + names = [] + params = list(inspect.signature(module.forward).parameters.values())[1:] + for p in params: + if p.name in {"*args", "**kwargs"}: + raise ValueError("*args and **kwargs are not supported") + names.append(p.name) + return names + + +# From DaaT merge. 
+
+
+# From DaaT merge. Fix here T145981161
+def build_optimizer(config, model):
+    """Build an optimizer given optimizer config and a model.
+
+    Args:
+        config: DictConfig
+        model: nn.Module|Dict[str,nn.Module]
+
+    """
+    config = copy.deepcopy(config)
+
+    if isinstance(model, nn.Module):
+        if "per_module" in config:
+            params = []
+            for name, value in config.per_module.items():
+                if not hasattr(model, name):
+                    logger.warning(
+                        f"model {model.__class__} does not have a submodule {name}, skipping"
+                    )
+                    continue
+
+                params.append(
+                    dict(
+                        params=getattr(model, name).parameters(),
+                        **value,
+                    )
+                )
+
+            defined_names = set(config.per_module.keys())
+            for name, module in model.named_children():
+                n_params = len(list(module.named_parameters()))
+                if name not in defined_names and n_params:
+                    logger.warning(
+                        f"not going to optimize module {name} which has {n_params} parameters"
+                    )
+            config.pop("per_module")
+        else:
+            params = model.parameters()
+    else:
+        # NOTE: can we do
+        assert "per_module" in config
+        assert isinstance(model, dict)
+        # Note: de-duplicated a doubled loop here so `params` is collected once
+        # across all configured submodules, and `per_module` is popped so it is
+        # not passed on to the optimizer constructor.
+        params = []
+        for name, value in config.per_module.items():
+            if name not in model:
+                logger.warning(f"not aware of {name}, skipping")
+                continue
+            params.append(
+                dict(
+                    params=model[name].parameters(),
+                    **value,
+                )
+            )
+        config.pop("per_module")
+
+    return load_from_config(config, params=params)
+
+
+# From DaaT merge. Fix here T145981161
+class ForwardFilter:
+    """A module that filters out arguments for the `forward()`."""
+
+    # pyre-ignore
+    def __init__(self, module, optional: bool = False) -> None:
+        # pyre-ignore
+        self.module = module
+        # pyre-ignore
+        self.input_names = set(forward_parameter_names(module))
+
+    # pyre-ignore
+    def __call__(self, **kwargs):
+        filtered_kwargs = {k: v for k, v in kwargs.items() if k in self.input_names}
+        return self.module(**filtered_kwargs)
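+
+
+# Editor's sketch (hypothetical `decoder` module): `ForwardFilter` silently
+# drops keyword arguments the wrapped module's `forward()` does not accept.
+#
+#   wrapped = ForwardFilter(decoder)          # decoder.forward(self, audio)
+#   out = wrapped(audio=a, unused_key=b)      # `unused_key` is filtered out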
+""" + +import numpy as np +import torch as th + +import torch.nn as nn +import torch.nn.functional as F + + +class Quaternion: + """Torch Tensor based Quaternion class""" + + @staticmethod + def identity(dtype=th.double): + """ + Create identity quaternion + """ + return th.tensor([0.0, 0.0, 0.0, 1.0], dtype=dtype) + + @staticmethod + def mul(q, r): + """ + mul two quaternions, expects those to be double tesnors of length 4 + """ + return th.stack( + [ + (q * th.tensor([1.0, 1.0, -1.0, 1.0], dtype=q.dtype)).dot(r[[3, 2, 1, 0]]), + (q * th.tensor([-1.0, 1.0, 1.0, 1.0], dtype=q.dtype)).dot(r[[2, 3, 0, 1]]), + (q * th.tensor([1.0, -1.0, 1.0, 1.0], dtype=q.dtype)).dot(r[[1, 0, 3, 2]]), + (q * th.tensor([-1.0, -1.0, -1.0, 1.0], dtype=q.dtype)).dot(r[[0, 1, 2, 3]]), + ] + ) + + @staticmethod + def rot(q, v): + """ + Rotate 3d-vector v given with quaternion q + """ + axis = q[:3] + av = th.cross(axis, v) + aav = th.cross(axis, av) + return v + 2 * (av * q[3] + aav) + + @staticmethod + def invert(q): + """ + Get the inverse of quaternion q + """ + return q * th.tensor([-1.0, -1.0, -1.0, 1.0], dtype=q.dtype) * (1.0 / q.dot(q)) + + @staticmethod + def fromAxisAngle(axis, angle): + """ + Generate a quaternion representing a rotation around axis by angle + """ + s = th.sin(angle * 0.5) + c = th.cos(angle * 0.5).view([1]) + return th.cat((axis * s, c), 0) + + @staticmethod + def fromXYZ(angles): + """ + Generate a quaternion representing a rotation defined by a XYZ-Euler + rotation. + This is faster than creating three separate quaternions and muling + them. + """ + rc = th.cos( + angles * th.tensor([-0.5, 0.5, 0.5], dtype=angles.dtype, device=angles.device) + ) + rs = th.sin( + angles * th.tensor([-0.5, 0.5, 0.5], dtype=angles.dtype, device=angles.device) + ) + + return th.stack( + [ + -rs[0] * rc[1] * rc[2] - rc[0] * rs[1] * rs[2], + rc[0] * rs[1] * rc[2] - rs[0] * rc[1] * rs[2], + rc[0] * rc[1] * rs[2] + rs[0] * rs[1] * rc[2], + rc[0] * rc[1] * rc[2] - rs[0] * rs[1] * rs[2], + ] + ) + + @staticmethod + def toMatrix(q): + """ + Convert quaternion q to 3x3 rotation matrix + """ + result = th.empty([3, 3], dtype=q.dtype) + + tx = q[0] * 2.0 + ty = q[1] * 2.0 + tz = q[2] * 2.0 + twx = tx * q[3] + twy = ty * q[3] + twz = tz * q[3] + txx = tx * q[0] + txy = ty * q[0] + txz = tz * q[0] + tyy = ty * q[1] + tyz = tz * q[1] + tzz = tz * q[2] + + result[0, 0] = 1.0 - (tyy + tzz) + result[0, 1] = txy - twz + result[0, 2] = txz + twy + result[1, 0] = txy + twz + result[1, 1] = 1.0 - (txx + tzz) + result[1, 2] = tyz - twx + result[2, 0] = txz - twy + result[2, 1] = tyz + twx + result[2, 2] = 1.0 - (txx + tyy) + + return result + + @staticmethod + def toMatrixBatch(q): + tx = q[..., 0] * 2.0 + ty = q[..., 1] * 2.0 + tz = q[..., 2] * 2.0 + twx = tx * q[..., 3] + twy = ty * q[..., 3] + twz = tz * q[..., 3] + txx = tx * q[..., 0] + txy = ty * q[..., 0] + txz = tz * q[..., 0] + tyy = ty * q[..., 1] + tyz = tz * q[..., 1] + tzz = tz * q[..., 2] + + return th.stack( + ( + th.stack((1.0 - (tyy + tzz), txy + twz, txz - twy), dim=2), + th.stack((txy - twz, 1.0 - (txx + tzz), tyz + twx), dim=2), + th.stack((txz + twy, tyz - twx, 1.0 - (txx + tyy)), dim=2), + ), + dim=3, + ) + + @staticmethod + def toMatrixBatchDim1(q): + tx = q[..., 0] * 2.0 + ty = q[..., 1] * 2.0 + tz = q[..., 2] * 2.0 + twx = tx * q[..., 3] + twy = ty * q[..., 3] + twz = tz * q[..., 3] + txx = tx * q[..., 0] + txy = ty * q[..., 0] + txz = tz * q[..., 0] + tyy = ty * q[..., 1] + tyz = tz * q[..., 1] + tzz = tz * q[..., 2] + + return th.stack( + ( + 
+
+
+    @staticmethod
+    def batchMul(q, r):
+        """
+        Multiply two quaternions; expects those to be double tensors of length 4
+
+        Args:
+            q: N x K x 4 quaternions
+            r: N x K x 4 quaternions
+
+        Returns:
+            N x K x 4 multiplied quaternions
+        """
+        return th.stack(
+            [
+                th.sum(
+                    th.mul(
+                        th.mul(
+                            q,
+                            th.tensor(
+                                [[[1.0, 1.0, -1.0, 1.0]]],
+                                dtype=q.dtype,
+                                device=q.device,
+                            ),
+                        ),
+                        r[:, :, (3, 2, 1, 0)],
+                    ),
+                    dim=-1,
+                ),
+                th.sum(
+                    th.mul(
+                        th.mul(
+                            q,
+                            th.tensor(
+                                [[[-1.0, 1.0, 1.0, 1.0]]],
+                                dtype=q.dtype,
+                                device=q.device,
+                            ),
+                        ),
+                        r[:, :, (2, 3, 0, 1)],
+                    ),
+                    dim=-1,
+                ),
+                th.sum(
+                    th.mul(
+                        th.mul(
+                            q,
+                            th.tensor(
+                                [[[1.0, -1.0, 1.0, 1.0]]],
+                                dtype=q.dtype,
+                                device=q.device,
+                            ),
+                        ),
+                        r[:, :, (1, 0, 3, 2)],
+                    ),
+                    dim=-1,
+                ),
+                th.sum(
+                    th.mul(
+                        th.mul(
+                            q,
+                            th.tensor(
+                                [[[-1.0, -1.0, -1.0, 1.0]]],
+                                dtype=q.dtype,
+                                device=q.device,
+                            ),
+                        ),
+                        r[:, :, (0, 1, 2, 3)],
+                    ),
+                    dim=-1,
+                ),
+            ],
+            dim=2,
+        )
+
+    @staticmethod
+    def batchRot(q, v):
+        """
+        Rotate 3d-vector v given with quaternion q
+
+        Args:
+            q: N x K x 4 quaternions
+            v: N x K x 3 vectors
+
+        Returns:
+            N x K x 3 rotated vectors
+        """
+        av = th.cross(q[:, :, :3], v, dim=2)
+        aav = th.cross(q[:, :, :3], av, dim=2)
+        return th.add(v, 2 * th.add(th.mul(av, q[:, :, 3].unsqueeze(2)), aav))
+
+    @staticmethod
+    def batchInvert(q):
+        """
+        Get the inverse of quaternion q
+
+        Args:
+            q: N x K x 4 quaternions
+
+        Returns:
+            N x K x 4 inverted quaternions
+        """
+        return (
+            q
+            * th.tensor([-1.0, -1.0, -1.0, 1.0], dtype=q.dtype, device=q.device)
+            * (th.reciprocal(th.sum(q * q, dim=2).unsqueeze(2)))
+        )
+
+    @staticmethod
+    def batchFromXYZ(r):
+        """
+        Generate a quaternion representing a rotation defined by a XYZ-Euler
+        rotation.
+
+        Args:
+            r: N x K x 3 rotation vectors
+
+        Returns:
+            N x K x 4 quaternions
+        """
+        rm = r * th.tensor([[[-0.5, 0.5, 0.5]]], dtype=r.dtype, device=r.device)
+        rc = th.cos(rm)
+        rs = th.sin(rm)
+
+        return th.stack(
+            [
+                th.sub(
+                    th.mul(th.neg(rs[:, :, 0]), th.mul(rc[:, :, 1], rc[:, :, 2])),
+                    th.mul(rc[:, :, 0], th.mul(rs[:, :, 1], rs[:, :, 2])),
+                ),
+                th.sub(
+                    th.mul(rc[:, :, 0], th.mul(rs[:, :, 1], rc[:, :, 2])),
+                    th.mul(rs[:, :, 0], th.mul(rc[:, :, 1], rs[:, :, 2])),
+                ),
+                th.add(
+                    th.mul(rc[:, :, 0], th.mul(rc[:, :, 1], rs[:, :, 2])),
+                    th.mul(rs[:, :, 0], th.mul(rs[:, :, 1], rc[:, :, 2])),
+                ),
+                th.sub(
+                    th.mul(rc[:, :, 0], th.mul(rc[:, :, 1], rc[:, :, 2])),
+                    th.mul(rs[:, :, 0], th.mul(rs[:, :, 1], rs[:, :, 2])),
+                ),
+            ],
+            dim=2,
+        )
+
+        Args:
+            r: N x 3 rotation vectors
+
+        Returns:
+            N x 3 x 3 rotation matrices
+        """
+        rc = th.cos(r)
+        rs = th.sin(r)
+        cx = rc[:, 0]
+        cy = rc[:, 1]
+        cz = rc[:, 2]
+        sx = rs[:, 0]
+        sy = rs[:, 1]
+        sz = rs[:, 2]
+
+        result = th.stack(
+            (
+                cy * cz,
+                -cx * sz + sx * sy * cz,
+                sx * sz + cx * sy * cz,
+                cy * sz,
+                cx * cz + sx * sy * sz,
+                -sx * cz + cx * sy * sz,
+                -sy,
+                sx * cy,
+                cx * cy,
+            ),
+            dim=1,
+        ).view(-1, 3, 3)
+        return result
+
+    @staticmethod
+    def batchQuatFromMatrix(m):
+        """
+        :param m: B x 3 x 3 rotation matrices
+        :return: B x 4 quaternions, order xyzw
+        """
+        assert len(m.shape) == 3
+        b, j, k = m.shape
+        assert j == 3
+        assert k == 3
+        result = th.zeros((b, 4), dtype=th.float32).to(m.device)
+        S = th.zeros((b,), dtype=th.float32).to(m.device)
+
+        m00 = m[:, 0, 0]
+        m01 = m[:, 0, 1]
+        m02 = m[:, 0, 2]
+        m10 = m[:, 1, 0]
+        m11 = m[:, 1, 1]
+        m12 = m[:, 1, 2]
+        m20 = m[:, 2, 0]
+        m21 = m[:, 2, 1]
+        m22 = m[:, 2, 2]
+
+        # All four branches below are exact; the case split only matters
+        # numerically (each divides by a different 4*|q_i|). The branches must
+        # be mutually exclusive, so the already-handled rows are tracked in
+        # `done` (the original chained `~flag`, which let later branches
+        # overwrite earlier results).
+        tr = m00 + m11 + m22
+        flag = tr > 0
+        done = flag
+        S[flag] = 2 * th.sqrt(1 + tr[flag])
+        result[flag, 0] = (m21 - m12)[flag] / S[flag]
+        result[flag, 1] = (m02 - m20)[flag] / S[flag]
+        result[flag, 2] = (m10 - m01)[flag] / S[flag]
+        result[flag, 3] = 0.25 * S[flag]
+
+        flag = ~done & (m00 > m11) & (m00 > m22)
+        done = done | flag
+        S[flag] = 2 * th.sqrt(1.0 + m00[flag] - m11[flag] - m22[flag])
+        result[flag, 0] = 0.25 * S[flag]
+        result[flag, 1] = (m01 + m10)[flag] / S[flag]
+        result[flag, 2] = (m02 + m20)[flag] / S[flag]
+        result[flag, 3] = (m21 - m12)[flag] / S[flag]
+
+        flag = ~done & (m11 > m22)
+        done = done | flag
+        S[flag] = 2 * th.sqrt(1.0 + m11[flag] - m00[flag] - m22[flag])
+        result[flag, 0] = (m01 + m10)[flag] / S[flag]
+        result[flag, 1] = 0.25 * S[flag]
+        result[flag, 2] = (m12 + m21)[flag] / S[flag]
+        result[flag, 3] = (m02 - m20)[flag] / S[flag]
+
+        flag = ~done
+        S[flag] = 2 * th.sqrt(1.0 + m22[flag] - m00[flag] - m11[flag])
+        result[flag, 0] = (m02 + m20)[flag] / S[flag]
+        result[flag, 1] = (m12 + m21)[flag] / S[flag]
+        result[flag, 2] = 0.25 * S[flag]
+        result[flag, 3] = (m10 - m01)[flag] / S[flag]
+
+        return result
+
+
+class RodriguesVecBatch(nn.Module):
+    def __init__(self):
+        super(RodriguesVecBatch, self).__init__()
+        self.register_buffer("eye", th.eye(3))
+        self.register_buffer("zero", th.zeros(1))
+
+    def forward(self, v0, v1):
+        # Assuming v0 and v1 are already normalized, compute the matrix
+        # rotating v0 onto v1.
+        nbat = v0.size(0)
+        cosn = (v0 * v1).sum(dim=1, keepdim=True).unsqueeze(2)
+        r = v1.cross(v0, dim=1)
+        sinn = r.pow(2).sum(1, keepdim=True).sqrt().unsqueeze(2)
+        rn = r.unsqueeze(2) / (sinn + 1e-10)
+        R = cosn * self.eye.unsqueeze(0).expand(nbat, 3, 3)
+        R = R + (1.0 - cosn) * rn.bmm(rn.permute(0, 2, 1))
+        # Fill in the antisymmetric sin term. Each entry is read before it is
+        # written; the original derived R[:, 1, 0] from the already-updated
+        # R[:, 0, 1], which cancelled the sin contribution entirely.
+        R[:, 0, 1] = R[:, 0, 1] + rn[:, 2, 0] * sinn[:, 0, 0]
+        R[:, 1, 0] = R[:, 1, 0] - rn[:, 2, 0] * sinn[:, 0, 0]
+        R[:, 0, 2] = R[:, 0, 2] - rn[:, 1, 0] * sinn[:, 0, 0]
+        R[:, 2, 0] = R[:, 2, 0] + rn[:, 1, 0] * sinn[:, 0, 0]
+        R[:, 1, 2] = R[:, 1, 2] + rn[:, 0, 0] * sinn[:, 0, 0]
+        R[:, 2, 1] = R[:, 2, 1] - rn[:, 0, 0] * sinn[:, 0, 0]
+        return R
+
+
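+# RodriguesVecBatch (above) and RodriguesBatch (below) both assemble
+# rotations via the Rodrigues formula
+#     R = cos(t) * I + (1 - cos(t)) * r r^T + sin(t) * [r]_x,
+# with unit axis r, angle t, and [r]_x the skew-symmetric cross-product
+# matrix of r; the six indexed updates in each forward() fill in [r]_x.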
+class RodriguesBatch(nn.Module):
+    def __init__(self):
+        super(RodriguesBatch, self).__init__()
+        self.register_buffer("eye", th.eye(3))
+        self.register_buffer("zero", th.zeros(1))
+
+    def forward(self, r):
+        # r: N x 3 rotation vectors; the norm is the angle, the direction the axis.
+        nbat = r.size(0)
+        n = ((r * r).sum(dim=1, keepdim=True) + 1e-10).sqrt()
+        rn = th.div(r, n).unsqueeze(2)
+
+        cosn = th.cos(n).unsqueeze(2)
+        sinn = th.sin(n).unsqueeze(2)
+        R = cosn * self.eye.unsqueeze(0).expand(nbat, 3, 3)
+        R = R + (1.0 - cosn) * rn.bmm(rn.permute(0, 2, 1))
+
+        # Same antisymmetric-term fix as in RodriguesVecBatch above.
+        R[:, 0, 1] = R[:, 0, 1] + rn[:, 2, 0] * sinn[:, 0, 0]
+        R[:, 1, 0] = R[:, 1, 0] - rn[:, 2, 0] * sinn[:, 0, 0]
+        R[:, 0, 2] = R[:, 0, 2] - rn[:, 1, 0] * sinn[:, 0, 0]
+        R[:, 2, 0] = R[:, 2, 0] + rn[:, 1, 0] * sinn[:, 0, 0]
+        R[:, 1, 2] = R[:, 1, 2] + rn[:, 0, 0] * sinn[:, 0, 0]
+        R[:, 2, 1] = R[:, 2, 1] - rn[:, 0, 0] * sinn[:, 0, 0]
+        return R
+
+
+class NormalComputer(nn.Module):
+    def __init__(self, height, width, maskin=None):
+        super(NormalComputer, self).__init__()
+
+        patchttnum = 5  # 4-neighborhood + self
+        vec_standuv = (
+            np.indices((height, width))
+            .swapaxes(0, 2)
+            .swapaxes(0, 1)
+            .astype(np.int32)
+            .reshape(height, width, 1, 2)
+        )
+        localpatchcoord = np.array([[-1, 0], [0, 1], [1, 0], [0, -1], [0, 0]]).astype(np.int32)
+
+        patchmatch_uvpos = vec_standuv + localpatchcoord.reshape(1, 1, patchttnum, 2)
+        patchmatch_uvpos[..., 0] = np.clip(patchmatch_uvpos[..., 0], 0, height - 1)
+        patchmatch_uvpos[..., 1] = np.clip(patchmatch_uvpos[..., 1], 0, width - 1)
+
+        # Geometry mask, applied similarly to a texture mask. By default every
+        # pixel is valid; a tailored mask can be passed in instead.
+        if maskin is None:
+            maskin = np.ones((height, width), dtype=np.int32)
+        mesh_mask_int = maskin.reshape(height, width).astype(np.int32)
+        patchmatch_mask = mesh_mask_int[patchmatch_uvpos[..., 0], patchmatch_uvpos[..., 1]].reshape(
+            height, width, patchttnum, 1
+        )
+        # Masked-out neighbors fall back to the center pixel's own coordinate.
+        patch_indicemap = patchmatch_uvpos * patchmatch_mask + (1 - patchmatch_mask) * vec_standuv
+
+        tensor_patch_geoindicemap = th.from_numpy(patch_indicemap).long()
+        tensor_patch_geoindicemap1d = (
+            tensor_patch_geoindicemap[..., 0] * width + tensor_patch_geoindicemap[..., 1]
+        )
+
+        self.register_buffer("tensor_patch_geoindicemap1d", tensor_patch_geoindicemap1d)
+
+    def forward(self, t_georecon):  # in: N 3 H W
+        n, c = t_georecon.size(0), t_georecon.size(1)
+        h, w, p = self.tensor_patch_geoindicemap1d.shape
+        # th.index_select expects (input, dim, index) with a 1D index, so the
+        # (H, W, 5) neighbor-index map is flattened for the lookup and the
+        # result reshaped back (the original call passed the arguments in the
+        # wrong order, with a 3D index).
+        geometry_in = (
+            th.index_select(
+                t_georecon.view(n, c, -1),
+                2,
+                self.tensor_patch_geoindicemap1d.view(-1),
+            )
+            .view(n, c, h, w, p)
+            .permute(0, 2, 3, 4, 1)
+        )
+
+        # Average the cross products of the four adjacent edge pairs around
+        # each pixel, then normalize.
+        normal = (geometry_in[..., 0, :] - geometry_in[..., 4, :]).cross(
+            geometry_in[..., 1, :] - geometry_in[..., 4, :], dim=3
+        )
+        normal = normal + (geometry_in[..., 1, :] - geometry_in[..., 4, :]).cross(
+            geometry_in[..., 2, :] - geometry_in[..., 4, :], dim=3
+        )
+        normal = normal + (geometry_in[..., 2, :] - geometry_in[..., 4, :]).cross(
+            geometry_in[..., 3, :] - geometry_in[..., 4, :], dim=3
+        )
+        normal = normal + (geometry_in[..., 3, :] - geometry_in[..., 4, :]).cross(
+            geometry_in[..., 0, :] - geometry_in[..., 4, :], dim=3
+        )
+        normal = normal / th.clamp(normal.pow(2).sum(3, keepdim=True).sqrt(), min=1e-6)
+        return normal.permute(0, 3, 1, 2)
+
+
+# Batched Kabsch / orthogonal Procrustes alignment.
+def pointcloud_rigid_registration(src_pointcloud, dst_pointcloud, reduce_loss: bool = True):
+    """
+    Calculate RT 
and residual L2 loss for two pointclouds + :param src_pointcloud: x (b, v, 3) + :param dst_pointcloud: y (b, v, 3) + :return: loss, R, t s.t. ||Rx+t-y||_2^2 minimal. + """ + if len(src_pointcloud.shape) == 2: + src_pointcloud = src_pointcloud.unsqueeze(0) + if len(dst_pointcloud.shape) == 2: + dst_pointcloud = dst_pointcloud.unsqueeze(0) + bn = src_pointcloud.shape[0] + + assert src_pointcloud.shape == dst_pointcloud.shape + assert src_pointcloud.shape[2] == 3 + + X = src_pointcloud - src_pointcloud.mean(dim=1, keepdim=True) + Y = dst_pointcloud - dst_pointcloud.mean(dim=1, keepdim=True) + + XYT = th.einsum("nji,njk->nik", X, Y) + muX = src_pointcloud.mean(dim=1) + muY = dst_pointcloud.mean(dim=1) + + R = th.zeros((bn, 3, 3), dtype=src_pointcloud.dtype).to(src_pointcloud.device) + t = th.zeros((bn, 1, 3), dtype=src_pointcloud.dtype).to(src_pointcloud.device) + loss = th.zeros((bn,), dtype=src_pointcloud.dtype).to(src_pointcloud.device) + + for i in range(bn): + u_, s_, v_ = th.svd(XYT[i, :, :]) + detvut = th.det(v_.mm(u_.t())) + diag_m = th.ones_like(s_) + diag_m[-1] = detvut + + r_ = v_.mm(th.diag(diag_m)).mm(u_.t()) + t_ = muY[i, :] - r_.mm(muX[i, :, None])[:, 0] + + R[i, :, :] = r_ + t[i, 0, :] = t_ + loss[i] = (th.einsum("ij,nj->ni", r_, X[i]) - Y[i]).pow(2).sum(1).mean(0) + + loss = loss.mean(0) if reduce_loss else loss + return loss, R, t + + +def pointcloud_rigid_registration_balanced(src_pointcloud, dst_pointcloud, weight): + """ + Calculate RT and residual L2 loss for two pointclouds + :param src_pointcloud: x (b, v, 3) + :param dst_pointcloud: y (b, v, 3) + :param weight: (v, ), duplication of vertices + :return: loss, R, t s.t. ||w(Rx+t-y)||_2^2 minimal. + """ + if len(src_pointcloud.shape) == 2: + src_pointcloud = src_pointcloud.unsqueeze(0) + if len(dst_pointcloud.shape) == 2: + dst_pointcloud = dst_pointcloud.unsqueeze(0) + bn = src_pointcloud.shape[0] + + assert src_pointcloud.shape == dst_pointcloud.shape + assert src_pointcloud.shape[2] == 3 + assert src_pointcloud.shape[1] == weight.shape[0] + assert len(weight.shape) == 1 + w = weight[None, :, None] + + def s1(a): + return a.sum(dim=1, keepdim=True) + + w2 = w.pow(2) + sw2 = s1(w2) + X = src_pointcloud + Y = dst_pointcloud + + wXYT = th.einsum("nji,njk->nik", w2 * (sw2 - w2) * X, Y) + U, s, V = batch_svd(wXYT) + UT = U.permute(0, 2, 1).contiguous() + det = batch_det(V.bmm(UT)) + diag = th.ones_like(s).to(s.device) + diag[:, -1] = det + + R = V.bmm(batch_diag(diag)).bmm(UT) + RX = th.einsum("bij,bnj->bni", R, X) + t = th.sum(w * (Y - RX), dim=1, keepdim=True) / sw2 + loss = w * (RX + t - Y) + loss = F.mse_loss(loss, th.zeros_like(loss)) * 3 + + return loss, R, t + + +def batch_dot(x, y): + assert x.shape == y.shape + assert len(x.shape) == 2 + return th.einsum("ni,ni->n", x, y) + + +def batch_svd(x): + assert len(x.shape) == 3 + bn, m, n = x.shape + U = th.zeros((bn, m, m), dtype=th.float32).to(x.device) + s = th.zeros((bn, min(n, m)), dtype=th.float32).to(x.device) + V = th.zeros((bn, n, n), dtype=th.float32).to(x.device) + for i in range(bn): + u_, s_, v_ = th.svd(x[i, :, :]) + U[i] = u_ + s[i] = s_ + V[i] = v_ + return U, s, V + + +def batch_diag(x): + if len(x.shape) == 2: + bn, n = x.shape + res = th.zeros((bn, n, n), dtype=th.float32).to(x.device) + res[:, range(n), range(n)] = x + return res + elif len(x.shape) == 3: + assert x.shape[1] == x.shape[2] + n = x.shape[1] + return x[:, range(n), range(n)] + else: + raise ValueError("dim of batch_diag should be 2 or 3") + + +def batch_det(x): + assert len(x.shape) == 3 + 
assert x.shape[1] == x.shape[2]
+    bn, _, _ = x.shape
+    res = th.zeros((bn,), dtype=th.float32).to(x.device)
+    for i in range(bn):
+        res[i] = th.det(x[i])
+    return res
diff --git a/visualize/ca_body/utils/render.py b/visualize/ca_body/utils/render.py
new file mode 100644
index 0000000000000000000000000000000000000000..0a89e1563e36b9d5adb5216b307eb80b26d84901
--- /dev/null
+++ b/visualize/ca_body/utils/render.py
@@ -0,0 +1,65 @@
+"""
+Copyright (c) Meta Platforms, Inc. and affiliates.
+All rights reserved.
+This source code is licensed under the license found in the
+LICENSE file in the root directory of this source tree.
+"""
+
+from typing import Dict, List, Optional
+
+import torch as th
+import torch.nn as nn
+
+from pytorch3d.renderer import (
+    RasterizationSettings,
+    MeshRasterizer,
+)
+
+from pytorch3d.structures import Meshes
+from pytorch3d.renderer.mesh.textures import TexturesUV
+from pytorch3d.utils import cameras_from_opencv_projection
+
+
+class RenderLayer(nn.Module):
+
+    def __init__(self, h, w, vi, vt, vti, flip_uvs=False):
+        super().__init__()
+        self.register_buffer("vi", vi, persistent=False)
+        self.register_buffer("vt", vt, persistent=False)
+        self.register_buffer("vti", vti, persistent=False)
+        raster_settings = RasterizationSettings(image_size=(h, w))
+        self.rasterizer = MeshRasterizer(raster_settings=raster_settings)
+        self.flip_uvs = flip_uvs
+        image_size = th.as_tensor([h, w], dtype=th.int32)
+        self.register_buffer("image_size", image_size)
+
+    def forward(
+        self,
+        verts: th.Tensor,
+        tex: th.Tensor,
+        K: th.Tensor,
+        Rt: th.Tensor,
+        background: Optional[th.Tensor] = None,
+        output_filters: Optional[List[str]] = None,
+    ):
+
+        assert output_filters is None
+        assert background is None
+
+        device = verts.device
+        B = verts.shape[0]
+
+        image_size = th.repeat_interleave(self.image_size[None], B, dim=0).to(device)
+
+        # Split the OpenCV-style [R|t] into rotation and translation.
+        cameras = cameras_from_opencv_projection(Rt[:, :, :3], Rt[:, :3, 3], K, image_size)
+
+        faces = self.vi[None].repeat(B, 1, 1).to(device)
+        faces_uvs = self.vti[None].repeat(B, 1, 1).to(device)
+        verts_uvs = self.vt[None].repeat(B, 1, 1).to(device)
+
+        # Convert NCHW -> NHWC and flip the vertical texture axis unless the
+        # UVs are already flipped (this returns new tensors; it is not in-place).
+        if not self.flip_uvs:
+            tex = tex.permute(0, 2, 3, 1).flip((1,)).to(device)
+
+        textures = TexturesUV(
+            maps=tex,
+            faces_uvs=faces_uvs,
+            verts_uvs=verts_uvs,
+        )
+        meshes = Meshes(verts.to(device), faces, textures=textures)
+
+        fragments = self.rasterizer(meshes, cameras=cameras)
+        rgb = meshes.sample_textures(fragments)[:, :, :, 0]
+        rgb[fragments.pix_to_face[..., 0] == -1] = 0.0
+
+        return {'render': rgb.permute(0, 3, 1, 2)}
\ No newline at end of file
diff --git a/visualize/ca_body/utils/seams.py b/visualize/ca_body/utils/seams.py
new file mode 100644
index 0000000000000000000000000000000000000000..e5d22890c76f4f3b8a84d8cdb9d7f1c33c543692
--- /dev/null
+++ b/visualize/ca_body/utils/seams.py
@@ -0,0 +1,52 @@
+"""
+Copyright (c) Meta Platforms, Inc. and affiliates.
+All rights reserved.
+This source code is licensed under the license found in the
+LICENSE file in the root directory of this source tree.
+"""
+
+from typing import Any, Dict
+
+import numpy as np
+import torch as th
+import torch.nn as nn
+import torch.nn.functional as F
+
+
+def impaint_batch(value: th.Tensor, dst_ij: th.Tensor, src_ij: th.Tensor) -> th.Tensor:
+    assert len(value.shape) == 4, "expecting a 4D tensor"
+    # NOTE: `value[:]` is a view, so the impainting also writes through to the
+    # input tensor; use `value.clone()` here if that is not desired.
+    preds = value[:]
+    preds[:, :, dst_ij[:, 0], dst_ij[:, 1]] = value[:, :, src_ij[:, 0], src_ij[:, 1]]
+    return preds
+
+
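+# resample_tex maps UVs in [0, 1] into grid_sample's [-1, 1] range, fetches
+# the texture at those seam-crossing locations, and blends the result back
+# into the original texels with the per-pixel `weights`.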
+""" + +from typing import Any, Dict + +import numpy as np +import torch as th +import torch.nn as nn +import torch.nn.functional as F + + +def impaint_batch(value: th.Tensor, dst_ij: th.Tensor, src_ij: th.Tensor) -> th.Tensor: + assert len(value.shape) == 4, "expecting a 4D tensor" + preds = value[:] + preds[:, :, dst_ij[:, 0], dst_ij[:, 1]] = value[:, :, src_ij[:, 0], src_ij[:, 1]] + return preds + + +def resample_tex(tex: th.Tensor, uvs: th.Tensor, weights: th.Tensor) -> th.Tensor: + B = tex.shape[0] + grid = 2.0 * (uvs[np.newaxis].expand(B, -1, -1, -1) - 0.5) + tex_resampled = F.grid_sample(tex, grid, align_corners=False, padding_mode="border") + return (1.0 - weights) * tex + weights * tex_resampled + + +class SeamSampler(nn.Module): + def __init__(self, seamless_data: Dict[str, Any]) -> None: + super().__init__() + + self.register_buffer("dst_ij", seamless_data["dst_ij"]) + self.register_buffer("src_ij", seamless_data["src_ij"]) + self.register_buffer("uvs", seamless_data["uvs"]) + self.register_buffer("weights", seamless_data["weights"]) + + def impaint(self, value: th.Tensor) -> th.Tensor: + return impaint_batch(value, self.dst_ij, self.src_ij) + + def resample(self, tex: th.Tensor) -> th.Tensor: + return resample_tex(tex, self.uvs, self.weights) + + def resample_border_only(self, tex: th.Tensor) -> th.Tensor: + tex = resample_tex(tex, self.uvs, self.weights) + return tex + + def forward(self, tex: th.Tensor) -> th.Tensor: + x = self.impaint(tex) + x = self.resample(x) + return x diff --git a/visualize/ca_body/utils/torch.py b/visualize/ca_body/utils/torch.py new file mode 100644 index 0000000000000000000000000000000000000000..7b2e15aacfd190bc6a977b5180bcb217d628eba4 --- /dev/null +++ b/visualize/ca_body/utils/torch.py @@ -0,0 +1,229 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. 
+""" + +from typing import Optional, Tuple, Sequence, TypeVar, Union, Mapping, Any, List, Dict + +import torch as th +import numpy as np + +TensorOrContainer = Union[ + th.Tensor, str, int, Sequence["TensorOrContainer"], Mapping[str, "TensorOrContainer"] +] +NdarrayOrContainer = Union[ + np.ndarray, + str, + int, + Sequence["NdarrayOrContainer"], + Mapping[str, "NdarrayOrContainer"], +] +TensorNdarrayOrContainer = Union[ + th.Tensor, + np.ndarray, + str, + int, + Sequence["TensorNdarrayOrContainer"], + Mapping[str, "TensorNdarrayOrContainer"], +] +TensorNdarrayModuleOrContainer = Union[ + th.Tensor, + np.ndarray, + th.nn.Module, + str, + int, + Sequence["TensorNdarrayModuleOrContainer"], + Mapping[str, "TensorNdarrayModuleOrContainer"], +] +TTensorOrContainer = TypeVar("TTensorOrContainer", bound=TensorOrContainer) +TNdarrayOrContainer = TypeVar("TNdarrayOrContainer", bound=NdarrayOrContainer) +TTensorNdarrayOrContainer = TypeVar("TTensorNdarrayOrContainer", bound=TensorNdarrayOrContainer) +TTensorNdarrayModuleOrContainer = TypeVar( + "TTensorNdarrayModuleOrContainer", bound=TensorNdarrayModuleOrContainer +) + + +import torch as th + +import logging + +logger = logging.getLogger(__name__) + + +class ParamHolder(th.nn.Module): + def __init__( + self, + param_shape: Tuple[int, ...], + key_list: Sequence[str], + init_value: Union[None, bool, float, int, th.Tensor] = None, + ) -> None: + super().__init__() + + if isinstance(param_shape, int): + param_shape = (param_shape,) + self.key_list: Sequence[str] = sorted(key_list) + shp = (len(self.key_list),) + param_shape + self.params = th.nn.Parameter(th.zeros(*shp)) + + if init_value is not None: + self.params.data[:] = init_value + + def state_dict(self, *args: Any, saving: bool = False, **kwargs: Any) -> Dict[str, Any]: + sd = super().state_dict(*args, **kwargs) + if saving: + assert "key_list" not in sd + sd["key_list"] = self.key_list + return sd + + # pyre-fixme[14]: `load_state_dict` overrides method defined in `Module` + # inconsistently. + def load_state_dict( + self, state_dict: Mapping[str, Any], strict: bool = True, **kwargs: Any + ) -> th.nn.modules.module._IncompatibleKeys: + # Note: Mapping is immutable while Dict is mutable. According to pyre ErrorCode[14], + # the type of state_dict must be Mapping or supertype of Mapping to keep consistent + # with the overrided function in its superclass. + sd = dict(state_dict) + if "key_list" not in sd: + logger.warning("Missing key list list in state dict, only checking params shape.") + assert sd["params"].shape == self.params.shape + sd["key_list"] = self.key_list + + matching_kl = sd["key_list"] == self.key_list + if strict: + logger.warning("Attempting to load from mismatched key lists.") + assert sd["params"].shape[1:] == self.params.shape[1:] + + if not matching_kl: + src_kl = sd["key_list"] + new_kl = sorted(set(self.key_list) | set(src_kl)) + new_shp = (len(new_kl),) + tuple(self.params.shape[1:]) + new_params = th.zeros(*new_shp, device=self.params.device) + for f in self.key_list: + new_params[new_kl.index(f)] = self.params[self.key_list.index(f)] + upd = 0 + new = 0 + for f in src_kl: + new_params[new_kl.index(f)] = sd["params"][src_kl.index(f)] + if f in self.key_list: + upd += 1 + else: + new += 1 + logger.info( + f"Updated {upd} keys ({100*upd/len(self.key_list):0.2f}%), added {new} new keys." 
+ ) + + self.key_list = new_kl + sd["params"] = new_params + self.params = th.nn.Parameter(new_params) + del sd["key_list"] + return super().load_state_dict(sd, strict=strict, **kwargs) + + def to_idx(self, *args: Any) -> th.Tensor: + if len(args) == 1: + keys = args[0] + else: + keys = zip(*args) + + return th.tensor( + [self.key_list.index(k) for k in keys], + dtype=th.long, + device=self.params.device, + ) + + def from_idx(self, idxs: th.Tensor) -> List[str]: + return [self.key_list[idx] for idx in idxs] + + def forward(self, idxs: th.Tensor) -> th.Tensor: + return self.params[idxs] + + + +def to_device( + things: TTensorNdarrayModuleOrContainer, + device: th.device, + cache: Optional[Dict[str, th.Tensor]] = None, + key: Optional[str] = None, + verbose: bool = False, + max_bs: Optional[int] = None, + non_blocking: bool = False, +) -> TTensorNdarrayModuleOrContainer: + """Sends a potentially nested container of Tensors to the specified + device. Non-tensors are preserved as-is. + + Args: + things: Container with tensors or other containers of tensors to send + to a GPU. + + device: Device to send the tensors to. + + cache: Optional dictionary to use as a cache for CUDAfied tensors. If + passed, use this cache to allocate a tensor once and then resize / + refill it on future calls to to_device() instead of reallocating + it. + + key: If using the cache, store the tensor in this key, only for + internal use. + + verbose: Print some info when a cached tensor is resized. + + max_bs: Maximum batch size allowed for tensors in cache + + non_blocking: if True and this copy is between CPU and GPU, the copy + may occur asynchronously with respect to the host. For other cases, + this argument has no effect. + + Returns: + collection: The input collection with all tensors transferred to the given device. + """ + device = th.device(device) + + pr = print if verbose else lambda *args, **kwargs: None + + if isinstance(things, th.Tensor) and things.device != device: + if cache is not None: + assert key is not None + batch_size = things.shape[0] + if key in cache: + assert things.shape[1:] == cache[key].shape[1:] + if batch_size > cache[key].shape[0]: + pr("Resized:", key, "from", cache[key].shape[0], "to", batch_size) + cache[key].resize_as_(things) + else: + buf_shape = list(things.shape) + if max_bs is not None: + assert max_bs >= batch_size + buf_shape[0] = max_bs + cache[key] = th.zeros(*buf_shape, dtype=things.dtype, device=device) + pr("Allocated:", key, buf_shape) + cache[key][:batch_size].copy_(things, non_blocking=non_blocking) + + return cache[key][:batch_size] + else: + return things.to(device, non_blocking=non_blocking) + elif isinstance(things, th.nn.Module): + return things.to(device, non_blocking=non_blocking) + elif isinstance(things, dict): + key = key + "." 
if key is not None else ""
+        return {
+            k: to_device(v, device, cache, key + k, verbose, max_bs, non_blocking)
+            for k, v in things.items()
+        }
+    elif isinstance(things, Sequence) and not isinstance(things, str):
+        key = key if key is not None else ""
+        out = [
+            to_device(v, device, cache, key + f"_{i}", verbose, max_bs, non_blocking)
+            for i, v in enumerate(things)
+        ]
+        if isinstance(things, tuple):
+            out = tuple(out)
+        return out
+    elif isinstance(things, np.ndarray):
+        return to_device(th.from_numpy(things), device, cache, key, verbose, max_bs, non_blocking)
+    else:
+        return things
diff --git a/visualize/ca_body/utils/train.py b/visualize/ca_body/utils/train.py
new file mode 100644
index 0000000000000000000000000000000000000000..c9c7a5492ebee145037c9fb390182baf215defb4
--- /dev/null
+++ b/visualize/ca_body/utils/train.py
@@ -0,0 +1,223 @@
+"""
+Copyright (c) Meta Platforms, Inc. and affiliates.
+All rights reserved.
+This source code is licensed under the license found in the
+LICENSE file in the root directory of this source tree.
+"""
+
+import copy
+import glob
+import inspect
+import logging
+import os
+import re
+from collections import OrderedDict
+from typing import Dict, Any, Iterator, Mapping, Optional, Union, Tuple, List
+
+import torch as th
+import torch.nn as nn
+from torch.optim.lr_scheduler import LRScheduler
+from torch.utils.tensorboard import SummaryWriter
+from omegaconf import OmegaConf, DictConfig
+
+from visualize.ca_body.utils.torch import to_device
+from visualize.ca_body.utils.module_loader import load_class, build_optimizer
+
+logging.basicConfig(
+    format="[%(asctime)s][%(levelname)s][%(name)s]:%(message)s",
+    level=logging.INFO,
+    datefmt="%Y-%m-%d %H:%M:%S",
+)
+
+logger = logging.getLogger(__name__)
+
+
+def process_losses(
+    loss_dict: Dict[str, Any], reduce: bool = True, detach: bool = True
+) -> Dict[str, th.Tensor]:
+    """Preprocess the dict of losses outputs."""
+    result = {k.replace("loss_", ""): v for k, v in loss_dict.items() if k.startswith("loss_")}
+    if detach:
+        result = {k: v.detach() for k, v in result.items()}
+    if reduce:
+        result = {k: float(v.mean().item()) for k, v in result.items()}
+    return result
+
+
+def filter_inputs(
+    inputs: Mapping[str, Any], module: nn.Module, required_only: bool = True
+) -> Dict[str, Any]:
+    """Keep only the entries of `inputs` that `module.forward` accepts.
+
+    NOTE: minimal sketch added here because `train()` below calls
+    `filter_inputs()` but its original definition is not part of this diff;
+    the upstream helper may differ.
+    """
+    params = inspect.signature(module.forward).parameters
+    if any(p.kind == inspect.Parameter.VAR_KEYWORD for p in params.values()):
+        return dict(inputs)
+    if required_only:
+        names = {n for n, p in params.items() if p.default is inspect.Parameter.empty}
+    else:
+        names = set(params)
+    return {k: v for k, v in inputs.items() if k in names}
+
+
+def load_config(path: str) -> DictConfig:
+    # NOTE: THIS IS THE ONLY PLACE WHERE WE MODIFY CONFIG
+    config = OmegaConf.load(path)
+
+    # TODO: we should get rid of this in favor of DB
+    assert 'CARE_ROOT' in os.environ
+    config.CARE_ROOT = os.environ['CARE_ROOT']
+    logger.info(f'{config.CARE_ROOT=}')
+
+    if not os.path.isabs(config.train.run_dir):
+        config.train.run_dir = os.path.join(os.environ['CARE_ROOT'], config.train.run_dir)
+    logger.info(f'{config.train.run_dir=}')
+    os.makedirs(config.train.run_dir, exist_ok=True)
+    return config
+
+
+def load_from_config(config: Mapping[str, Any], **kwargs):
+    """Instantiate an object given a config and arguments."""
+    assert 'class_name' in config and 'module_name' not in config
+    config = copy.deepcopy(config)
+    ckpt = None if 'ckpt' not in config else config.pop('ckpt')
+    class_name = config.pop('class_name')
+    object_class = load_class(class_name)
+    instance = object_class(**config, **kwargs)
+    if ckpt is not None:
+        load_checkpoint(
+            ckpt_path=ckpt.path,
+            modules={ckpt.get('module_name', 'model'): instance},
+            ignore_names=ckpt.get('ignore_names', []),
+            strict=ckpt.get('strict', False),
+        )
+    return instance
+
+
+def save_checkpoint(ckpt_path, modules: Dict[str, Any], iteration=None, keep_last_k=None):
+    if keep_last_k is not None:
+        raise NotImplementedError()
+    ckpt_dict = {}
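+    # `ckpt_path` may be either a directory (an iteration-stamped file is
+    # created inside it) or the full path to a single .pt file.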
+    if os.path.isdir(ckpt_path):
+        assert iteration is not None
+        ckpt_path = os.path.join(ckpt_path, f"{iteration:06d}.pt")
+        ckpt_dict["iteration"] = iteration
+    for name, mod in modules.items():
+        if hasattr(mod, "module"):
+            mod = mod.module
+        ckpt_dict[name] = mod.state_dict()
+    th.save(ckpt_dict, ckpt_path)
+
+
+def filter_params(params, ignore_names):
+    return OrderedDict(
+        [
+            (k, v)
+            for k, v in params.items()
+            if not any([re.match(n, k) is not None for n in ignore_names])
+        ]
+    )
+
+
+def save_file_summaries(path: str, summaries: Dict[str, Tuple[Any, str]], prefix: str = ""):
+    """Save regular (value, extension) summaries for monitoring purposes.
+
+    NOTE: `prefix` is accepted because `train()` passes it; the actual saving
+    is not implemented yet.
+    """
+    # for name, (value, ext) in summaries.items():
+    #     save(f"{path}/{prefix}{name}.{ext}", value)
+    raise NotImplementedError()
+
+
+def load_checkpoint(
+    ckpt_path: str,
+    modules: Dict[str, Any],
+    iteration: Optional[int] = None,
+    strict: bool = False,
+    map_location: Optional[str] = None,
+    ignore_names: Optional[Dict[str, List[str]]] = None,
+):
+    """Load a checkpoint.
+    Args:
+        ckpt_path: directory or the full path to the checkpoint
+    """
+    if map_location is None:
+        map_location = "cpu"
+    if os.path.isdir(ckpt_path):
+        if iteration is None:
+            # look up the latest iteration
+            iteration = max(
+                [
+                    int(os.path.splitext(os.path.basename(p))[0])
+                    for p in glob.glob(os.path.join(ckpt_path, "*.pt"))
+                ]
+            )
+        ckpt_path = os.path.join(ckpt_path, f"{iteration:06d}.pt")
+    logger.info(f"loading checkpoint {ckpt_path}")
+    ckpt_dict = th.load(ckpt_path, map_location=map_location)
+    for name, mod in modules.items():
+        params = ckpt_dict[name]
+        if ignore_names is not None and name in ignore_names:
+            logger.info(f"skipping: {ignore_names[name]}")
+            params = filter_params(params, ignore_names[name])
+        mod.load_state_dict(params, strict=strict)
+
+
+def train(
+    model: nn.Module,
+    loss_fn: nn.Module,
+    optimizer: th.optim.Optimizer,
+    train_data: Iterator,
+    config: Mapping[str, Any],
+    lr_scheduler: Optional[LRScheduler] = None,
+    train_writer: Optional[SummaryWriter] = None,
+    saving_enabled: bool = True,
+    logging_enabled: bool = True,
+    iteration: int = 0,
+    device: Optional[Union[th.device, str]] = "cuda:0",
+) -> None:
+
+    for batch in train_data:
+        if batch is None:
+            logger.info("skipping empty batch")
+            continue
+        batch = to_device(batch, device)
+        batch["iteration"] = iteration
+
+        # leaving only the inputs actually used by the model
+        preds = model(**filter_inputs(batch, model, required_only=False))
+
+        # TODO: switch to the old-school loss computation
+        loss, loss_dict = loss_fn(preds, batch, iteration=iteration)
+
+        # log the per-term losses before aborting on a NaN total loss (the
+        # original also had an unconditional assert above this check, which
+        # made this branch unreachable)
+        if th.isnan(loss):
+            _loss_dict = process_losses(loss_dict)
+            loss_str = " ".join([f"{k}={v:.4f}" for k, v in _loss_dict.items()])
+            logger.info(f"iter={iteration}: {loss_str}")
+            raise ValueError("loss is NaN")
+
+        optimizer.zero_grad()
+        loss.backward()
+        optimizer.step()
+
+        if logging_enabled and iteration % config.train.log_every_n_steps == 0:
+            _loss_dict = process_losses(loss_dict)
+            loss_str = " ".join([f"{k}={v:.4f}" for k, v in _loss_dict.items()])
+            logger.info(f"iter={iteration}: {loss_str}")
+
+        if logging_enabled and train_writer and iteration % config.train.log_every_n_steps == 0:
+            for name, value in _loss_dict.items():
+                train_writer.add_scalar(f"Losses/{name}", value, global_step=iteration)
+            train_writer.flush()
+
+        if saving_enabled and iteration % config.train.ckpt_every_n_steps == 0:
+            logger.info(f"iter={iteration}: saving checkpoint to `{config.train.ckpt_dir}`")
+            save_checkpoint(
+                config.train.ckpt_dir,
+                
{"model": model, "optimizer": optimizer}, + iteration=iteration, + ) + + if logging_enabled and iteration % config.train.summary_every_n_steps == 0: + summaries = model.compute_summaries(preds, batch) + save_file_summaries(config.train.run_dir, summaries, prefix="train") + + if lr_scheduler is not None and iteration and iteration % config.train.update_lr_every == 0: + lr_scheduler.step() + + iteration += 1 + if iteration >= config.train.n_max_iters: + logger.info(f"reached max number of iters ({config.train.n_max_iters})") + break + + if saving_enabled: + logger.info(f"saving the final checkpoint to `{config.train.run_dir}/model.pt`") + save_checkpoint(f"{config.train.run_dir}/model.pt", {"model": model}) + diff --git a/visualize/render_anno.py b/visualize/render_anno.py new file mode 100644 index 0000000000000000000000000000000000000000..5c02d19bc1f4fdf0a959d836194a2cf225ce9a95 --- /dev/null +++ b/visualize/render_anno.py @@ -0,0 +1,58 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. +""" + +import os + +import torch + +from data_loaders.get_data import load_local_data + +from tqdm import tqdm + +from utils.diff_parser_utils import train_args +from utils.misc import fixseed +from utils.model_util import get_person_num +from visualize.render_codes import BodyRenderer + + +def main(): + args = train_args() + fixseed(args.seed) + args.num_repetitions = 1 + config_base = f"./checkpoints/ca_body/data/{get_person_num(args.data_root)}" + body_renderer = BodyRenderer( + config_base=config_base, + render_rgb=True, + ).to(args.device) + data_root = args.data_root + data_dict = load_local_data(data_root, audio_per_frame=1600) + if not os.path.exists(args.save_dir): + os.makedirs(args.save_dir, exist_ok=True) + + for i in range(len(data_dict["data"])): + end_range = len(data_dict["data"][i]) - args.max_seq_length + for chunk_idx in tqdm(range(0, end_range, args.max_seq_length)): + chunk_end = chunk_idx + args.max_seq_length + curr_data_chunk = data_dict["data"][i][chunk_idx:chunk_end, :] + curr_face_chunk = data_dict["face"][i][chunk_idx:chunk_end, :] + curr_audio_chunk = data_dict["audio"][i][ + chunk_idx * 1600 : chunk_end * 1600, : + ].T + render_data_block = { + "audio": curr_audio_chunk, # 2 x T + "body_motion": curr_data_chunk, # T x 104 + "face_motion": curr_face_chunk, # T x 256 + } + body_renderer.render_full_video( + render_data_block, + f"{args.save_dir}/scene{i}_{chunk_idx:04d}.mp4", + audio_sr=48_000, + ) + + +if __name__ == "__main__": + main() diff --git a/visualize/render_codes.py b/visualize/render_codes.py new file mode 100644 index 0000000000000000000000000000000000000000..16f9cd1c6a4c31bfe4f3ed63379d5d450b3d02cd --- /dev/null +++ b/visualize/render_codes.py @@ -0,0 +1,163 @@ +""" +Copyright (c) Meta Platforms, Inc. and affiliates. +All rights reserved. +This source code is licensed under the license found in the +LICENSE file in the root directory of this source tree. 
+""" + +import copy +import glob +import os +import re +import subprocess +from collections import OrderedDict +from typing import Dict, List + +import mediapy + +import numpy as np + +import torch +import torch as th +import torchaudio +from attrdict import AttrDict + +from omegaconf import OmegaConf +from tqdm import tqdm +from utils.model_util import get_person_num +from visualize.ca_body.utils.image import linear2displayBatch +from visualize.ca_body.utils.train import load_checkpoint, load_from_config + +ffmpeg_header = "ffmpeg -y " # -hide_banner -loglevel error " + + +def filter_params(params, ignore_names): + return OrderedDict( + [ + (k, v) + for k, v in params.items() + if not any([re.match(n, k) is not None for n in ignore_names]) + ] + ) + + +def call_ffmpeg(command: str) -> None: + print(command, "-" * 100) + e = subprocess.call(command, shell=True) + if e != 0: + assert False, e + + +class BodyRenderer(th.nn.Module): + def __init__( + self, + config_base: str, + render_rgb: bool, + ): + super().__init__() + self.config_base = config_base + ckpt_path = f"{config_base}/body_dec.ckpt" + config_path = f"{config_base}/config.yml" + assets_path = f"{config_base}/static_assets.pt" + # config + config = OmegaConf.load(config_path) + gpu = config.get("gpu", 0) + self.device = th.device(f"cuda:{gpu}") + # assets + static_assets = AttrDict(torch.load(assets_path)) + # build model + self.model = load_from_config(config.model, assets=static_assets).to( + self.device + ) + self.model.cal_enabled = False + self.model.pixel_cal_enabled = False + self.model.learn_blur_enabled = False + self.render_rgb = render_rgb + if not self.render_rgb: + self.model.rendering_enabled = None + # load model checkpoints + print("loading...", ckpt_path) + load_checkpoint( + ckpt_path, + modules={"model": self.model}, + ignore_names={"model": ["lbs_fn.*"]}, + ) + self.model.eval() + self.model.to(self.device) + # load default parameters for renderer + person = get_person_num(config_path) + self.default_inputs = th.load(f"assets/render_defaults_{person}.pth") + + def _write_video_stream( + self, motion: np.ndarray, face: np.ndarray, save_name: str + ) -> None: + out = self._render_loop(motion, face) + mediapy.write_video(save_name, out, fps=30) + + def _render_loop(self, body_pose: np.ndarray, face: np.ndarray) -> List[np.ndarray]: + all_rgb = [] + default_inputs_copy = copy.deepcopy(self.default_inputs) + for b in tqdm(range(len(body_pose))): + B = default_inputs_copy["K"].shape[0] + default_inputs_copy["lbs_motion"] = ( + th.tensor(body_pose[b : b + 1, :], device=self.device, dtype=th.float) + .tile(B, 1) + .to(self.device) + ) + geom = ( + self.model.lbs_fn.lbs_fn( + default_inputs_copy["lbs_motion"], + self.model.lbs_fn.lbs_scale.unsqueeze(0).tile(B, 1), + self.model.lbs_fn.lbs_template_verts.unsqueeze(0).tile(B, 1, 1), + ) + * self.model.lbs_fn.global_scaling + ) + default_inputs_copy["geom"] = geom + face_codes = ( + th.from_numpy(face).float().cuda() if not th.is_tensor(face) else face + ) + curr_face = th.tile(face_codes[b : b + 1, ...], (2, 1)) + default_inputs_copy["face_embs"] = curr_face + preds = self.model(**default_inputs_copy) + rgb0 = linear2displayBatch(preds["rgb"])[0] + rgb1 = linear2displayBatch(preds["rgb"])[1] + rgb = th.cat((rgb0, rgb1), axis=-1).permute(1, 2, 0) + rgb = rgb.clip(0, 255).to(th.uint8) + all_rgb.append(rgb.contiguous().detach().byte().cpu().numpy()) + return all_rgb + + def render_full_video( + self, + data_block: Dict[str, np.ndarray], + animation_save_path: str, + 
+    def render_full_video(
+        self,
+        data_block: Dict[str, np.ndarray],
+        animation_save_path: str,
+        audio_sr: Optional[int] = None,
+        render_gt: bool = False,
+    ) -> None:
+        tag = os.path.basename(os.path.dirname(animation_save_path))
+        save_name = os.path.splitext(os.path.basename(animation_save_path))[0]
+        save_name = f"{tag}_{save_name}"
+        torchaudio.save(
+            f"/tmp/audio_{save_name}.wav",
+            torch.tensor(data_block["audio"]),
+            audio_sr,
+        )
+        if render_gt:
+            tag = "gt"
+            self._write_video_stream(
+                data_block["gt_body"],
+                data_block["gt_face"],
+                f"/tmp/{tag}_{save_name}.mp4",
+            )
+        else:
+            tag = "pred"
+            self._write_video_stream(
+                data_block["body_motion"],
+                data_block["face_motion"],
+                f"/tmp/{tag}_{save_name}.mp4",
+            )
+        command = f"{ffmpeg_header} -i /tmp/{tag}_{save_name}.mp4 -i /tmp/audio_{save_name}.wav -c:v copy -map 0:v:0 -map 1:a:0 -c:a aac -b:a 192k -pix_fmt yuva420p {animation_save_path}_{tag}.mp4"
+        call_ffmpeg(command)
+        subprocess.call(
+            f"rm /tmp/audio_{save_name}.wav && rm /tmp/{tag}_{save_name}.mp4",
+            shell=True,
+        )
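
As a quick sanity check of the geometry utilities in this diff, the quaternion and Euler code paths can be cross-validated against each other. A minimal sketch, assuming the `Quaternion` class lands in `visualize/ca_body/utils/geom.py` (adjust the import to whichever module this diff actually adds it to):

```python
import torch as th

# Assumed import path for the Quaternion utilities added above.
from visualize.ca_body.utils.geom import Quaternion

angles = th.tensor([0.1, -0.3, 0.25], dtype=th.double)

# XYZ Euler -> quaternion -> 3x3 matrix.
q = Quaternion.fromXYZ(angles)
R_quat = Quaternion.toMatrix(q)

# The same angles through the direct Euler-to-matrix path (as a batch of one).
R_euler = Quaternion.batchMatrixFromXYZ(angles[None])[0]

# Both constructions should agree up to numerical precision...
assert th.allclose(R_quat, R_euler, atol=1e-6)

# ...and rotating a vector by q should match multiplying by the matrix.
v = th.tensor([1.0, 2.0, 3.0], dtype=th.double)
assert th.allclose(Quaternion.rot(q, v), R_quat @ v, atol=1e-6)
```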