commit 048860986ff0b6741ff51a257f7b7c176e0feee9
Author: charlie880624
Date:   Wed Mar 11 16:13:59 2026 +0800

    first commit

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..9476279
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,73 @@
+# Python
+__pycache__/
+*.py[cod]
+*.pyo
+*.pyd
+.Python
+*.egg-info/
+dist/
+build/
+.eggs/
+
+# Virtual environments
+venv/
+env/
+.env/
+
+# Model weights and binary files
+*.pt
+*.pth
+*.onnx
+*.bie
+*.nef
+*.engine
+*.tflite
+*.pb
+
+# Training outputs (large; not tracked in git)
+runs/
+work_dirs/
+
+# Dataset images (not tracked in git; use DVC or external storage instead)
+data4/
+data50/
+test14data/
+teachabledata/
+numberocr/
+
+# ONNX output directory
+onnx/
+
+# NumPy scratch files
+npy/
+
+# Scratch directory
+暫放區/
+
+# Weights directory (everything except the download scripts)
+weights/*.pt
+weights/*.pth
+
+# Claude Code settings
+.claude/
+
+# Jupyter Notebook outputs
+.ipynb_checkpoints/
+
+# System files
+.DS_Store
+Thumbs.db
+desktop.ini
+
+# IDE
+.vscode/
+.idea/
+*.swp
+*.swo
+
+# Logs
+*.log
+wandb/
+
+# Empty placeholder file
+python
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..ee840ec
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,52 @@
+# Start FROM Nvidia PyTorch image https://ngc.nvidia.com/catalog/containers/nvidia:pytorch
+FROM nvcr.io/nvidia/pytorch:20.10-py3
+
+# Install dependencies
+RUN pip install --upgrade pip
+# COPY requirements.txt .
+# RUN pip install -r requirements.txt
+RUN pip install gsutil
+
+# Create working directory
+RUN mkdir -p /usr/src/app
+WORKDIR /usr/src/app
+
+# Copy contents
+COPY . /usr/src/app
+
+# Copy weights
+#RUN python3 -c "from models import *; \
+#attempt_download('weights/yolov5s.pt'); \
+#attempt_download('weights/yolov5m.pt'); \
+#attempt_download('weights/yolov5l.pt')"
+
+
+# --------------------------------------------------- Extras Below ---------------------------------------------------
+
+# Build and Push
+# t=ultralytics/yolov5:latest && sudo docker build -t $t . && sudo docker push $t
+# for v in {300..303}; do t=ultralytics/coco:v$v && sudo docker build -t $t . && sudo docker push $t; done
+
+# Pull and Run
+# t=ultralytics/yolov5:latest && sudo docker pull $t && sudo docker run -it --ipc=host $t
+
+# Pull and Run with local directory access
+# t=ultralytics/yolov5:latest && sudo docker pull $t && sudo docker run -it --ipc=host --gpus all -v "$(pwd)"/coco:/usr/src/coco $t
+
+# Kill all
+# sudo docker kill $(sudo docker ps -q)
+
+# Kill all image-based
+# sudo docker kill $(sudo docker ps -a -q --filter ancestor=ultralytics/yolov5:latest)
+
+# Bash into running container
+# sudo docker container exec -it ba65811811ab bash
+
+# Bash into stopped container
+# sudo docker commit 092b16b25c5b usr/resume && sudo docker run -it --gpus all --ipc=host -v "$(pwd)"/coco:/usr/src/coco --entrypoint=sh usr/resume
+
+# Send weights to GCP
+# python -c "from utils.general import *; strip_optimizer('runs/train/exp0_*/weights/best.pt', 'tmp.pt')" && gsutil cp tmp.pt gs://*.pt
+
+# Clean up
+# docker system prune -a --volumes
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..9e419e0
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,674 @@
+                    GNU GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+  The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works.
By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. 
+ + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. 
+ + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. 
This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. 
Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. 
+Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). 
+ + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". 
+ + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. 
+ + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. 
+
+  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<https://www.gnu.org/philosophy/why-not-lgpl.html>.
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..f408376
--- /dev/null
+++ b/README.md
@@ -0,0 +1,270 @@
+# Object Detection
+
+Object detection with the YOLOv5 model.
+
+This document explains the arguments of each script.
+
+
+You can find the tutorial document for finetuning a pretrained model on the COCO128 dataset under the `tutorial` folder, `tutorial/README.md`.
+
+
+An IPython notebook version of the tutorial is also provided under the `tutorial` folder as `tutorial/tutorial.ipynb`. You may upload and run this notebook on Google Colab.
+
+# Prerequisites
+- Python 3.8 or above
+
+# Installation
+```bash
+$ pip install -U pip
+$ pip install -r requirements.txt
+```
+
+# Dataset & Preparation
+
+Image data, annotations, and a `dataset.yaml` file are required.
+
+## MS COCO
+
+Our training script accepts the MS COCO dataset. You may download the dataset using the following link:
+
+- Download [2017 MS COCO Dataset](https://cocodataset.org/#download)
+
+## Custom Datasets
+
+You can also train the model on a custom dataset.
+
+### Annotations Format
+After using a tool like [CVAT](https://github.com/openvinotoolkit/cvat), [makesense.ai](https://www.makesense.ai) or [Labelbox](https://labelbox.com) to label your images, export your labels to YOLO format, with one `*.txt` file per image (if an image contains no objects, no `*.txt` file is required). The `*.txt` file specifications are:
+
+- One row per object.
+- Each row is in `class x_center y_center width height` format.
+- Box coordinates must be in normalized xywh format (from 0 to 1). If your boxes are in pixels, divide `x_center` and `width` by the image width, and `y_center` and `height` by the image height.
+- Class numbers are zero-indexed (they start from 0).
+
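+For instance, converting a pixel-space box into a normalized label row can be sketched as below (an illustrative helper with made-up values, not a script from this repository):
+
+```python
+# Convert a pixel-space box (left, top, width, height) to a YOLO label row.
+def to_yolo_row(class_id, left, top, box_w, box_h, img_w, img_h):
+    x_center = (left + box_w / 2) / img_w
+    y_center = (top + box_h / 2) / img_h
+    return f"{class_id} {x_center:.6f} {y_center:.6f} {box_w / img_w:.6f} {box_h / img_h:.6f}"
+
+# A 200x300 box at (50, 80) in a 640x480 image, class 0:
+print(to_yolo_row(0, 50, 80, 200, 300, 640, 480))
+# -> 0 0.234375 0.479167 0.312500 0.625000
+```
+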
+*(example image: a photo containing two persons and a tie)*
+
+The label file corresponding to the above image contains 2 persons (class 0) and a tie (class 27), for example:
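+
+The three rows would be shaped like the following (the coordinates here are made up for illustration, not taken from a real image):
+
+```
+0 0.481 0.634 0.690 0.713
+0 0.741 0.524 0.314 0.933
+27 0.364 0.795 0.140 0.166
+```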
+
+### Directory Organization
+Your own datasets are expected to have the following structure. We assume `/dataset` is next to the `/yolov5` directory. YOLOv5 locates labels automatically for each image by replacing the last instance of `/images/` in each image path with `/labels/`.
+
+```bash
+- Dataset name
+   -- images
+      -- train
+         --- img001.jpg
+         --- ...
+      -- val
+         --- img002.jpg
+         --- ...
+
+   -- labels
+      -- train
+         --- img001.txt
+         --- ...
+      -- val
+         --- img002.txt
+         --- ...
+
+- yolov5
+
+- generate_npy
+
+- exporting
+
+```
+
+### dataset.yaml
+
+The yaml file for the COCO dataset has been prepared in `./data/coco.yaml`. For a custom dataset, you need to prepare the yaml file and save it under `./data/`. The yaml file is expected to have the following format:
+```yaml
+# train and val datasets (image directory or *.txt file with image paths)
+train: ./datasets/images/train/
+val: ./datasets/images/val/
+
+# number of classes
+nc: number of classes
+
+# class names
+names: list of class names
+```
+
+# Train
+
+For training on MS COCO, execute commands in the folder `yolov5`:
+```shell
+CUDA_VISIBLE_DEVICES='0' python train.py --data coco.yaml --cfg yolov5s-noupsample.yaml --weights '' --batch-size 64
+```
+
+`CUDA_VISIBLE_DEVICES='0'` indicates the GPU ids.
+
+`--data` the yaml file. (located under `./data/`)
+
+`--cfg` the model configuration. (located under `./models/`) (`yolov5s-noupsample.yaml` for 520, `yolov5s.yaml` for 720)
+
+`--hyp` the path to the hyperparameters file. (located under `./data/`)
+
+`--weights` the path to the pretrained model weights. ('' to train from scratch)
+
+`--epochs` the number of epochs to train. (Default: 300)
+
+`--batch-size` batch size. (Default: 16)
+
+`--img-size` the input size of the model. (Default: (640, 640))
+
+`--workers` the maximum number of dataloader workers. (Default: 8)
+
+By default, the trained models are saved under `./runs/train/`.
+
+## Generating .npy for different model input
+We can generate the `.npy` files for a different model input by using `yolov5_generate_npy.py`. Execute commands in the folder `generate_npy`:
+```shell
+python yolov5_generate_npy.py --input-h 640 --input-w 640
+```
+
+`--input-h` the input height. (Default: 640)
+
+`--input-w` the input width. (Default: 640)
+
+This produces the `*.npy` grid files (e.g. `20_640x640.npy`, `40_640x640.npy`, and `80_640x640.npy`).
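+
+As a rough sketch of what such grid files can contain — assuming, as the 20/40/80 file names for a 640x640 input suggest, one (x, y) cell-offset grid per detection stride (32/16/8) — they could be generated as below. The exact layout written by `yolov5_generate_npy.py` may differ:
+
+```python
+import numpy as np
+
+# Hypothetical reconstruction: one (grid_h, grid_w, 2) array of cell offsets
+# per output stride, used by postprocessing to decode box centers.
+input_h, input_w = 640, 640
+for stride in (8, 16, 32):
+    gh, gw = input_h // stride, input_w // stride           # 80/40/20 for 640
+    ys, xs = np.meshgrid(np.arange(gh), np.arange(gw), indexing="ij")
+    grid = np.stack((xs, ys), axis=-1).astype(np.float32)   # (gh, gw, 2)
+    np.save(f"{gh}_{input_h}x{input_w}.npy", grid)          # e.g. 20_640x640.npy
+```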
+
+# Configure the paths yaml file
+You are expected to create a yaml file which stores all the paths related to the trained models. This yaml file will be used in the following sections. You can check and modify `pretrained_paths_520.yaml` and `pretrained_paths_720.yaml` under `/yolov5/data/`. The yaml file is expected to contain the following information:
+
+```yaml
+grid_dir: path_to_npy_file_directory
+grid20_path: path_to_grid20_npy_file
+grid40_path: path_to_grid40_npy_file
+grid80_path: path_to_grid80_npy_file
+
+yolov5_dir: path_to_yolov5_directory
+path: path_to_pretrained_yolov5_model_weights_pt_file
+yaml_path: path_to_the_model_configuration_yaml_file
+pt_path: path_to_export_yolov5_model_weights_kneron_supported_file
+onnx_export_file: path_to_export_yolov5_onnx_model_file
+
+input_w: model_input_width
+input_h: model_input_height
+
+nc: number_of_classes
+
+names: list_of_class_names
+```
+
+# Save and Convert to ONNX
+This section introduces how to save the trained model in a PyTorch 1.4-supported format and convert it to ONNX.
+
+## Exporting ONNX model in the PyTorch 1.7 environment
+We can convert the model to ONNX by using `yolov5_export.py`. Execute commands in the folder `yolov5`:
+```shell
+python ../exporting/yolov5_export.py --data path_to_pretrained_path_yaml_file
+```
+
+`--data` the path to the pretrained model paths yaml file. (Default: ../yolov5/data/pretrained_paths_520.yaml)
+
+This produces the ONNX model.
+
+
+## Converting onnx by tool chain
+Pull the latest [ONNX converter](https://github.com/kneron/ONNX_Convertor/tree/master/optimizer_scripts) from GitHub. You may read its latest documentation for converting ONNX models. Execute commands in the folder `ONNX_Convertor/optimizer_scripts`:
+(reference: https://github.com/kneron/ONNX_Convertor/tree/master/optimizer_scripts)
+
+```shell
+python -m onnxsim input_onnx_model output_onnx_model
+
+python pytorch2onnx.py input.pth output.onnx
+```
+
+This produces the converted ONNX model.
+
+
+# Inference
+
+Before running inference, we assume the model has been converted to an ONNX model as in the previous section (even if you only run inference with the .pth model). Create a yaml file containing the path information. For model inference on a single image, execute commands in the folder `yolov5`:
+```shell
+python inference.py --data path_to_pretrained_path_yaml_file --img-path path_to_image --save-path path_to_saved_image
+```
+
+`--img-path` the path to the image.
+
+`--save-path` the path where the image is drawn and saved with bounding boxes.
+
+`--data` the path to the pretrained model paths yaml file. (Default: data/pretrained_paths_520.yaml)
+
+`--conf_thres` the score threshold for bounding boxes. (Default: 0.3)
+
+`--iou_thres` the IoU threshold for NMS. (Default: 0.3)
+
+`--onnx` whether to run inference with the ONNX model.
+
+You can find the preprocessing and postprocessing code under the folder `exporting/yolov5/`.
+
+
+# Evaluation
+
+## Evaluation Metric
+We will use mean Average Precision (mAP) for evaluation. You can find the script for computing mAP in `test.py`.
+
+`mAP`: mAP is the average of Average Precision (AP). AP summarizes a precision-recall curve as the weighted mean of precisions achieved at each threshold, with the increase in recall from the previous threshold used as the weight:
+
+$$\text{AP} = \sum_n (R_n - R_{n-1})\, P_n$$
+
+where $P_n$ and $R_n$ are the precision and recall at the nth threshold. The mAP compares the ground-truth bounding box to the detected box and returns a score. The higher the score, the more accurate the model is in its detections.
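+
+As a concrete illustration of this formula (a minimal sketch, not the exact implementation in `test.py`, which may additionally smooth or interpolate the curve):
+
+```python
+import numpy as np
+
+def average_precision(recall, precision):
+    """AP = sum_n (R_n - R_{n-1}) * P_n over a precision-recall curve."""
+    r = np.concatenate(([0.0], np.asarray(recall, dtype=float)))
+    p = np.asarray(precision, dtype=float)
+    return float(np.sum((r[1:] - r[:-1]) * p))
+
+# Hypothetical PR curve sampled at three score thresholds:
+print(average_precision([0.2, 0.5, 0.8], [1.0, 0.9, 0.7]))  # 0.68
+```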
+
+## Evaluation on a Dataset
+To evaluate the trained model on a dataset:
+
+```shell
+python test.py --weights path_to_pth_model_weight --data path_to_data_yaml_file
+```
+
+`--weights` The path to the pretrained model weights. (Default: best.pt)
+
+`--data` The path to the data yaml file. (Default: data/coco128.yaml)
+
+`--img-size` Input shape of the model. (Default: (640, 640))
+
+`--conf-thres` Object confidence threshold. (Default: 0.001)
+
+`--device` CUDA device, i.e. 0 or 0,1,2,3 or cpu. (Default: cpu)
+
+`--verbose` Whether to report mAP by class.
+
+## End-to-End Evaluation
+If you would like to perform an end-to-end test with an image dataset, you can use `inference_e2e.py` under the directory `yolov5` to obtain the prediction results.
+You have to prepare an initial parameter yaml file for the inference runner. You may check `utils/init_params.yaml` for the format.
+```shell
+python inference_e2e.py --img-path path_to_dataset_folder --params path_to_init_params_file --save-path path_to_save_json_file
+```
+`--img-path` Path to the dataset directory
+
+`--params` Path to the initial parameter yaml file for the inference runner
+
+`--save-path` Path to save the predictions to a json file
+
+`--gpu` GPU id (-1 for CPU) (Default: -1)
+
+The predictions will be saved into a json file that has the following structure:
+```
+[
+  {'img_path': image_path_1,
+   'bbox': [[l,t,w,h,score,class_id], [l,t,w,h,score,class_id]]
+  },
+  {'img_path': image_path_2,
+   'bbox': [[l,t,w,h,score,class_id], [l,t,w,h,score,class_id]]
+  },
+  ...
+]
+```
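+
+For example, the saved predictions can be consumed like this (a sketch; `predictions.json` stands in for whatever `--save-path` you used):
+
+```python
+import json
+
+# Load the predictions written by inference_e2e.py and print each box.
+with open("predictions.json") as f:
+    predictions = json.load(f)
+
+for entry in predictions:
+    for l, t, w, h, score, class_id in entry["bbox"]:
+        print(f'{entry["img_path"]}: class {int(class_id)}, '
+              f'box ({l:.0f}, {t:.0f}, {w:.0f}x{h:.0f}), score {score:.2f}')
+```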
+
+# Model
+
+Backbone | Input Size | FPS on 520 | FPS on 720 | Model Size | mAP
+--- | --- |:---:|:---:|:---:|:---:
+[YOLOv5s (no upsample)](https://github.com/kneron/Model_Zoo/tree/main/detection/yolov5/yolov5s-noupsample) | 640x640 | 4.91429 | - | 13.1M | 40.4%
+[YOLOv5s (with upsample)](https://github.com/kneron/Model_Zoo/tree/main/detection/yolov5/yolov5s) | 640x640 | - | 24.4114 | 14.6M | 50.9%
+
+[YOLOv5s (no upsample)](https://github.com/kneron/Model_Zoo/tree/main/detection/yolov5/yolov5s-noupsample) is the YOLOv5s model without the upsampling operation, since the 520 hardware does not support upsampling.
diff --git a/data/annapretrained_paths_520.yaml b/data/annapretrained_paths_520.yaml
new file mode 100644
index 0000000..3781916
--- /dev/null
+++ b/data/annapretrained_paths_520.yaml
@@ -0,0 +1,17 @@
+grid_dir: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/
+grid20_path: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/20_640x640.npy
+grid40_path: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/40_640x640.npy
+grid80_path: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/80_640x640.npy
+
+
+yolov5_dir: C:/Users/rd_de/kneronyolov5/yolov5
+path: C:/Users/rd_de/kneronyolov5/yolov5/runs/train/exp59/weights/best.pt
+yaml_path: C:/Users/rd_de/kneronyolov5/yolov5/models/yolov5s-noupsample.yaml
+pt_path: C:/Users/rd_de/kneronyolov5/yolov5/weights/for520best.pt
+onnx_export_file: C:/Users/rd_de/kneronyolov5/yolov5/runs/train/exp59/weights/best.onnx
+
+input_w: 640
+input_h: 640
+
+nc: 1
+names: ['License Plate']
diff --git a/data/coco.yaml b/data/coco.yaml
new file mode 100644
index 0000000..4897a70
--- /dev/null
+++ b/data/coco.yaml
@@ -0,0 +1,33 @@
+# train and val data as 1) directory: path/images/, 2) file: path/images.txt, or 3) list: [path1/images/, path2/images/]
+path: ../datasets/coco  # dataset root dir
+train: /home/ziyan/Dataset/COCO/coco/images/train2017/  # 118287 images
+val: /home/ziyan/Dataset/COCO/coco/images/val2017/  # 5000 images
+
+# number of classes
+nc: 80
+
+# class names
+names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light',
+        'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow',
+        'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee',
+        'skis', 'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove', 'skateboard', 'surfboard',
+        'tennis racket', 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple',
+        'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch',
+        'potted plant', 'bed', 'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone',
+        'microwave', 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase', 'scissors', 'teddy bear',
+        'hair drier', 'toothbrush']
+
+# Download script/URL (optional)
+download: |
+  from utils.general import download, Path
+  # Download labels
+  segments = False  # segment or box labels
+  dir = Path(yaml['path'])  # dataset root dir
+  url = 'https://github.com/ultralytics/yolov5/releases/download/v1.0/'
+  urls = [url + ('coco2017labels-segments.zip' if segments else 'coco2017labels.zip')]  # labels
+  download(urls, dir=dir.parent)
+  # Download data
+  urls = ['http://images.cocodataset.org/zips/train2017.zip',  # 19G, 118k images
+          'http://images.cocodataset.org/zips/val2017.zip',  # 1G, 5k images
+          'http://images.cocodataset.org/zips/test2017.zip']  # 7G, 41k images (optional)
+  download(urls, dir=dir / 'images', threads=3)
diff --git a/data/coco128.yaml b/data/coco128.yaml
new file mode 100644
index 0000000..12e1d79
--- /dev/null
+++ b/data/coco128.yaml
@@ -0,0 +1,28 @@
+# COCO 2017 dataset http://cocodataset.org - first 128 training images
+# Train command: python train.py --data coco128.yaml
+# Default dataset location is next to /yolov5:
+#   /parent_folder
+#     /coco128
+#     /yolov5
+
+
+# download command/URL (optional)
+download: https://github.com/ultralytics/yolov5/releases/download/v1.0/coco128.zip
+
+# train and val data as 1) directory: path/images/, 2) file: path/images.txt, or 3) list: [path1/images/, path2/images/]
+train: ../coco128/images/train2017/  # 128 images
+val: ../coco128/images/train2017/  # 128 images
+
+# number of classes
+nc: 80
+
+# class names
+names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light',
+        'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow',
+        'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee',
+        'skis', 'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove', 'skateboard', 'surfboard',
+        'tennis racket', 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple',
+        'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch',
+        'potted plant', 'bed', 'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone',
+        'microwave', 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase', 'scissors', 'teddy bear',
+        'hair drier', 'toothbrush']
diff --git a/data/custom.yaml b/data/custom.yaml
new file mode 100644
index 0000000..80aa8c7
--- /dev/null
+++ b/data/custom.yaml
@@ -0,0 +1,9 @@
+# train and val data as directory: path/labels/
+train: ../image_data/images/train/
+val: ../image_data/images/val/
+
+# number of classes
+nc: 3
+
+# class names
+names: ['air conditioner', 'dog', 'fence']
diff --git a/data/hyp.finetune.yaml b/data/hyp.finetune.yaml
new file mode 100644
index 0000000..0c5c8d0
--- /dev/null
+++ b/data/hyp.finetune.yaml
@@ -0,0 +1,38 @@
+# Hyperparameters for VOC finetuning
+# python train.py --batch 64 --weights yolov5m.pt --data voc.yaml --img 512 --epochs 50
+# See tutorials for hyperparameter evolution https://github.com/ultralytics/yolov5#tutorials
+
+
+# Hyperparameter Evolution Results
+# Generations: 306
+#            P      R      mAP.5  mAP.5:.95  box     obj      cls
+# Metrics:   0.6    0.936  0.896  0.684      0.0115  0.00805  0.00146
+
+lr0: 0.0032
+lrf: 0.12
+momentum: 0.843
+weight_decay: 0.00036
+warmup_epochs: 2.0
+warmup_momentum: 0.5
+warmup_bias_lr: 0.05
+box: 0.0296
+cls: 0.243
+cls_pw: 0.631
+obj: 0.301
+obj_pw: 0.911
+iou_t: 0.2
+anchor_t: 2.91
+# anchors: 3.63
+fl_gamma: 0.0
+hsv_h: 0.0138
+hsv_s: 0.664
+hsv_v: 0.464
+degrees: 0.373
+translate: 0.245
+scale: 0.898
+shear: 0.602
+perspective: 0.0
+flipud: 0.00856
+fliplr: 0.5
+mosaic: 1.0 #0.0
+mixup: 0.243
diff --git a/data/hyp.scratch.yaml b/data/hyp.scratch.yaml
new file mode 100644
index 0000000..44f26b6
--- /dev/null
+++ b/data/hyp.scratch.yaml
@@ -0,0 +1,33 @@
+# Hyperparameters for COCO training from scratch
+# python train.py --batch 40 --cfg yolov5m.yaml --weights '' --data coco.yaml --img 640 --epochs 300
+# See tutorials for hyperparameter evolution https://github.com/ultralytics/yolov5#tutorials
+
+
+lr0: 0.01  # initial learning rate (SGD=1E-2, Adam=1E-3)
+lrf: 0.2  # final OneCycleLR learning rate (lr0 * lrf)
+momentum: 0.937  # SGD momentum/Adam beta1
+weight_decay: 0.0005  # optimizer weight decay 5e-4
+warmup_epochs: 3.0  # warmup epochs (fractions ok)
+warmup_momentum: 0.8  # warmup initial momentum
+warmup_bias_lr: 0.1  # warmup initial bias lr
+box: 0.05  # box loss gain
+cls: 0.5  # cls loss gain
+cls_pw: 1.0  # cls BCELoss positive_weight
+obj: 1.0  # obj loss gain (scale with pixels)
+obj_pw: 1.0  # obj BCELoss positive_weight
+iou_t: 0.20  # IoU training threshold
+anchor_t: 4.0  # anchor-multiple threshold
+# anchors: 3  # anchors per output layer (0 to ignore)
+fl_gamma: 0.0  # focal loss gamma (efficientDet default gamma=1.5)
+hsv_h: 0.015  # image HSV-Hue augmentation (fraction)
+hsv_s: 0.7  # image HSV-Saturation augmentation (fraction)
+hsv_v: 0.4  # image HSV-Value augmentation (fraction)
+degrees: 0.0  # image rotation (+/- deg)
+translate: 0.1  # image translation (+/- fraction)
+scale: 0.5  # image scale (+/- gain)
+shear: 0.0  # image shear (+/- deg)
+perspective: 0.0  # image perspective (+/- fraction), range 0-0.001
+flipud: 0.0  # image flip up-down (probability)
+fliplr: 0.5  # image flip left-right (probability)
+mosaic: 1.0  # image mosaic (probability)
+mixup: 0.0  # image mixup (probability)
diff --git a/data/mepretrained_paths_520.yaml b/data/mepretrained_paths_520.yaml
new file mode 100644
index 0000000..7fea246
--- /dev/null
+++ b/data/mepretrained_paths_520.yaml
@@ -0,0 +1,21 @@
+grid_dir: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/
+grid20_path: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/20_640x640.npy
+grid40_path: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/40_640x640.npy
+grid80_path: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/80_640x640.npy
+
+
+yolov5_dir: C:/Users/rd_de/kneronyolov5/yolov5
+path: C:/Users/rd_de/golfaceyolov5/yolov5/runs/train/exp24/weights/best.pt
+yaml_path: C:/Users/rd_de/golfaceyolov5/yolov5/models/yolov5s-noupsample.yaml
+pt_path: C:/Users/rd_de/golfaceyolov5/yolov5/weights/for520best.pt
+onnx_export_file: C:/Users/rd_de/golfaceyolov5/yolov5/runs/train/exp24/weights/best.onnx
+
+input_w: 640
+input_h: 640
+
+nc: 4
+names: ['100', '1000', '50', '500']
+
+
+#nc: 6
+#names: ['Break circuit', 'bulge', 'foreign object', 'scratch', 'short circuit', 'white spot']
diff --git a/data/mepretrained_paths_630.yaml b/data/mepretrained_paths_630.yaml
new file mode 100644
index 0000000..3e441d4
--- /dev/null
+++ b/data/mepretrained_paths_630.yaml
@@ -0,0 +1,17 @@
+grid_dir: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/
+grid20_path: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/20_640x640.npy
C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/40_640x640.npy +grid80_path: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/80_640x640.npy + + +yolov5_dir: C:/Users/rd_de/kneronyolov5/yolov5 +path: C:/Users/rd_de/golfaceyolov5/yolov5/runs/train/exp16/weights/best.pt +yaml_path: C:/Users/rd_de/golfaceyolov5/yolov5/models/yolov5s.yaml +pt_path: C:/Users/rd_de/golfaceyolov5/yolov5/weights/for720best.pt +onnx_export_file: C:/Users/rd_de/golfaceyolov5/yolov5/runs/train/exp16/weights/best.onnx + +input_w: 640 +input_h: 640 + +nc: 4 +names: ['car', 'greenery', 'person', 'tree'] diff --git a/data/mepretrained_paths_630class2.yaml b/data/mepretrained_paths_630class2.yaml new file mode 100644 index 0000000..8a90f13 --- /dev/null +++ b/data/mepretrained_paths_630class2.yaml @@ -0,0 +1,17 @@ +grid_dir: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/ +grid20_path: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/20_640x640.npy +grid40_path: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/40_640x640.npy +grid80_path: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/80_640x640.npy + + +yolov5_dir: C:/Users/rd_de/kneronyolov5/yolov5 +path: C:/Users/rd_de/golfaceyolov5/yolov5/runs/train/exp13/weights/best.pt +yaml_path: C:/Users/rd_de/golfaceyolov5/yolov5/models/yolov5s.yaml +pt_path: C:/Users/rd_de/golfaceyolov5/yolov5/weights/for720best.pt +onnx_export_file: C:/Users/rd_de/golfaceyolov5/yolov5/runs/train/exp13/weights/best.onnx + +input_w: 640 +input_h: 640 + +nc: 2 +names: ['bunker', 'pond'] diff --git a/data/mepretrained_paths_630class8.yaml b/data/mepretrained_paths_630class8.yaml new file mode 100644 index 0000000..ea3d738 --- /dev/null +++ b/data/mepretrained_paths_630class8.yaml @@ -0,0 +1,16 @@ +grid_dir: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/ +grid20_path: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/20_640x640.npy +grid40_path: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/40_640x640.npy +grid80_path: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/80_640x640.npy + + +yolov5_dir: C:/Users/rd_de/kneronyolov5/yolov5 +path: C:/Users/rd_de/golfaceyolov5/yolov5/runs/train/exp29/weights/best.pt +yaml_path: C:/Users/rd_de/golfaceyolov5/yolov5/models/yolov5s.yaml +pt_path: C:/Users/rd_de/golfaceyolov5/yolov5/weights/for720best.pt +onnx_export_file: C:/Users/rd_de/golfaceyolov5/yolov5/runs/train/exp29/weights/best.onnx + +nc: 8 +names: ['bunker', 'car', 'grass', 'greenery', 'person', 'pond', 'road', 'tree'] +input_w: 640 +input_h: 640 \ No newline at end of file diff --git a/data/mepretrained_paths_720.yaml b/data/mepretrained_paths_720.yaml new file mode 100644 index 0000000..62c04a5 --- /dev/null +++ b/data/mepretrained_paths_720.yaml @@ -0,0 +1,17 @@ +grid_dir: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/ +grid20_path: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/20_640x640.npy +grid40_path: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/40_640x640.npy +grid80_path: C:/Users/rd_de/kneronyolov5/ai_training/detection/yolov5/generate_npy/80_640x640.npy + + +yolov5_dir: C:/Users/rd_de/kneronyolov5/yolov5 +path: C:/Users/rd_de/golfaceyolov5/yolov5/runs/train/exp19/weights/best.pt +yaml_path: C:/Users/rd_de/golfaceyolov5/yolov5/models/yolov5s.yaml +pt_path: 
C:/Users/rd_de/golfaceyolov5/yolov5/weights/for720best.pt +onnx_export_file: C:/Users/rd_de/golfaceyolov5/yolov5/runs/train/exp19/weights/best.onnx + +input_w: 640 +input_h: 640 + +nc: 4 +names: ['car', 'greenery', 'person', 'tree'] diff --git a/data/model_paths_520_coco128.yaml b/data/model_paths_520_coco128.yaml new file mode 100644 index 0000000..c7daa77 --- /dev/null +++ b/data/model_paths_520_coco128.yaml @@ -0,0 +1,25 @@ +grid_dir: ../generate_npy/ +grid20_path: ../generate_npy/20_640x640.npy +grid40_path: ../generate_npy/40_640x640.npy +grid80_path: ../generate_npy/80_640x640.npy + +yolov5_dir: ./ +path: ./runs/train/exp/weights/best.pt +yaml_path: ./models/yolov5s-noupsample.yaml +pt_path: ./yolov5s-noupsample-coco128.pt # pytorch 1.4 +onnx_export_file: ./yolov5s-noupsample-coco128.onnx + +input_w: 640 +input_h: 640 +# number of classes +nc: 80 +# class names +names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light', + 'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow', + 'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove', 'skateboard', 'surfboard', + 'tennis racket', 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple', + 'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted plant', 'bed', 'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone', + 'microwave', 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase', 'scissors', 'teddy bear', + 'hair drier', 'toothbrush'] \ No newline at end of file diff --git a/data/pretrained_paths_520.yaml b/data/pretrained_paths_520.yaml new file mode 100644 index 0000000..48397ee --- /dev/null +++ b/data/pretrained_paths_520.yaml @@ -0,0 +1,25 @@ +grid_dir: ../generate_npy/ +grid20_path: ../generate_npy/20_640x640.npy +grid40_path: ../generate_npy/40_640x640.npy +grid80_path: ../generate_npy/80_640x640.npy + +yolov5_dir: ./ +path: ./best.pt +yaml_path: ./models/yolov5s-noupsample.yaml +pt_path: ./yolov5s-noupsample.pt # pytorch 1.4 +onnx_export_file: ./yolov5s-noupsample.onnx + +input_w: 640 +input_h: 640 +# number of classes +nc: 80 +# class names +names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light', + 'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow', + 'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove', 'skateboard', 'surfboard', + 'tennis racket', 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple', + 'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted plant', 'bed', 'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone', + 'microwave', 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase', 'scissors', 'teddy bear', + 'hair drier', 'toothbrush'] \ No newline at end of file diff --git a/data/pretrained_paths_720.yaml b/data/pretrained_paths_720.yaml new file mode 100644 index 0000000..d5b9abb --- /dev/null +++ b/data/pretrained_paths_720.yaml @@ -0,0 +1,25 @@ +grid_dir: ../generate_npy/ +grid20_path: 
../generate_npy/20_640x640.npy +grid40_path: ../generate_npy/40_640x640.npy +grid80_path: ../generate_npy/80_640x640.npy + +yolov5_dir: ./ +path: ./best.pt +yaml_path: ./models/yolov5s.yaml +pt_path: ./yolov5s.pt # pytorch 1.4 +onnx_export_file: ./yolov5s.onnx + +input_w: 640 +input_h: 640 +# number of classes +nc: 80 +# class names +names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light', + 'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow', + 'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove', 'skateboard', 'surfboard', + 'tennis racket', 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple', + 'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted plant', 'bed', 'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone', + 'microwave', 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase', 'scissors', 'teddy bear', + 'hair drier', 'toothbrush'] \ No newline at end of file diff --git a/data/scripts/get_coco.sh b/data/scripts/get_coco.sh new file mode 100644 index 0000000..157a0b0 --- /dev/null +++ b/data/scripts/get_coco.sh @@ -0,0 +1,24 @@ +#!/bin/bash +# COCO 2017 dataset http://cocodataset.org +# Download command: bash data/scripts/get_coco.sh +# Train command: python train.py --data coco.yaml +# Default dataset location is next to /yolov5: +# /parent_folder +# /coco +# /yolov5 + +# Download/unzip labels +d='../' # unzip directory +url=https://github.com/ultralytics/yolov5/releases/download/v1.0/ +f='coco2017labels.zip' # 68 MB +echo 'Downloading' $url$f ' ...' && curl -L $url$f -o $f && unzip -q $f -d $d && rm $f # download, unzip, remove + +# Download/unzip images +d='../coco/images' # unzip directory +url=http://images.cocodataset.org/zips/ +f1='train2017.zip' # 19G, 118k images +f2='val2017.zip' # 1G, 5k images +f3='test2017.zip' # 7G, 41k images (optional) +for f in $f1 $f2; do + echo 'Downloading' $url$f ' ...' && curl -L $url$f -o $f && unzip -q $f -d $d && rm $f # download, unzip, remove +done diff --git a/data/scripts/get_voc.sh b/data/scripts/get_voc.sh new file mode 100644 index 0000000..6bdaa9b --- /dev/null +++ b/data/scripts/get_voc.sh @@ -0,0 +1,137 @@ +#!/bin/bash +# PASCAL VOC dataset http://host.robots.ox.ac.uk/pascal/VOC/ +# Download command: bash data/scripts/get_voc.sh +# Train command: python train.py --data voc.yaml +# Default dataset location is next to /yolov5: +# /parent_folder +# /VOC +# /yolov5 + +start=$(date +%s) +mkdir -p ../tmp +cd ../tmp/ + +# Download/unzip images and labels +d='.' # unzip directory +url=https://github.com/ultralytics/yolov5/releases/download/v1.0/ +f1=VOCtrainval_06-Nov-2007.zip # 446MB, 5012 images +f2=VOCtest_06-Nov-2007.zip # 438MB, 4953 images +f3=VOCtrainval_11-May-2012.zip # 1.95GB, 17126 images +for f in $f1 $f2 $f3; do + echo 'Downloading' $url$f ' ...' && curl -L $url$f -o $f && unzip -q $f -d $d && rm $f # download, unzip, remove +done + +end=$(date +%s) +runtime=$((end - start)) +echo "Completed in" $runtime "seconds" + +echo "Splitting dataset..." 
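+# The embedded Python below converts the VOC XML annotations to YOLO-format
+# label files and writes per-split image lists, which are then concatenated
+# into train.txt and train.all.txt.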
+python3 - "$@" <train.txt +cat 2007_train.txt 2007_val.txt 2007_test.txt 2012_train.txt 2012_val.txt >train.all.txt + +python3 - "$@" <= 1 + p, s, im0 = Path(path[i]), '%g: ' % i, im0s[i].copy() + else: + p, s, im0 = Path(path), '', im0s + + save_path = str(save_dir / p.name) + txt_path = str(save_dir / 'labels' / p.stem) + ('_%g' % dataset.frame if dataset.mode == 'video' else '') + s += '%gx%g ' % img.shape[2:] # print string + gn = torch.tensor(im0.shape)[[1, 0, 1, 0]] # normalization gain whwh + if len(det): + # Rescale boxes from img_size to im0 size + det[:, :4] = scale_coords(img.shape[2:], det[:, :4], im0.shape).round() + + # Print results + for c in det[:, -1].unique(): + n = (det[:, -1] == c).sum() # detections per class + s += '%g %ss, ' % (n, names[int(c)]) # add to string + + # Write results + for *xyxy, conf, cls in reversed(det): + if save_txt: # Write to file + xywh = (xyxy2xywh(torch.tensor(xyxy).view(1, 4)) / gn).view(-1).tolist() # normalized xywh + line = (cls, *xywh, conf) if opt.save_conf else (cls, *xywh) # label format + with open(txt_path + '.txt', 'a') as f: + f.write(('%g ' * len(line)).rstrip() % line + '\n') + + if save_img or view_img: # Add bbox to image + label = '%s %.2f' % (names[int(cls)], conf) + plot_one_box(xyxy, im0, label=label, color=colors[int(cls)], line_thickness=3) + + # Print time (inference + NMS) + print('%sDone. (%.3fs)' % (s, t2 - t1)) + + # Stream results + if view_img: + cv2.imshow(p, im0) + if cv2.waitKey(1) == ord('q'): # q to quit + raise StopIteration + + # Save results (image with detections) + if save_img: + if dataset.mode == 'images': + cv2.imwrite(save_path, im0) + else: + if vid_path != save_path: # new video + vid_path = save_path + if isinstance(vid_writer, cv2.VideoWriter): + vid_writer.release() # release previous video writer + + fourcc = 'mp4v' # output video codec + fps = vid_cap.get(cv2.CAP_PROP_FPS) + w = int(vid_cap.get(cv2.CAP_PROP_FRAME_WIDTH)) + h = int(vid_cap.get(cv2.CAP_PROP_FRAME_HEIGHT)) + vid_writer = cv2.VideoWriter(save_path, cv2.VideoWriter_fourcc(*fourcc), fps, (w, h)) + vid_writer.write(im0) + + if save_txt or save_img: + print('Results saved to %s' % save_dir) + + print('Done. (%.3fs)' % (time.time() - t0)) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--weights', nargs='+', type=str, default='yolov5s.pt', help='model.pt path(s)') + parser.add_argument('--source', type=str, default='data/images', help='source') # file/folder, 0 for webcam + parser.add_argument('--img-size', type=int, default=640, help='inference size (pixels)') + parser.add_argument('--conf-thres', type=float, default=0.25, help='object confidence threshold') + parser.add_argument('--iou-thres', type=float, default=0.45, help='IOU threshold for NMS') + parser.add_argument('--device', default='', help='cuda device, i.e. 
0 or 0,1,2,3 or cpu') + parser.add_argument('--view-img', action='store_true', help='display results') + parser.add_argument('--save-txt', action='store_true', help='save results to *.txt') + parser.add_argument('--save-conf', action='store_true', help='save confidences in --save-txt labels') + parser.add_argument('--classes', nargs='+', type=int, help='filter by class: --class 0, or --class 0 2 3') + parser.add_argument('--agnostic-nms', action='store_true', help='class-agnostic NMS') + parser.add_argument('--augment', action='store_true', help='augmented inference') + parser.add_argument('--update', action='store_true', help='update all models') + parser.add_argument('--project', default='runs/detect', help='save results to project/name') + parser.add_argument('--name', default='exp', help='save results to project/name') + parser.add_argument('--exist-ok', action='store_true', help='existing project/name ok, do not increment') + opt = parser.parse_args() + print(opt) + + with torch.no_grad(): + if opt.update: # update all models (to fix SourceChangeWarning) + for opt.weights in ['yolov5s.pt', 'yolov5m.pt', 'yolov5l.pt', 'yolov5x.pt']: + detect() + strip_optimizer(opt.weights) + else: + detect() diff --git a/export.py b/export.py new file mode 100644 index 0000000..c5e96f1 --- /dev/null +++ b/export.py @@ -0,0 +1,94 @@ +"""Exports a YOLOv5 *.pt model to ONNX and TorchScript formats + +Usage: + $ export PYTHONPATH="$PWD" && python models/export.py --weights ./weights/yolov5s.pt --img 640 --batch 1 +""" + +import argparse +import sys +import time + +sys.path.append('./') # to run '$ python *.py' files in subdirectories + +import torch +import torch.nn as nn + +import models +from models.experimental import attempt_load +from utils.activations import Hardswish +from utils.general import set_logging, check_img_size + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--weights', type=str, default='./yolov5s.pt', help='weights path') # from yolov5/models/ + parser.add_argument('--img-size', nargs='+', type=int, default=[640, 640], help='image size') # height, width + parser.add_argument('--batch-size', type=int, default=1, help='batch size') + opt = parser.parse_args() + opt.img_size *= 2 if len(opt.img_size) == 1 else 1 # expand + print(opt) + set_logging() + t = time.time() + + # Load PyTorch model + model = attempt_load(opt.weights, map_location=torch.device('cpu')) # load FP32 model + labels = model.names + + # Checks + gs = int(max(model.stride)) # grid size (max stride) + opt.img_size = [check_img_size(x, gs) for x in opt.img_size] # verify img_size are gs-multiples + + # Input + img = torch.zeros(opt.batch_size, 3, *opt.img_size) # image size(1,3,320,192) iDetection + + # Update model + for k, m in model.named_modules(): + m._non_persistent_buffers_set = set() # pytorch 1.6.0 compatibility + if isinstance(m, models.common.Conv) and isinstance(m.act, nn.Hardswish): + m.act = Hardswish() # assign activation + # if isinstance(m, models.yolo.Detect): + # m.forward = m.forward_export # assign forward (optional) + model.model[-1].export = True # set Detect() layer export=True + y = model(img) # dry run + + # TorchScript export + try: + print('\nStarting TorchScript export with torch %s...' 
% torch.__version__) + f = opt.weights.replace('.pt', '.torchscript.pt') # filename + ts = torch.jit.trace(model, img) + ts.save(f) + print('TorchScript export success, saved as %s' % f) + except Exception as e: + print('TorchScript export failure: %s' % e) + + # ONNX export + try: + import onnx + + print('\nStarting ONNX export with onnx %s...' % onnx.__version__) + f = opt.weights.replace('.pt', '.onnx') # filename + torch.onnx.export(model, img, f, verbose=False, opset_version=12, input_names=['images'], + output_names=['classes', 'boxes'] if y is None else ['output']) + + # Checks + onnx_model = onnx.load(f) # load onnx model + onnx.checker.check_model(onnx_model) # check onnx model + # print(onnx.helper.printable_graph(onnx_model.graph)) # print a human readable model + print('ONNX export success, saved as %s' % f) + except Exception as e: + print('ONNX export failure: %s' % e) + + # CoreML export + try: + import coremltools as ct + + print('\nStarting CoreML export with coremltools %s...' % ct.__version__) + # convert model from torchscript and apply pixel scaling as per detect.py + model = ct.convert(ts, inputs=[ct.ImageType(name='image', shape=img.shape, scale=1 / 255.0, bias=[0, 0, 0])]) + f = opt.weights.replace('.pt', '.mlmodel') # filename + model.save(f) + print('CoreML export success, saved as %s' % f) + except Exception as e: + print('CoreML export failure: %s' % e) + + # Finish + print('\nExport complete (%.2fs). Visualize with https://github.com/lutzroeder/netron.' % (time.time() - t)) diff --git a/hubconf.py b/hubconf.py new file mode 100644 index 0000000..96d6b12 --- /dev/null +++ b/hubconf.py @@ -0,0 +1,119 @@ +"""File for accessing YOLOv5 via PyTorch Hub https://pytorch.org/hub/ + +Usage: + import torch + model = torch.hub.load('ultralytics/yolov5', 'yolov5s', pretrained=True, channels=3, classes=80) +""" + +from pathlib import Path + +import torch + +from models.yolo import Model +from utils.general import set_logging +from utils.google_utils import attempt_download + +dependencies = ['torch', 'yaml'] +set_logging() + + +def create(name, pretrained, channels, classes): + """Creates a specified YOLOv5 model + + Arguments: + name (str): name of model, i.e. 'yolov5s' + pretrained (bool): load pretrained weights into the model + channels (int): number of input channels + classes (int): number of model classes + + Returns: + pytorch model + """ + config = Path(__file__).parent / 'models' / f'{name}.yaml' # model.yaml path + try: + model = Model(config, channels, classes) + if pretrained: + fname = f'{name}.pt' # checkpoint filename + attempt_download(fname) # download if not found locally + ckpt = torch.load(fname, map_location=torch.device('cpu')) # load + state_dict = ckpt['model'].float().state_dict() # to FP32 + state_dict = {k: v for k, v in state_dict.items() if model.state_dict()[k].shape == v.shape} # filter + model.load_state_dict(state_dict, strict=False) # load + if len(ckpt['model'].names) == classes: + model.names = ckpt['model'].names # set class names attribute + # model = model.autoshape() # for PIL/cv2/np inputs and NMS + return model + + except Exception as e: + help_url = 'https://github.com/ultralytics/yolov5/issues/36' + s = 'Cache maybe be out of date, try force_reload=True. See %s for help.' 
% help_url
+        raise Exception(s) from e
+
+
+def yolov5s(pretrained=False, channels=3, classes=80):
+    """YOLOv5-small model from https://github.com/ultralytics/yolov5
+
+    Arguments:
+        pretrained (bool): load pretrained weights into the model, default=False
+        channels (int): number of input channels, default=3
+        classes (int): number of model classes, default=80
+
+    Returns:
+        pytorch model
+    """
+    return create('yolov5s', pretrained, channels, classes)
+
+
+def yolov5m(pretrained=False, channels=3, classes=80):
+    """YOLOv5-medium model from https://github.com/ultralytics/yolov5
+
+    Arguments:
+        pretrained (bool): load pretrained weights into the model, default=False
+        channels (int): number of input channels, default=3
+        classes (int): number of model classes, default=80
+
+    Returns:
+        pytorch model
+    """
+    return create('yolov5m', pretrained, channels, classes)
+
+
+def yolov5l(pretrained=False, channels=3, classes=80):
+    """YOLOv5-large model from https://github.com/ultralytics/yolov5
+
+    Arguments:
+        pretrained (bool): load pretrained weights into the model, default=False
+        channels (int): number of input channels, default=3
+        classes (int): number of model classes, default=80
+
+    Returns:
+        pytorch model
+    """
+    return create('yolov5l', pretrained, channels, classes)
+
+
+def yolov5x(pretrained=False, channels=3, classes=80):
+    """YOLOv5-xlarge model from https://github.com/ultralytics/yolov5
+
+    Arguments:
+        pretrained (bool): load pretrained weights into the model, default=False
+        channels (int): number of input channels, default=3
+        classes (int): number of model classes, default=80
+
+    Returns:
+        pytorch model
+    """
+    return create('yolov5x', pretrained, channels, classes)
+
+
+if __name__ == '__main__':
+    model = create(name='yolov5s', pretrained=True, channels=3, classes=80)  # example
+    model = model.fuse().autoshape()  # for PIL/cv2/np inputs and NMS
+
+    # Verify inference
+    from PIL import Image
+
+    imgs = [Image.open(x) for x in Path('data/images').glob('*.jpg')]
+    results = model(imgs)
+    results.show()
+    results.print()
diff --git a/kneron/bie2nef.py b/kneron/bie2nef.py
new file mode 100644
index 0000000..0bac24c
--- /dev/null
+++ b/kneron/bie2nef.py
@@ -0,0 +1,44 @@
+import ktc
+import os
+import shutil
+import subprocess
+
+# Directory that holds the `.bie` and `.nef` files
+onnx_dir = "runs/train/exp24/weights/"
+bie_file = os.path.join(onnx_dir, "input.kdp720.scaled.bie")  # make sure the `.bie` path is correct
+
+# Make sure the `.bie` file exists
+if not os.path.exists(bie_file):
+    raise FileNotFoundError(f"❌ Error: BIE file not found at {bie_file}")
+
+print(f"✅ Found BIE file: {bie_file}")
+
+# Initialize the ModelConfig
+km = ktc.ModelConfig(20008, "0001", "720", bie_path=bie_file)
+
+# Run the `.nef` compilation
+nef_model_path = ktc.compile([km])
+
+# Print the path where the `.nef` was generated
+print(f"🔍 Generated NEF file at: {nef_model_path}")
+
+# Make sure the `.nef` conversion succeeded
+if not nef_model_path or not os.path.exists(nef_model_path):
+    raise RuntimeError(f"❌ Error: NEF model was not generated at {nef_model_path}")
+
+# Make sure the target directory exists
+os.makedirs(onnx_dir, exist_ok=True)
+
+# Copy the `.nef` into the target directory
+nef_save_path = os.path.join(onnx_dir, os.path.basename(nef_model_path))
+shutil.copy(nef_model_path, nef_save_path)
+
+# Immediately verify that the `.nef` really exists
+if os.path.exists(nef_save_path):
+    print(f"\n✅ NEF file successfully saved to: {nef_save_path}")
+else:
+    raise RuntimeError(f"❌ Error: NEF file NOT found in {nef_save_path} after copying!")
+
+# Run `ls` to confirm the `.nef` is visible from inside Python
+print("\n🔍 Listing files in target directory:")
+subprocess.run(["ls", "-lh", onnx_dir])
diff --git
a/kneron/exporting/yolov5/__init__.py b/kneron/exporting/yolov5/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/kneron/exporting/yolov5/common.py b/kneron/exporting/yolov5/common.py
new file mode 100644
index 0000000..101bcc5
--- /dev/null
+++ b/kneron/exporting/yolov5/common.py
@@ -0,0 +1,227 @@
+# This file contains modules common to various models
+import math  # used by DWConv (math.gcd)
+import numpy as np  # used by MixConv2d
+import torch.nn as nn
+import torch
+import torch.nn.functional as F
+def autopad(k, p=None): # kernel, padding
+    # Pad to 'same'
+    if p is None:
+        p = k // 2 if isinstance(k, int) else [x // 2 for x in k] # auto-pad
+    return p
+
+
+def DWConv(c1, c2, k=1, s=1, act=True):
+    # Depthwise convolution
+    return Conv(c1, c2, k, s, g=math.gcd(c1, c2), act=act)
+
+
+class Conv(nn.Module):
+    # Standard convolution
+    def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True): # ch_in, ch_out, kernel, stride, padding, groups
+        super(Conv, self).__init__()
+        self.conv = nn.Conv2d(c1, c2, k, s, autopad(k, p), groups=g, bias=False)
+        self.bn = nn.BatchNorm2d(c2)
+        self.act = nn.LeakyReLU(0.1, inplace=True) if act else nn.Identity()
+
+    def forward(self, x):
+        return self.act(self.bn(self.conv(x)))
+
+    def fuseforward(self, x):
+        return self.act(self.conv(x))
+
+
+class Bottleneck(nn.Module):
+    # Standard bottleneck
+    def __init__(self, c1, c2, shortcut=True, g=1, e=0.5): # ch_in, ch_out, shortcut, groups, expansion
+        super(Bottleneck, self).__init__()
+        c_ = int(c2 * e) # hidden channels
+        self.cv1 = Conv(c1, c_, 1, 1)
+        self.cv2 = Conv(c_, c2, 3, 1, g=g)
+        self.add = shortcut and c1 == c2
+
+    def forward(self, x):
+        return x + self.cv2(self.cv1(x)) if self.add else self.cv2(self.cv1(x))
+
+
+class BottleneckCSP(nn.Module):
+    # CSP Bottleneck https://github.com/WongKinYiu/CrossStagePartialNetworks
+    def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): # ch_in, ch_out, number, shortcut, groups, expansion
+        super(BottleneckCSP, self).__init__()
+        c_ = int(c2 * e) # hidden channels
+        self.cv1 = Conv(c1, c_, 1, 1)
+        self.cv2 = nn.Conv2d(c1, c_, 1, 1, bias=False)
+        self.cv3 = nn.Conv2d(c_, c_, 1, 1, bias=False)
+        self.cv4 = Conv(2 * c_, c2, 1, 1)
+        self.bn = nn.BatchNorm2d(2 * c_) # applied to cat(cv2, cv3)
+        self.act = nn.LeakyReLU(0.1, inplace=True)
+        self.m = nn.Sequential(*[Bottleneck(c_, c_, shortcut, g, e=1.0) for _ in range(n)])
+
+    def forward(self, x):
+        y1 = self.cv3(self.m(self.cv1(x)))
+        y2 = self.cv2(x)
+        return self.cv4(self.act(self.bn(torch.cat((y1, y2), dim=1))))
+
+class SPP(nn.Module):
+    # Spatial pyramid pooling layer used in YOLOv3-SPP
+    def __init__(self, c1, c2, k=(5, 9, 13)):
+        super(SPP, self).__init__()
+        c_ = c1 // 2 # hidden channels
+        self.cv1 = Conv(c1, c_, 1, 1)
+        self.cv2 = Conv(c_ * (len(k) + 1), c2, 1, 1)
+        self.m = nn.ModuleList([nn.MaxPool2d(kernel_size=x, stride=1, padding=x // 2) for x in k])
+
+    def forward(self, x):
+        x = self.cv1(x)
+        return self.cv2(torch.cat([x] + [m(x) for m in self.m], 1))
+
+
+class Focus(nn.Module):#
+    # Focus wh information into c-space
+    def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True): # ch_in, ch_out, kernel, stride, padding, groups
+        super(Focus, self).__init__()
+        self.conv = Conv(c1 * 4, c2, k, s, p, g, act)
+        # 12 fixed 3x3 selector kernels: w{i}_{c} picks pixel i of each 2x2 block from input channel c,
+        # so the stride-2 conv in forward() reproduces the slicing-based Focus (cf. the x_gt check below)
+        w1_1 = torch.tensor([[[1., 0., 0.],[0., 0., 0.],[0., 0., 0.]], [[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]], [[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]]])
+        w1_2 = torch.tensor([[[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]], [[1., 0., 0.],[0., 0., 0.],[0., 0., 0.]], [[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]]])
+        w1_3 = torch.tensor([[[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]], [[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]], [[1., 0., 0.],[0., 0., 0.],[0., 0., 0.]]])
+        w3_1 = torch.tensor([[[0., 1., 0.],[0., 0., 0.],[0., 0., 0.]], [[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]], [[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]]])
+        w3_2 = torch.tensor([[[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]], [[0., 1., 0.],[0., 0., 0.],[0., 0., 0.]], [[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]]])
+        w3_3 = torch.tensor([[[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]], [[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]], [[0., 1., 0.],[0., 0., 0.],[0., 0., 0.]]])
+        w2_1 = torch.tensor([[[0., 0., 0.],[1., 0., 0.],[0., 0., 0.]], [[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]], [[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]]])
+        w2_2 = torch.tensor([[[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]], [[0., 0., 0.],[1., 0., 0.],[0., 0., 0.]], [[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]]])
+        w2_3 = torch.tensor([[[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]], [[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]], [[0., 0., 0.],[1., 0., 0.],[0., 0., 0.]]])
+        w4_1 = torch.tensor([[[0., 0., 0.],[0., 1., 0.],[0., 0., 0.]], [[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]], [[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]]])
+        w4_2 = torch.tensor([[[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]], [[0., 0., 0.],[0., 1., 0.],[0., 0., 0.]], [[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]]])
+        w4_3 = torch.tensor([[[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]], [[0., 0., 0.],[0., 0., 0.],[0., 0., 0.]], [[0., 0., 0.],[0., 1., 0.],[0., 0., 0.]]])
+        w1_1 = w1_1.view(1, 3, 3, 3)
+        w1_2 = w1_2.view(1, 3, 3, 3)
+        w1_3 = w1_3.view(1, 3, 3, 3)
+        w2_1 = w2_1.view(1, 3, 3, 3)
+        w2_2 = w2_2.view(1, 3, 3, 3)
+        w2_3 = w2_3.view(1, 3, 3, 3)
+        w3_1 = w3_1.view(1, 3, 3, 3)
+        w3_2 = w3_2.view(1, 3, 3, 3)
+        w3_3 = w3_3.view(1, 3, 3, 3)
+        w4_1 = w4_1.view(1, 3, 3, 3)
+        w4_2 = w4_2.view(1, 3, 3, 3)
+        w4_3 = w4_3.view(1, 3, 3, 3)
+        self.w_cat = torch.cat([w1_1, w1_2,w1_3, w2_1,w2_2,w2_3, w3_1,w3_2,w3_3, w4_1,w4_2,w4_3], 0)
+        self.p2d = (0, 2, 0, 2)
+
+    def forward(self, x): # x(b,c,w,h) -> y(b,4c,w/2,h/2)
+        # x = x.type(torch.cuda.FloatTensor)
+        #x_gt = self.conv(torch.cat([x[..., ::2, ::2], x[..., 1::2, ::2], x[..., ::2, 1::2], x[..., 1::2, 1::2]], 1))
+        x_pad = F.pad(x, self.p2d, 'constant', 0)
+        xx = F.conv2d(x_pad, self.w_cat.to(x.device),stride=2)
+        xx = self.conv(xx)
+        #print(torch.sum(x_gt - xx))
+        return xx
+
+
+
+class Focus_ori(nn.Module):#
+    # Focus wh information into c-space
+    def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True): # ch_in, ch_out, kernel, stride, padding, groups
+        super(Focus_ori, self).__init__()
+        self.conv = Conv(c1 * 4, c2, k, s, p, g, act)
+
+    def forward(self, x): # x(b,c,w,h) -> y(b,4c,w/2,h/2)
+        x = x.type(torch.cuda.FloatTensor)
+        return self.conv(torch.cat([x[..., ::2, ::2], x[..., 1::2, ::2], x[..., ::2, 1::2], x[..., 1::2, 1::2]], 1))
+
+class Concat(nn.Module):
+    # Concatenate a list of tensors along dimension
+    def __init__(self, dimension=1):
+        super(Concat, self).__init__()
+        self.d = dimension
+
+    def forward(self, x):
+        return torch.cat(x, self.d)
+
+
+class Flatten(nn.Module):
+    # Use after nn.AdaptiveAvgPool2d(1) to remove last 2 dimensions
+    @staticmethod
+    def forward(x):
+        return x.view(x.size(0), -1)
+
+
+class Classify(nn.Module):
+    # Classification head, i.e.
x(b,c1,20,20) to x(b,c2) + def __init__(self, c1, c2, k=1, s=1, p=None, g=1): # ch_in, ch_out, kernel, stride, padding, groups + super(Classify, self).__init__() + self.aap = nn.AdaptiveAvgPool2d(1) # to x(b,c1,1,1) + self.conv = nn.Conv2d(c1, c2, k, s, autopad(k, p), groups=g, bias=False) # to x(b,c2,1,1) + self.flat = Flatten() + + def forward(self, x): + z = torch.cat([self.aap(y) for y in (x if isinstance(x, list) else [x])], 1) # cat if list + return self.flat(self.conv(z)) # flatten to x(b,c2) + +class MixConv2d(nn.Module): + # Mixed Depthwise Conv https://arxiv.org/abs/1907.09595 + def __init__(self, c1, c2, k=(1, 3), s=1, equal_ch=True): + super(MixConv2d, self).__init__() + groups = len(k) + if equal_ch: # equal c_ per group + i = torch.linspace(0, groups - 1E-6, c2).floor() # c2 indices + c_ = [(i == g).sum() for g in range(groups)] # intermediate channels + else: # equal weight.numel() per group + b = [c2] + [0] * groups + a = np.eye(groups + 1, groups, k=-1) + a -= np.roll(a, 1, axis=1) + a *= np.array(k) ** 2 + a[0] = 1 + c_ = np.linalg.lstsq(a, b, rcond=None)[0].round() # solve for equal weight indices, ax = b + + self.m = nn.ModuleList([nn.Conv2d(c1, int(c_[g]), k[g], s, k[g] // 2, bias=False) for g in range(groups)]) + self.bn = nn.BatchNorm2d(c2) + self.act = nn.LeakyReLU(0.1, inplace=True) + + def forward(self, x): + return x + self.act(self.bn(torch.cat([m(x) for m in self.m], 1))) + +class CrossConv(nn.Module): + # Cross Convolution Downsample + def __init__(self, c1, c2, k=3, s=1, g=1, e=1.0, shortcut=False): + # ch_in, ch_out, kernel, stride, groups, expansion, shortcut + super(CrossConv, self).__init__() + c_ = int(c2 * e) # hidden channels + self.cv1 = Conv(c1, c_, (1, k), (1, s)) + self.cv2 = Conv(c_, c2, (k, 1), (s, 1), g=g) + self.add = shortcut and c1 == c2 + + def forward(self, x): + return x + self.cv2(self.cv1(x)) if self.add else self.cv2(self.cv1(x)) + +# class C3(nn.Module): +# # Cross Convolution CSP +# def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): # ch_in, ch_out, number, shortcut, groups, expansion +# super(C3, self).__init__() +# c_ = int(c2 * e) # hidden channels +# self.cv1 = Conv(c1, c_, 1, 1) +# self.cv2 = nn.Conv2d(c1, c_, 1, 1, bias=False) +# self.cv3 = nn.Conv2d(c_, c_, 1, 1, bias=False) +# self.cv4 = Conv(2 * c_, c2, 1, 1) +# self.bn = nn.BatchNorm2d(2 * c_) # applied to cat(cv2, cv3) +# self.act = nn.LeakyReLU(0.1, inplace=True) +# self.m = nn.Sequential(*[CrossConv(c_, c_, 3, 1, g, 1.0, shortcut) for _ in range(n)]) + +# def forward(self, x): +# y1 = self.cv3(self.m(self.cv1(x))) +# y2 = self.cv2(x) +# return self.cv4(self.act(self.bn(torch.cat((y1, y2), dim=1)))) + +class C3(nn.Module): + # CSP Bottleneck with 3 convolutions + def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): # ch_in, ch_out, number, shortcut, groups, expansion + super(C3, self).__init__() + c_ = int(c2 * e) # hidden channels + self.cv1 = Conv(c1, c_, 1, 1) + self.cv2 = Conv(c1, c_, 1, 1) + self.cv3 = Conv(2 * c_, c2, 1) # act=FReLU(c2) + self.m = nn.Sequential(*[Bottleneck(c_, c_, shortcut, g, e=1.0) for _ in range(n)]) + + def forward(self, x): + return self.cv3(torch.cat((self.m(self.cv1(x)), self.cv2(x)), dim=1)) \ No newline at end of file diff --git a/kneron/exporting/yolov5/kneron_preprocessing/API.py b/kneron/exporting/yolov5/kneron_preprocessing/API.py new file mode 100644 index 0000000..3630caa --- /dev/null +++ b/kneron/exporting/yolov5/kneron_preprocessing/API.py @@ -0,0 +1,684 @@ +# -*- coding: utf-8 -*- + +import numpy as np +import os 
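+# Three Flow pipelines are instantiated below: `flow` is the floating-point
+# reference, while `flow_520` / `flow_720` simulate the fixed-point numerics
+# of the 520 / 720 hardware. Most helpers take and return rgb888 numpy arrays.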
+from .funcs.utils import str2int, str2bool +from . import Flow + +flow = Flow() +flow.set_numerical_type('floating') +flow_520 = Flow() +flow_520.set_numerical_type('520') +flow_720 = Flow() +flow_720.set_numerical_type('720') + +DEFAULT = None +default = { + 'crop':{ + 'align_w_to_4':False + }, + 'resize':{ + 'type':'bilinear', + 'calculate_ratio_using_CSim':False + } +} + +def set_default_as_520(): + """ + Set some default parameter as 520 setting + + crop.align_w_to_4 = True + crop.pad_square_to_4 = True + resize.type = 'fixed_520' + resize.calculate_ratio_using_CSim = True + """ + global default + default['crop']['align_w_to_4'] = True + default['resize']['type'] = 'fixed_520' + default['resize']['calculate_ratio_using_CSim'] = True + return + +def set_default_as_floating(): + """ + Set some default parameter as floating setting + + crop.align_w_to_4 = False + crop.pad_square_to_4 = False + resize.type = 'bilinear' + resize.calculate_ratio_using_CSim = False + """ + global default + default['crop']['align_w_to_4'] = False + default['resize']['type'] = 'bilinear' + default['resize']['calculate_ratio_using_CSim'] = False + pass + +def print_info_on(): + """ + turn print infomation on. + """ + flow.set_print_info(True) + flow_520.set_print_info(True) + +def print_info_off(): + """ + turn print infomation off. + """ + flow.set_print_info(False) + flow_520.set_print_info(False) + +def load_image(image): + """ + load_image function + load load_image and output as rgb888 format np.array + + Args: + image: [np.array/str], can be np.array or image file path + + Returns: + out: [np.array], rgb888 format + + Examples: + """ + image = flow.load_image(image, is_raw = False) + return image + +def load_bin(image, fmt=None, size=None): + """ + load_bin function + load bin file and output as rgb888 format np.array + + Args: + image: [str], bin file path + fmt: [str], "rgb888" / "rgb565" / "nir" + size: [tuble], (image_w, image_h) + + Returns: + out: [np.array], rgb888 format + + Examples: + >>> image_data = kneron_preprocessing.API.load_bin(image,'rgb565',(raw_w,raw_h)) + """ + assert isinstance(size, tuple) + assert isinstance(fmt, str) + # assert (fmt.lower() in ['rgb888', "rgb565" , "nir",'RGB888', "RGB565" , "NIR", 'NIR888', 'nir888']) + + image = flow.load_image(image, is_raw = True, raw_img_type='bin', raw_img_fmt = fmt, img_in_width = size[0], img_in_height = size[1]) + flow.set_color_conversion(source_format=fmt, out_format = 'rgb888') + image,_ = flow.funcs['color'](image) + return image + +def load_hex(file, fmt=None, size=None): + """ + load_hex function + load hex file and output as rgb888 format np.array + + Args: + image: [str], hex file path + fmt: [str], "rgb888" / "yuv444" / "ycbcr444" / "yuv422" / "ycbcr422" / "rgb565" + size: [tuble], (image_w, image_h) + + Returns: + out: [np.array], rgb888 format + + Examples: + >>> image_data = kneron_preprocessing.API.load_hex(image,'rgb565',(raw_w,raw_h)) + """ + assert isinstance(size, tuple) + assert isinstance(fmt, str) + assert (fmt.lower() in ['rgb888',"yuv444" , "ycbcr444" , "yuv422" , "ycbcr422" , "rgb565"]) + + image = flow.load_image(file, is_raw = True, raw_img_type='hex', raw_img_fmt = fmt, img_in_width = size[0], img_in_height = size[1]) + flow.set_color_conversion(source_format=fmt, out_format = 'rgb888') + image,_ = flow.funcs['color'](image) + return image + +def dump_image(image, output=None, file_fmt='txt',image_fmt='rgb888',order=0): + """ + dump_image function + + dump txt, bin or hex, default is txt + image format as 
following format: RGB888, RGBA8888, RGB565, NIR, YUV444, YCbCr444, YUV422, YCbCr422, default is RGB888 + + Args: + image: [np.array/str], can be np.array or image file path + output: [str], dump file path + file_fmt: [str], "bin" / "txt" / "hex", set dump file format, default is txt + image_fmt: [str], RGB888 / RGBA8888 / RGB565 / NIR / YUV444 / YCbCr444 / YUV422 / YCbCr422, default is RGB888 + + Examples: + >>> kneron_preprocessing.API.dump_image(image_data,out_path,fmt='bin') + """ + if isinstance(image, str): + image = load_image(image) + + assert isinstance(image, np.ndarray) + if output is None: + return + + flow.set_output_setting(is_dump=False, dump_format=file_fmt, image_format=image_fmt ,output_file=output) + flow.dump_image(image) + return + +def convert(image, out_fmt = 'RGB888', source_fmt = 'RGB888'): + """ + color convert + + Args: + image: [np.array], input + out_fmt: [str], "rgb888" / "rgba8888" / "rgb565" / "yuv" / "ycbcr" / "yuv422" / "ycbcr422" + source_fmt: [str], "rgb888" / "rgba8888" / "rgb565" / "yuv" / "ycbcr" / "yuv422" / "ycbcr422" + + Returns: + out: [np.array] + + Examples: + + """ + flow.set_color_conversion(source_format = source_fmt, out_format=out_fmt, simulation=False) + image,_ = flow.funcs['color'](image) + return image + +def get_crop_range(box,align_w_to_4=DEFAULT, pad_square_to_4=False,rounding_type=0): + """ + get exact crop box according different setting + + Args: + box: [tuble], (x1, y1, x2, y2) + align_w_to_4: [bool], crop length in w direction align to 4 or not, default False + pad_square_to_4: [bool], pad to square(align 4) or not, default False + rounding_type: [int], 0-> x1,y1 take floor, x2,y2 take ceil; 1->all take rounding + + Returns: + out: [tuble,4], (crop_x1, crop_y1, crop_x2, crop_y2) + + Examples: + >>> image_data = kneron_preprocessing.API.get_crop_range((272,145,461,341), align_w_to_4=True, pad_square_to_4=True) + (272, 145, 460, 341) + """ + if box is None: + return (0,0,0,0) + if align_w_to_4 is None: + align_w_to_4 = default['crop']['align_w_to_4'] + + flow.set_crop(type='specific', start_x=box[0],start_y=box[1],end_x=box[2],end_y=box[3], align_w_to_4=align_w_to_4, pad_square_to_4=pad_square_to_4,rounding_type=rounding_type) + image = np.zeros((1,1,3)).astype('uint8') + _,info = flow.funcs['crop'](image) + + return info['box'] + +def crop(image, box=None, align_w_to_4=DEFAULT, pad_square_to_4=False,rounding_type=0 ,info_out = {}): + """ + crop function + + specific crop range by box + + Args: + image: [np.array], input + box: [tuble], (x1, y1, x2, y2) + align_w_to_4: [bool], crop length in w direction align to 4 or not, default False + pad_square_to_4: [bool], pad to square(align 4) or not, default False + rounding_type: [int], 0-> x1,y1 take floor, x2,y2 take ceil; 1->all take rounding + info_out: [dic], save the final crop box into info_out['box'] + + Returns: + out: [np.array] + + Examples: + >>> info = {} + >>> image_data = kneron_preprocessing.API.crop(image_data,(272,145,461,341), align_w_to_4=True, info_out=info) + >>> info['box'] + (272, 145, 460, 341) + + >>> info = {} + >>> image_data = kneron_preprocessing.API.crop(image_data,(272,145,461,341), pad_square_to_4=True, info_out=info) + >>> info['box'] + (268, 145, 464, 341) + """ + assert isinstance(image, np.ndarray) + if box is None: + return image + if align_w_to_4 is None: + align_w_to_4 = default['crop']['align_w_to_4'] + + flow.set_crop(type='specific', start_x=box[0],start_y=box[1],end_x=box[2],end_y=box[3], align_w_to_4=align_w_to_4, 
pad_square_to_4=pad_square_to_4,rounding_type=rounding_type) + image,info = flow.funcs['crop'](image) + + info_out['box'] = info['box'] + return image + +def crop_center(image, range=None, align_w_to_4=DEFAULT, pad_square_to_4=False,rounding_type=0 ,info_out = {}): + """ + crop function + + center crop by range + + Args: + image: [np.array], input + range: [tuble], (crop_w, crop_h) + align_w_to_4: [bool], crop length in w direction align to 4 or not, default False + pad_square_to_4: [bool], pad to square(align 4) or not, default False + rounding_type: [int], 0-> x1,y1 take floor, x2,y2 take ceil; 1->all take rounding + info_out: [dic], save the final crop box into info_out['box'] + + Returns: + out: [np.array] + + Examples: + >>> info = {} + >>> image_data = kneron_preprocessing.API.crop_center(image_data,(102,40), align_w_to_4=True,info_out=info) + >>> info['box'] + (268, 220, 372, 260) + + >>> info = {} + >>> image_data = kneron_preprocessing.API.crop_center(image_data,(102,40), pad_square_to_4=True, info_out=info) + >>> info['box'] + (269, 192, 371, 294) + """ + assert isinstance(image, np.ndarray) + if range is None: + return image + if align_w_to_4 is None: + align_w_to_4 = default['crop']['align_w_to_4'] + + flow.set_crop(type='center', crop_w=range[0],crop_h=range[1], align_w_to_4=align_w_to_4, pad_square_to_4=pad_square_to_4,rounding_type=rounding_type) + image,info = flow.funcs['crop'](image) + + info_out['box'] = info['box'] + return image + +def crop_corner(image, range=None, align_w_to_4=DEFAULT,pad_square_to_4=False,rounding_type=0 ,info_out = {}): + """ + crop function + + corner crop by range + + Args: + image: [np.array], input + range: [tuble], (crop_w, crop_h) + align_w_to_4: [bool], crop length in w direction align to 4 or not, default False + pad_square_to_4: [bool], pad to square(align 4) or not, default False + rounding_type: [int], 0-> x1,y1 take floor, x2,y2 take ceil; 1->all take rounding + info_out: [dic], save the final crop box into info_out['box'] + + Returns: + out: [np.array] + + Examples: + >>> info = {} + >>> image_data = kneron_preprocessing.API.crop_corner(image_data,(102,40), align_w_to_4=True,info_out=info) + >>> info['box'] + (0, 0, 104, 40) + + >>> info = {} + >>> image_data = kneron_preprocessing.API.crop_corner(image_data,(102,40), pad_square_to_4=True,info_out=info) + >>> info['box'] + (0, -28, 102, 74) + """ + assert isinstance(image, np.ndarray) + if range is None: + return image + if align_w_to_4 is None: + align_w_to_4 = default['crop']['align_w_to_4'] + + flow.set_crop(type='corner', crop_w=range[0],crop_h=range[1], align_w_to_4=align_w_to_4, pad_square_to_4=pad_square_to_4) + image, info = flow.funcs['crop'](image) + + info_out['box'] = info['box'] + return image + +def resize(image, size=None, keep_ratio = True, zoom = True, type=DEFAULT, calculate_ratio_using_CSim = DEFAULT, info_out = {}): + """ + resize function + + resize type can be bilinear or bilicubic as floating type, fixed or fixed_520/fixed_720 as fixed type. + fixed_520/fixed_720 type has add some function to simulate 520/720 bug. 
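+    With keep_ratio=True the image is scaled to fit inside `size` while
+    preserving aspect ratio, so one output side may come out smaller (see the
+    example below); the scaled (w, h) actually produced is reported through
+    info_out['size'].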
+ + Args: + image: [np.array], input + size: [tuble], (input_w, input_h) + keep_ratio: [bool], keep_ratio or not, default True + zoom: [bool], enable resize can zoom image or not, default True + type: [str], "bilinear" / "bilicubic" / "cv2" / "fixed" / "fixed_520" / "fixed_720" + calculate_ratio_using_CSim: [bool], calculate the ratio and scale using Csim function and C float, default False + info_out: [dic], save the final scale size(w,h) into info_out['size'] + + Returns: + out: [np.array] + + Examples: + >>> info = {} + >>> image_data = kneron_preprocessing.API.resize(image_data,size=(56,56),type='fixed',info_out=info) + >>> info_out['size'] + (54,56) + """ + assert isinstance(image, np.ndarray) + if size is None: + return image + if type is None: + type = default['resize']['type'] + if calculate_ratio_using_CSim is None: + calculate_ratio_using_CSim = default['resize']['calculate_ratio_using_CSim'] + + flow.set_resize(resize_w = size[0], resize_h = size[1], type=type, keep_ratio=keep_ratio,zoom=zoom, calculate_ratio_using_CSim=calculate_ratio_using_CSim) + image, info = flow.funcs['resize'](image) + info_out['size'] = info['size'] + + return image + +def pad(image, pad_l=0, pad_r=0, pad_t=0, pad_b=0, pad_val=0): + """ + pad function + + specific left, right, top and bottom pad size. + + Args: + image[np.array]: input + pad_l: [int], pad size from left, default 0 + pad_r: [int], pad size form right, default 0 + pad_t: [int], pad size from top, default 0 + pad_b: [int], pad size form bottom, default 0 + pad_val: [float], the value of pad, , default 0 + + Returns: + out: [np.array] + + Examples: + >>> image_data = kneron_preprocessing.API.pad(image_data,20,40,20,40,-0.5) + """ + assert isinstance(image, np.ndarray) + + flow.set_padding(type='specific',pad_l=pad_l,pad_r=pad_r,pad_t=pad_t,pad_b=pad_b,pad_val=pad_val) + image, _ = flow.funcs['padding'](image) + return image + +def pad_center(image,size=None, pad_val=0): + """ + pad function + + center pad with pad size. + + Args: + image[np.array]: input + size: [tuble], (padded_size_w, padded_size_h) + pad_val: [float], the value of pad, , default 0 + + Returns: + out: [np.array] + + Examples: + >>> image_data = kneron_preprocessing.API.pad_center(image_data,size=(56,56),pad_val=-0.5) + """ + assert isinstance(image, np.ndarray) + if size is None: + return image + assert ( (image.shape[0] <= size[1]) & (image.shape[1] <= size[0]) ) + + flow.set_padding(type='center',padded_w=size[0],padded_h=size[1],pad_val=pad_val) + image, _ = flow.funcs['padding'](image) + return image + +def pad_corner(image,size=None, pad_val=0): + """ + pad function + + corner pad with pad size. 
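+    Unlike pad_center, the image stays anchored at a corner and all of the
+    padding is applied toward the opposite sides.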
+ + Args: + image[np.array]: input + size: [tuble], (padded_size_w, padded_size_h) + pad_val: [float], the value of pad, , default 0 + + Returns: + out: [np.array] + + Examples: + >>> image_data = kneron_preprocessing.API.pad_corner(image_data,size=(56,56),pad_val=-0.5) + """ + assert isinstance(image, np.ndarray) + if size is None: + return image + assert ( (image.shape[0] <= size[1]) & (image.shape[1] <= size[0]) ) + + flow.set_padding(type='corner',padded_w=size[0],padded_h=size[1],pad_val=pad_val) + image, _ = flow.funcs['padding'](image) + return image + +def norm(image,scale=256.,bias=-0.5, mean=None, std=None): + """ + norm function + + x = (x/scale - bias) + x[0,1,2] = x - mean[0,1,2] + x[0,1,2] = x / std[0,1,2] + + Args: + image: [np.array], input + scale: [float], default = 256 + bias: [float], default = -0.5 + mean: [tuble,3], default = None + std: [tuble,3], default = None + + Returns: + out: [np.array] + + Examples: + >>> image_data = kneron_preprocessing.API.norm(image_data) + >>> image_data = kneron_preprocessing.API.norm(image_data,mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) + """ + assert isinstance(image, np.ndarray) + + flow.set_normalize(type='specific',scale=scale, bias=bias, mean=mean, std =std) + image, _ = flow.funcs['normalize'](image) + return image + +def inproc_520(image,raw_fmt='rgb565',raw_size=None,npu_size=None, crop_box=None, pad_mode=0, norm='kneron', gray=False, rotate=0, radix=8, bit_width=8, round_w_to_16=True, NUM_BANK_LINE=32,BANK_ENTRY_CNT=512,MAX_IMG_PREPROC_ROW_NUM=511,MAX_IMG_PREPROC_COL_NUM=256): + """ + inproc_520 + + Args: + image: [np.array], input + crop_box: [tuble], (x1, y1, x2, y2), if None will skip crop + pad_mode: [int], 0: pad 2 sides, 1: pad 1 side, 2: no pad. default = 0 + norm: [str], default = 'kneron' + rotate: [int], 0 / 1 / 2 ,default = 0 + radix: [int], default = 8 + bit_width: [int], default = 8 + round_w_to_16: [bool], default = True + gray: [bool], default = False + + Returns: + out: [np.array] + + Examples: + >>> image_data = kneron_preprocessing.API.inproc_520(image_data,npu_size=(56,56),crop_box=(272,145,460,341),pad_mode=1) + """ + # assert isinstance(image, np.ndarray) + + if (not isinstance(image, np.ndarray)): + flow_520.set_raw_img(is_raw_img='yes',raw_img_type = 'bin',raw_img_fmt=raw_fmt, img_in_width=raw_size[0], img_in_height=raw_size[1]) + else: + flow_520.set_raw_img(is_raw_img='no') + flow_520.set_color_conversion(source_format='rgb888') + + if npu_size is None: + return image + + flow_520.set_model_size(w=npu_size[0],h=npu_size[1]) + + ## Crop + if crop_box != None: + flow_520.set_crop(start_x=crop_box[0],start_y=crop_box[1],end_x=crop_box[2],end_y=crop_box[3]) + crop_fisrt = True + else: + crop_fisrt = False + + ## Color + if gray: + flow_520.set_color_conversion(out_format='l',simulation='no') + else: + flow_520.set_color_conversion(out_format='rgb888',simulation='no') + + ## Resize & Pad + pad_mode = str2int(pad_mode) + if (pad_mode == 0): + pad_type = 'center' + resize_keep_ratio = 'yes' + elif (pad_mode == 1): + pad_type = 'corner' + resize_keep_ratio = 'yes' + else: + pad_type = 'center' + resize_keep_ratio = 'no' + + flow_520.set_resize(keep_ratio=resize_keep_ratio) + flow_520.set_padding(type=pad_type) + + ## Norm + flow_520.set_normalize(type=norm) + + ## 520 inproc + 
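    # crop_fisrt (set above whenever a crop_box is given) makes the 520
+    # pipeline crop before resizing; radix/bit_width describe the fixed-point
+    # format of the NPU input (by the usual convention, round(x * 2**radix)
+    # stored in bit_width bits).
+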
flow_520.set_520_setting(radix=radix,bit_width=bit_width,rotate=rotate,crop_fisrt=crop_fisrt,round_w_to_16=round_w_to_16,NUM_BANK_LINE=NUM_BANK_LINE,BANK_ENTRY_CNT=BANK_ENTRY_CNT,MAX_IMG_PREPROC_ROW_NUM=MAX_IMG_PREPROC_ROW_NUM,MAX_IMG_PREPROC_COL_NUM=MAX_IMG_PREPROC_COL_NUM) + image_data, _ = flow_520.run_whole_process(image) + + return image_data + +def inproc_720(image,raw_fmt='rgb565',raw_size=None,npu_size=None, crop_box=None, pad_mode=0, norm='kneron', gray=False): + """ + inproc_720 + + Args: + image: [np.array], input + crop_box: [tuble], (x1, y1, x2, y2), if None will skip crop + pad_mode: [int], 0: pad 2 sides, 1: pad 1 side, 2: no pad. default = 0 + norm: [str], default = 'kneron' + rotate: [int], 0 / 1 / 2 ,default = 0 + radix: [int], default = 8 + bit_width: [int], default = 8 + round_w_to_16: [bool], default = True + gray: [bool], default = False + + Returns: + out: [np.array] + + Examples: + >>> image_data = kneron_preprocessing.API.inproc_520(image_data,npu_size=(56,56),crop_box=(272,145,460,341),pad_mode=1) + """ + # assert isinstance(image, np.ndarray) + + if (not isinstance(image, np.ndarray)): + flow_720.set_raw_img(is_raw_img='yes',raw_img_type = 'bin',raw_img_fmt=raw_fmt, img_in_width=raw_size[0], img_in_height=raw_size[1]) + else: + flow_720.set_raw_img(is_raw_img='no') + flow_720.set_color_conversion(source_format='rgb888') + + if npu_size is None: + return image + + flow_720.set_model_size(w=npu_size[0],h=npu_size[1]) + + ## Crop + if crop_box != None: + flow_720.set_crop(start_x=crop_box[0],start_y=crop_box[1],end_x=crop_box[2],end_y=crop_box[3]) + crop_fisrt = True + else: + crop_fisrt = False + + ## Color + if gray: + flow_720.set_color_conversion(out_format='l',simulation='no') + else: + flow_720.set_color_conversion(out_format='rgb888',simulation='no') + + ## Resize & Pad + pad_mode = str2int(pad_mode) + if (pad_mode == 0): + pad_type = 'center' + resize_keep_ratio = 'yes' + elif (pad_mode == 1): + pad_type = 'corner' + resize_keep_ratio = 'yes' + else: + pad_type = 'center' + resize_keep_ratio = 'no' + + flow_720.set_resize(keep_ratio=resize_keep_ratio) + flow_720.set_padding(type=pad_type) + + ## 720 inproc + # flow_720.set_720_setting(radix=radix,bit_width=bit_width,rotate=rotate,crop_fisrt=crop_fisrt,round_w_to_16=round_w_to_16,NUM_BANK_LINE=NUM_BANK_LINE,BANK_ENTRY_CNT=BANK_ENTRY_CNT,MAX_IMG_PREPROC_ROW_NUM=MAX_IMG_PREPROC_ROW_NUM,MAX_IMG_PREPROC_COL_NUM=MAX_IMG_PREPROC_COL_NUM) + image_data, _ = flow_720.run_whole_process(image) + + return image_data + +def bit_match(data1, data2): + """ + bit_match function + + check data1 is equal to data2 or not. 
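+    Inputs may be ndarrays or paths to .txt / .bin dumps; each is loaded and
+    flattened before comparison, and the positions of mismatched entries are
+    returned on failure.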
+ + Args: + data1: [np.array / str], can be array or txt/bin file + data2: [np.array / str], can be array or txt/bin file + + Returns: + out1: [bool], is match or not + out2: [np.array], if not match, save the position for mismatched data + + Examples: + >>> result, mismatched = kneron_preprocessing.API.bit_match(data1,data2) + """ + if isinstance(data1, str): + if os.path.splitext(data1)[1] == '.bin': + data1 = np.fromfile(data1, dtype='uint8') + elif os.path.splitext(data1)[1] == '.txt': + data1 = np.loadtxt(data1) + + assert isinstance(data1, np.ndarray) + + if isinstance(data2, str): + if os.path.splitext(data2)[1] == '.bin': + data2 = np.fromfile(data2, dtype='uint8') + elif os.path.splitext(data2)[1] == '.txt': + data2 = np.loadtxt(data2) + + assert isinstance(data2, np.ndarray) + + + data1 = data1.reshape((-1,1)) + data2 = data2.reshape((-1,1)) + + if not(len(data1) == len(data2)): + print('error len') + return False, np.zeros((1)) + else: + ans = data2 - data1 + if len(np.where(ans>0)[0]) > 0: + print('error',np.where(ans>0)[0]) + return False, np.where(ans>0)[0] + else: + print('pass') + return True, np.zeros((1)) + +def cpr_to_crp(x_start, x_end, y_start, y_end, pad_l, pad_r, pad_t, pad_b, rx_start, rx_end, ry_start, ry_end): + """ + calculate the parameters of crop->pad->resize flow to HW crop->resize->padding flow + + Args: + + Returns: + + Examples: + + """ + pad_l = round(pad_l * (rx_end-rx_start) / (x_end - x_start + pad_l + pad_r)) + pad_r = round(pad_r * (rx_end-rx_start) / (x_end - x_start + pad_l + pad_r)) + pad_t = round(pad_t * (ry_end-ry_start) / (y_end - y_start + pad_t + pad_b)) + pad_b = round(pad_b * (ry_end-ry_start) / (y_end - y_start + pad_t + pad_b)) + + rx_start +=pad_l + rx_end -=pad_r + ry_start +=pad_t + ry_end -=pad_b + + return x_start, x_end, y_start, y_end, pad_l, pad_r, pad_t, pad_b, rx_start, rx_end, ry_start, ry_end \ No newline at end of file diff --git a/kneron/exporting/yolov5/kneron_preprocessing/Cflow.py b/kneron/exporting/yolov5/kneron_preprocessing/Cflow.py new file mode 100644 index 0000000..02fffe1 --- /dev/null +++ b/kneron/exporting/yolov5/kneron_preprocessing/Cflow.py @@ -0,0 +1,172 @@ +import numpy as np +import argparse +import kneron_preprocessing + +def main_(args): + image = args.input_file + filefmt = args.file_fmt + if filefmt == 'bin': + raw_format = args.raw_format + raw_w = args.input_width + raw_h = args.input_height + + image_data = kneron_preprocessing.API.load_bin(image,raw_format,(raw_w,raw_h)) + else: + image_data = kneron_preprocessing.API.load_image(image) + + + npu_w = args.width + npu_h = args.height + + crop_first = True if args.crop_first == "True" else False + if crop_first: + x1 = args.x_pos + y1 = args.y_pos + x2 = args.crop_w + x1 + y2 = args.crop_h + y1 + crop_box = [x1,y1,x2,y2] + else: + crop_box = None + + pad_mode = args.pad_mode + norm_mode = args.norm_mode + bitwidth = args.bitwidth + radix = args.radix + rotate = args.rotate_mode + + ## + image_data = kneron_preprocessing.API.inproc_520(image_data,npu_size=(npu_w,npu_h),crop_box=crop_box,pad_mode=pad_mode,norm=norm_mode,rotate=rotate,radix=radix,bit_width=bitwidth) + + output_file = args.output_file + kneron_preprocessing.API.dump_image(image_data,output_file,'bin','rgba') + + return + + +if __name__ == "__main__": + argparser = argparse.ArgumentParser( + description="preprocessing" + ) + + argparser.add_argument( + '-i', + '--input_file', + help="input file name" + ) + + argparser.add_argument( + '-ff', + '--file_fmt', + help="input file format, jpg or 
bin" + ) + + argparser.add_argument( + '-rf', + '--raw_format', + help="input file image format, rgb or rgb565 or nir" + ) + + argparser.add_argument( + '-i_w', + '--input_width', + type=int, + help="input image width" + ) + + argparser.add_argument( + '-i_h', + '--input_height', + type=int, + help="input image height" + ) + + argparser.add_argument( + '-o', + '--output_file', + help="output file name" + ) + + argparser.add_argument( + '-s_w', + '--width', + type=int, + help="output width for npu input", + ) + + argparser.add_argument( + '-s_h', + '--height', + type=int, + help="output height for npu input", + ) + + argparser.add_argument( + '-c_f', + '--crop_first', + help="crop first True or False", + ) + + argparser.add_argument( + '-x', + '--x_pos', + type=int, + help="left up coordinate x", + ) + + argparser.add_argument( + '-y', + '--y_pos', + type=int, + help="left up coordinate y", + ) + + argparser.add_argument( + '-c_w', + '--crop_w', + type=int, + help="crop width", + ) + + argparser.add_argument( + '-c_h', + '--crop_h', + type=int, + help="crop height", + ) + + argparser.add_argument( + '-p_m', + '--pad_mode', + type=int, + help=" 0: pad 2 sides, 1: pad 1 side, 2: no pad.", + ) + + argparser.add_argument( + '-n_m', + '--norm_mode', + help="normalizaton mode: yolo, kneron, tf." + ) + + argparser.add_argument( + '-r_m', + '--rotate_mode', + type=int, + help="rotate mode:0,1,2" + ) + + argparser.add_argument( + '-bw', + '--bitwidth', + type=int, + help="Int for bitwidth" + ) + + argparser.add_argument( + '-r', + '--radix', + type=int, + help="Int for radix" + ) + + args = argparser.parse_args() + main_(args) \ No newline at end of file diff --git a/kneron/exporting/yolov5/kneron_preprocessing/Flow.py b/kneron/exporting/yolov5/kneron_preprocessing/Flow.py new file mode 100644 index 0000000..bab0041 --- /dev/null +++ b/kneron/exporting/yolov5/kneron_preprocessing/Flow.py @@ -0,0 +1,1226 @@ +import numpy as np +from PIL import Image +import json +import math +import sys +from .funcs import * +from .funcs.utils import str2bool, bin_loader, hex_loader, str_fill, clip_ary +from .funcs.utils_520 import round_up_16, round_up_n, cal_img_row_offset, get_pad_num, get_byte_per_pixel +from .funcs.utils_720 import twos_complement_pix, clip_pix +from ctypes import c_float + + +class Flow(object): + # class function + def __init__(self, config_path = ''): + ''' + @brief: + Class name: Flow + Constructor with config_path + + @param: + config_path[str]: json file path or empty, init this class with json file. If empty, will use default setting. 
+ ''' + # init config + self.__init_config() + + # update config with joson file + try: + with open(config_path, encoding='utf-8') as f: + self.config = json.load(f) + except IOError: + pass + + # print info + if str2bool(self.config['print_info']): + print("pre-processing type:", self.config['type_name'],", model_size:",self.config['model_size'],", numerical_type",self.config['numerical_type']) + + # init funcs + self.error_state = 0 + self.subclass = {} + self.subclass['color'] = ColorConversion.runner() + self.subclass['resize'] = Resize.runner() + self.subclass['crop'] = Crop.runner() + self.subclass['padding'] = Padding.runner() + self.subclass['normalize'] = Normalize.runner() + + self.funcs = {} + self.funcs['crop'] = self.run_crop + self.funcs['color'] = self.run_color_conversion + self.funcs['resize'] = self.run_resize + self.funcs['normalize'] = self.run_normalize + self.funcs['padding'] = self.run_padding + + return + + def __init_config(self): + ''' + private function + ''' + self.config = { + "_comment": "PreProcessing", + "type_name": "default", + "numerical_type": "floating", + "print_info":"no", + "model_size": [ + 56, + 56 + ], + "raw_img":{ + "is_raw_img": "no", + "raw_img_type": "bin", + "raw_img_fmt": "rgb565", + "img_in_width": 640, + "img_in_height": 480 + }, + "output_setting":{ + "is_dump": "no", + "dump_format":"bin", + "output_file":"default.bin", + "image_format":"RGB888" + }, + "520_setting":{ + "radix": 8, + "bit_width": 8, + "rotate": 0, + "crop_fisrt": "no", + "NUM_BANK_LINE": 32, + "BANK_ENTRY_CNT": 512, + "MAX_IMG_PREPROC_ROW_NUM": 511, + "MAX_IMG_PREPROC_COL_NUM": 256, + "round_w_to_16": "no" + }, + "720_setting":{ + "radix": 8, + "shift":0, + "sub":0, + "bit_width": 8, + "rotate": 0, + "crop_fisrt": "no", + "matrix_c00": 1, + "matrix_c01": 0, + "matrix_c02": 0, + "matrix_c10": 0, + "matrix_c11": 1, + "matrix_c12": 0, + "matrix_c20": 0, + "matrix_c21": 0, + "matrix_c22": 1, + "vector_b00": 0, + "vector_b01": 0, + "vector_b02": 0 + }, + "floating_setting":{ + "job_list":[ + "color", + "crop", + "resize", + "padding", + "normalize", + ] + }, + "function_setting": { + "color": { + "out_format": "rgb888", + "options": { + "simulation": "no", + "simulation_format": "" + } + }, + "crop": { + "type": "corner", + "align_w_to_4":"no", + "pad_square_to_4":"no", + "rounding_type":0, + "crop_w": "", + "crop_h": "", + "start_x": "", + "start_y": "", + "end_x": "", + "end_y": "" + }, + "resize": { + "type": "fixed", + "keep_ratio": "yes", + "calculate_ratio_using_CSim": "yes", + "zoom": "yes", + "resize_w": "", + "resize_h": "", + }, + "padding": { + "type": "corner", + "pad_val": "", + "padded_w": "", + "padded_h": "", + "pad_l": "", + "pad_r": "", + "pad_t": "", + "pad_b": "" + }, + "normalize": { + "type": "kneron", + "scale": "", + "bias": "", + "mean": "", + "std": "" + } + } + } + return + + def __update_color(self): + ''' + private function + ''' + # + dic = self.config['function_setting']['color'] + dic['model_size'] = self.config['model_size'] + dic['print_info'] = self.config['print_info'] + self.subclass['color'].update(**dic) + + return + + def __update_crop(self): + ''' + private function + ''' + dic = {} + # common + dic['common'] = {} + dic['common']['print_info'] = self.config['print_info'] + dic['common']['model_size'] = self.config['model_size'] + dic['common']['numerical_type'] = self.config['numerical_type'] + + # general + dic['general'] = {} + dic['general']['type'] = self.config['function_setting']['crop']['type'] + dic['general']['align_w_to_4'] 
= self.config['function_setting']['crop']['align_w_to_4'] + dic['general']['pad_square_to_4'] = self.config['function_setting']['crop']['pad_square_to_4'] + dic['general']['rounding_type'] = self.config['function_setting']['crop']['rounding_type'] + dic['general']['crop_w'] = self.config['function_setting']['crop']['crop_w'] + dic['general']['crop_h'] = self.config['function_setting']['crop']['crop_h'] + dic['general']['start_x'] = self.config['function_setting']['crop']['start_x'] + dic['general']['start_y'] = self.config['function_setting']['crop']['start_y'] + dic['general']['end_x'] = self.config['function_setting']['crop']['end_x'] + dic['general']['end_y'] = self.config['function_setting']['crop']['end_y'] + + # floating + dic['floating'] = {} + + # hw + dic['hw'] = {} + + + self.subclass['crop'].update(**dic) + return + + def __update_resize(self): + ''' + private function + ''' + dic = {} + # common + dic['common'] = {} + dic['common']['print_info'] = self.config['print_info'] + dic['common']['model_size'] = self.config['model_size'] + dic['common']['numerical_type'] = self.config['numerical_type'] + + # general + dic['general'] = {} + dic['general']['type'] = self.config['function_setting']['resize']['type'] + dic['general']['keep_ratio'] = self.config['function_setting']['resize']['keep_ratio'] + dic['general']['zoom'] = self.config['function_setting']['resize']['zoom'] + dic['general']['calculate_ratio_using_CSim'] = self.config['function_setting']['resize']['calculate_ratio_using_CSim'] + dic['general']['resize_w'] = self.config['function_setting']['resize']['resize_w'] + dic['general']['resize_h'] = self.config['function_setting']['resize']['resize_h'] + + # floating + dic['floating'] = {} + + # hw + dic['hw'] = {} + + self.subclass['resize'].update(**dic) + return + + def __update_normalize(self): + ''' + private function + ''' + dic = {} + # general + dic['general'] = {} + dic['general']['print_info'] = self.config['print_info'] + dic['general']['model_size'] = self.config['model_size'] + dic['general']['numerical_type'] = self.config['numerical_type'] + dic['general']['type'] = self.config['function_setting']['normalize']['type'] + + # floating + dic['floating'] = {} + dic['floating']['scale'] = self.config['function_setting']['normalize']['scale'] + dic['floating']['bias'] = self.config['function_setting']['normalize']['bias'] + dic['floating']['mean'] = self.config['function_setting']['normalize']['mean'] + dic['floating']['std'] = self.config['function_setting']['normalize']['std'] + + # hw + dic['hw'] = {} + if self.config['numerical_type'] == '520': + dic['hw']['radix'] = self.config['520_setting']['radix'] + if self.config['numerical_type'] == '720': + dic['hw']['radix'] = self.config['720_setting']['radix'] + + self.subclass['normalize'].update(**dic) + return + + def __update_padding(self): + ''' + private function + ''' + dic = {} + # common + dic['common'] = {} + dic['common']['print_info'] = self.config['print_info'] + dic['common']['model_size'] = self.config['model_size'] + dic['common']['numerical_type'] = self.config['numerical_type'] + + # general + dic['general'] = {} + dic['general']['type'] = self.config['function_setting']['padding']['type'] + dic['general']['pad_val'] = self.config['function_setting']['padding']['pad_val'] + dic['general']['padded_w'] = self.config['function_setting']['padding']['padded_w'] + dic['general']['padded_h'] = self.config['function_setting']['padding']['padded_h'] + dic['general']['pad_l'] = 
self.config['function_setting']['padding']['pad_l']
+        dic['general']['pad_r'] = self.config['function_setting']['padding']['pad_r']
+        dic['general']['pad_t'] = self.config['function_setting']['padding']['pad_t']
+        dic['general']['pad_b'] = self.config['function_setting']['padding']['pad_b']
+
+        # floating
+        dic['floating'] = {}
+
+        # hw
+        dic['hw'] = {}
+        if self.config['numerical_type'] == '520':
+            dic['hw']['radix'] = self.config['520_setting']['radix']
+            dic['hw']['normalize_type'] = self.config['function_setting']['normalize']['type']
+        elif self.config['numerical_type'] == '720':
+            dic['hw']['radix'] = self.config['720_setting']['radix']
+            dic['hw']['normalize_type'] = self.config['function_setting']['normalize']['type']
+
+        self.subclass['padding'].update(**dic)
+        return
+
+    def set_numerical_type(self, type = ''):
+        '''
+        set_numerical_type
+
+        set the preprocessing type; floating, 520 and 720 are supported
+
+        Args:
+            type: [str], "520" / "720" / "floating"
+        '''
+        if not (type.lower() in ['520', '720', 'floating']):
+            type = 'floating'
+        self.config['numerical_type'] = type
+        return
+
+    def set_print_info(self, print_info = ''):
+        '''
+        turn printed information on or off.
+
+        Args:
+            print_info: [str], "yes" / "no"
+        '''
+        self.config['print_info'] = print_info
+        return
+
+    def set_model_size(self, w, h):
+        '''
+        set_model_size, set the output image size (the npu size)
+
+        Args:
+            w: [int]
+            h: [int]
+        '''
+        if w <= 0 or h <= 0:
+            return
+        self.config['model_size'][0] = w
+        self.config['model_size'][1] = h
+
+        return
+
+    def set_raw_img(self, is_raw_img='', raw_img_type = '', raw_img_fmt='', img_in_width='',img_in_height=''):
+        '''
+        set whether the input is a raw file
+
+        rgb888, rgb565, nir, yuv and ycbcr are supported
+
+        Args:
+            is_raw_img: [str], "yes" / "no", is a raw file or not
+            raw_img_type: [str], "bin" / "hex", the raw file format; bin and hex files are supported.
+            raw_img_fmt: [str], "rgb888" / "rgb565" / "nir" / "ycbcr422" / "ycbcr444" / "yuv422" / "yuv444", the raw image format.
+            img_in_width: [int]
+            img_in_height: [int]
+        '''
+        if not(is_raw_img==''):
+            self.config['raw_img']['is_raw_img'] = is_raw_img
+        if not(raw_img_type==''):
+            self.config['raw_img']['raw_img_type'] = raw_img_type
+        if not(raw_img_fmt==''):
+            self.config['raw_img']['raw_img_fmt'] = raw_img_fmt
+        if not(img_in_width==''):
+            self.config['raw_img']['img_in_width'] = img_in_width
+        if not(img_in_height==''):
+            self.config['raw_img']['img_in_height'] = img_in_height
+        return
+
+    def set_output_setting(self, is_dump='', dump_format='',image_format='', output_file=''):
+        '''
+        set_output_setting, whether to dump the output; the dump format can be bin, hex or txt
+
+        Args:
+            is_dump: [str], "yes" / "no", enable the dump function or not
+            dump_format: [str], "bin" / "txt" / "hex", the dump file format.
+            image_format: [str], RGB888 / RGBA8888 / RGB565 / NIR / YUV444 / YCbCr444 / YUV422 / YCbCr422
+            output_file: [str], dump file path
+        '''
+        if not(is_dump==''):
+            self.config['output_setting']['is_dump'] = is_dump
+        if not(dump_format==''):
+            self.config['output_setting']['dump_format'] = dump_format
+        if not(image_format==''):
+            self.config['output_setting']['image_format'] = image_format
+        if not(output_file==''):
+            self.config['output_setting']['output_file'] = output_file
+        return
+
+    def set_520_setting(self, radix='', bit_width='', rotate='',crop_fisrt='', round_w_to_16 ='',NUM_BANK_LINE='',BANK_ENTRY_CNT='',MAX_IMG_PREPROC_ROW_NUM='',MAX_IMG_PREPROC_COL_NUM=''):
+        '''
+        settings for the 520 inproc
+
+        Args:
+            radix: [int], default 8
+            bit_width: [int], default 8
+            rotate: [int], 0 / 1 / 2, the rotate type
+            crop_fisrt: [str], "yes" / "no", crop before inproc or not
+            round_w_to_16: [str], "yes" / "no", round w up to a multiple of 16 or not
+            NUM_BANK_LINE: [int], default 32
+            BANK_ENTRY_CNT: [int], default 512
+            MAX_IMG_PREPROC_ROW_NUM: [int], default 511
+            MAX_IMG_PREPROC_COL_NUM: [int], default 256
+        '''
+        if not(radix==''):
+            self.config['520_setting']['radix'] = radix
+        if not(bit_width==''):
+            self.config['520_setting']['bit_width'] = bit_width
+        if not(rotate==''):
+            self.config['520_setting']['rotate'] = rotate
+        if not(crop_fisrt==''):
+            self.config['520_setting']['crop_fisrt'] = crop_fisrt
+        if not(round_w_to_16==''):
+            self.config['520_setting']['round_w_to_16'] = round_w_to_16
+        if not(NUM_BANK_LINE==''):
+            self.config['520_setting']['NUM_BANK_LINE'] = NUM_BANK_LINE
+        if not(BANK_ENTRY_CNT==''):
+            self.config['520_setting']['BANK_ENTRY_CNT'] = BANK_ENTRY_CNT
+        if not(MAX_IMG_PREPROC_ROW_NUM==''):
+            self.config['520_setting']['MAX_IMG_PREPROC_ROW_NUM'] = MAX_IMG_PREPROC_ROW_NUM
+        if not(MAX_IMG_PREPROC_COL_NUM==''):
+            self.config['520_setting']['MAX_IMG_PREPROC_COL_NUM'] = MAX_IMG_PREPROC_COL_NUM
+        return
+
+    def set_720_setting(self, radix='', bit_width='', rotate='',crop_fisrt='', matrix='',vector=''):
+        '''
+        settings for the 720 inproc
+
+        Args:
+            radix: [int], default 8
+            bit_width: [int], default 8
+            rotate: [int], 0 / 1 / 2, the rotate type
+            crop_fisrt: [str], "yes" / "no", crop before inproc or not
+            matrix: [list]
+            vector: [list]
+        '''
+        if not(radix==''):
+            self.config['720_setting']['radix'] = radix
+        if not(bit_width==''):
+            self.config['720_setting']['bit_width'] = bit_width
+        if not(rotate==''):
+            self.config['720_setting']['rotate'] = rotate
+        if not(crop_fisrt==''):
+            self.config['720_setting']['crop_fisrt'] = crop_fisrt
+        return
+
+    def set_floating_setting(self, job_list = []):
+        '''
+        set_floating_setting, set the floating pre-processing job list and its order; any combination of color, crop, resize, padding and normalize
+
+        Args:
+            job_list: [list], combination of "color" / "crop" / "resize" / "padding" / "normalize"
+        '''
+        if not(job_list==[]):
+            self.config['floating_setting']['job_list'] = job_list
+        return
+
+    def set_color_conversion(self, source_format = '', out_format='', simulation='', simulation_format=''):
+        '''
+        set_color_conversion
+
+        settings for color conversion and the inproc format unit.
+        Turning simulation on converts an rgb image into another image type for simulation purposes.
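+
+        A hedged example (values are illustrative): simulate an rgb565 source while keeping rgb888 output:
+
+            flow.set_color_conversion(out_format='rgb888', simulation='yes', simulation_format='rgb565')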
+
+        Args:
+            source_format: [str], "rgb888" / "rgb565" / "yuv" / "ycbcr"
+            out_format: [str], "rgb888" / "l"
+            simulation: [str], "yes" / "no"
+            simulation_format: [str], "rgb565" / "yuv" / "ycbcr"
+        '''
+        if not(source_format==''):
+            self.config['function_setting']['color']['source_format'] = source_format
+        if not(out_format==''):
+            self.config['function_setting']['color']['out_format'] = out_format
+        if not(simulation==''):
+            self.config['function_setting']['color']['options']['simulation'] = simulation
+        if not(simulation_format==''):
+            self.config['function_setting']['color']['options']['simulation_format'] = simulation_format
+
+        return
+
+    def set_resize(self, type='', keep_ratio='', calculate_ratio_using_CSim='',zoom='', resize_w='', resize_h = ''):
+        '''
+        set_resize, settings for resize and the inproc resize unit.
+
+        The resize type can be bilinear or bilicubic for the floating flow, and fixed or fixed_520 for the fixed flow.
+        The fixed_520 type adds extra logic to simulate a known 520 hardware bug.
+
+        Args:
+            type[str]: "bilinear" / "bilicubic" / "cv2" / "fixed" / "fixed_520"
+            keep_ratio[str]: "yes" / "no"
+            calculate_ratio_using_CSim[str]: "yes" / "no", calculate the ratio and scale using the CSim function and C float
+            zoom[str]: "yes" / "no", allow resize to zoom the image or not
+            resize_w[int]: if empty, defaults to model_size[0]
+            resize_h[int]: if empty, defaults to model_size[1]
+        '''
+        if not(type==''):
+            self.config['function_setting']['resize']['type'] = type
+        if not(keep_ratio==''):
+            self.config['function_setting']['resize']['keep_ratio'] = keep_ratio
+        if not(calculate_ratio_using_CSim==''):
+            self.config['function_setting']['resize']['calculate_ratio_using_CSim'] = calculate_ratio_using_CSim
+        if not(zoom==''):
+            self.config['function_setting']['resize']['zoom'] = zoom
+        if not(resize_w==''):
+            self.config['function_setting']['resize']['resize_w'] = resize_w
+        if not(resize_h==''):
+            self.config['function_setting']['resize']['resize_h'] = resize_h
+
+        return
+
+    def set_crop(self, type='', crop_w='', crop_h='', start_x='', start_y='', end_x='', end_y='',align_w_to_4="",pad_square_to_4="",rounding_type=""):
+        '''
+        set_crop, settings for crop and the rdma crop unit.
+
+        The crop type can be corner, center or specific.
+ + if type = corner and center, need to set crop_w and crop_h(or keep empty to set as model_size) + + if type = specific, need to set start_x, start_y, end_x and end_y + + if start_x, start_y, end_x and end_y all are not empty, then the type will turn to specific automatically + + Args: + type: [str], "corner" / "center" / "specific" + crop_w: [int], if empty, then default will be model_size[0] + crop_h: [int], if empty, then default will be model_size[0] + start_x: [int] + start_y: [int] + end_x: [int] + end_y: [int] + align_w_to_4: [str], crop length in w direction align to 4 or not + pad_square_to_4: [str], pad to square(align 4) or not + rounding_type: [int], 0-> x1,y1 take floor, x2,y2 take ceil; 1->all take rounding + ''' + if not(type==''): + self.config['function_setting']['crop']['type'] = type + if not(align_w_to_4==''): + self.config['function_setting']['crop']['align_w_to_4'] = align_w_to_4 + if not(pad_square_to_4==''): + self.config['function_setting']['crop']['pad_square_to_4'] = pad_square_to_4 + if not(rounding_type==''): + self.config['function_setting']['crop']['rounding_type'] = rounding_type + if not(crop_w==''): + self.config['function_setting']['crop']['crop_w'] = crop_w + if not(crop_h==''): + self.config['function_setting']['crop']['crop_h'] = crop_h + if not(start_x==''): + self.config['function_setting']['crop']['start_x'] = start_x + if not(start_y==''): + self.config['function_setting']['crop']['start_y'] = start_y + if not(end_x==''): + self.config['function_setting']['crop']['end_x'] = end_x + if not(end_y==''): + self.config['function_setting']['crop']['end_y'] = end_y + return + + def set_padding(self, type='', pad_val='', padded_w='', padded_h='', pad_l='', pad_r='', pad_t='', pad_b=''): + ''' + set_padding, setting about padding and inproc padding unit. + + crop type can be corner,center or specific. + + if type = corner and center, need to set out_w and out_h(or keep empty to set as model_size) + + if type = specific, need to set pad_l, pad_r, pad_t and pad_b + + if pad_l, pad_r, pad_t and pad_b all are not empty, then the type will turn to specific automatically + + if numerical type = 520 or 720, then the pad_val will adjust according radix automatically + + Args: + type: [str], "corner" / "center" / "specific" + pad_val: [float] + out_w: [int] + out_h: [int] + pad_l: [int] + pad_r: [int] + pad_t: [int] + pad_b: [int] + ''' + if not(type==''): + self.config['function_setting']['padding']['type'] = type + if not(pad_val==''): + self.config['function_setting']['padding']['pad_val'] = pad_val + if not(padded_w==''): + self.config['function_setting']['padding']['padded_w'] = padded_w + if not(padded_h==''): + self.config['function_setting']['padding']['padded_h'] = padded_h + if not(pad_l==''): + self.config['function_setting']['padding']['pad_l'] = pad_l + if not(pad_r==''): + self.config['function_setting']['padding']['pad_r'] = pad_r + if not(pad_t==''): + self.config['function_setting']['padding']['pad_t'] = pad_t + if not(pad_b==''): + self.config['function_setting']['padding']['pad_b'] = pad_b + return + + def set_normalize(self, type='', scale='', bias='', mean='', std =''): + ''' + set_normalize, setting about normalize and inproc chen unit. 
+ + if numerical type = floating: + normalize type can be customized, torch, tf, caffe, yolo or kneron + if type = customized, need to set scale, bias, mean and std + + if numerical type = 520 or 720: + normalize type can be tf, yolo or kneron + + Args: + type: [str], "customized" / "torch" / "tf" / "caffe" / "yolo" / "kneron" + scale: [float] + bias: [float] + mean: [list,3] + std: [list,3] + ''' + if not(type==''): + self.config['function_setting']['normalize']['type'] = type + if not(scale==''): + self.config['function_setting']['normalize']['scale'] = scale + if not(bias==''): + self.config['function_setting']['normalize']['bias'] = bias + if not(mean==''): + self.config['function_setting']['normalize']['mean'] = mean + if not(std==''): + self.config['function_setting']['normalize']['std'] = std + return + + def load_image(self, image, is_raw = False , raw_img_type = '', raw_img_fmt = '', img_in_height = 0, img_in_width = 0): + ''' + load_image function + + Args: + image: [np.array/str], can be np.array or file path(bin/hex/jpg) + is_raw: [bool], is raw image or not (bin or hex) + raw_img_type: [str], "bin" / "hex" + raw_img_fmt: [str], "yuv444" / "ycbcr444" / "yuv422" / "ycbcr422" / "rgb565" / "nir" + img_in_width: [int] + img_in_height: [int] + + Returns: + out: [np.array], not include color convert + ''' + if isinstance(image, np.ndarray): + return image + if str2bool(is_raw): + dic ={} + dic['raw_img_fmt'] = raw_img_fmt + dic['img_in_height'] = img_in_height + dic['img_in_width'] = img_in_width + if raw_img_type.lower() in ['bin','BIN']: + image_data = bin_loader(image,**dic) + elif raw_img_type.lower() in ['hex','HEX']: + image_data = hex_loader(image,**dic) + elif isinstance(image, str): + image = Image.open(image).convert("RGB") + image_data = np.array(image).astype('uint8') + + assert isinstance(image_data, np.ndarray) + return image_data + + def dump_image(self,image_data): + ''' + dump_image function, according config setting to dump image, txt, bin or hex + + Args: + image: [np.array] + ''' + assert isinstance(image_data, np.ndarray) + assert (len(image_data.shape) >= 2) + + if (len(image_data.shape) == 2): + source_format = 'L' + if (image_data.shape[2] == 4): + source_format = 'RGBA8888' + else: + source_format = 'RGB888' + + convert = ColorConversion.runner() + if (source_format == 'L') & (self.config['output_setting']['image_format'].lower() not in ['L', 'l', 'NIR', 'nir']): + convert.update(**{"source_format": "L","out_format": "RGB888"}) + image_data, _ = convert.run(image_data) + source_format = 'RGB888' + + if (source_format == 'RGBA8888') & (self.config['output_setting']['image_format'].lower() not in ['RGBA8888', 'rgba8888','RGBA','rgba']): + convert.update(**{"source_format": "RGBA8888","out_format": "RGB888"}) + image_data, _ = convert.run(image_data) + source_format = 'RGB888' + + + if (self.config['output_setting']['image_format'].lower() in ['RGB565', 'rgb565']): + convert.update(**{"source_format": source_format,"out_format": "RGB565"}) + image_data_565, _ = convert.run(image_data) + image_data = np.zeros((image_data_565.shape[0],image_data_565.shape[1],2), dtype=np.uint8) + image_data[:,:,1] = ( image_data_565[:,:,0] << 3 ) + ( image_data_565[:,:,1] >> 3 ) + image_data[:,:,0] = ( (image_data_565[:,:,1] & 0x07) << 5 ) + image_data_565[:,:,2] + elif (self.config['output_setting']['image_format'].lower() in ['RGBA8888', 'rgba8888','RGBA','rgba']) & (source_format != 'RGBA8888'): + convert.update(**{"source_format": source_format,"out_format": "rgba"}) + 
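+            # RGBA8888 dump: reuse the color-conversion runner to append the alpha channel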
image_data, _ = convert.run(image_data) + elif (self.config['output_setting']['image_format'].lower() in ['L', 'l', 'NIR', 'nir']): + convert.update(**{"source_format": source_format,"out_format": "L"}) + image_data, _ = convert.run(image_data) + elif (self.config['output_setting']['image_format'].lower() in['YUV', 'YUV444','yuv','yuv444']): + convert.update(**{"source_format": source_format,"out_format": "YUV444"}) + image_data_YUV, _ = convert.run(image_data) + image_data = np.zeros((image_data_YUV.shape[0],image_data_YUV.shape[1],4), dtype=np.uint8) + image_data[:,:,3] = image_data_YUV[:,:,0] + image_data[:,:,2] = image_data_YUV[:,:,1] + image_data[:,:,1] = image_data_YUV[:,:,2] + elif (self.config['output_setting']['image_format'].lower() in['YUV422','yuv422']): + convert.update(**{"source_format": source_format,"out_format": "YUV444"}) + image_data_YUV, _ = convert.run(image_data) + pixels = image_data_YUV.shape[0] * image_data_YUV.shape[1] + image_data = np.zeros((pixels*2,1), dtype=np.uint8) + image_data_YUV = image_data_YUV.reshape((-1,1)) + for i in range(0,image_data.shape[0],4): + j = i//2 #source index + image_data[i+3,0] = image_data_YUV[j*3,0] + image_data[i+2,0] = image_data_YUV[j*3+1,0] + image_data[i+1,0] = image_data_YUV[j*3+3,0] + image_data[i,0] = image_data_YUV[j*3+5,0] + elif (self.config['output_setting']['image_format'].lower() in['YCBCR', 'YCBCR444','YCbCr','YCbCr444','ycbcr','ycbcr444']): + convert.update(**{"source_format": source_format,"out_format": "YCBCR444"}) + image_data_YCBCR, _ = convert.run(image_data) + image_data = np.zeros((image_data_YCBCR.shape[0],image_data_YCBCR.shape[1],4), dtype=np.uint8) + image_data[:,:,3] = image_data_YCBCR[:,:,0] + image_data[:,:,2] = image_data_YCBCR[:,:,1] + image_data[:,:,1] = image_data_YCBCR[:,:,2] + elif (self.config['output_setting']['image_format'].lower() in['YCBCR422','YCbCr422','ycbcr422']): + convert.update(**{"source_format": source_format,"out_format": "YCBCR422"}) + image_data_YCBCR, _ = convert.run(image_data) + image_data = np.zeros((image_data_YCBCR.shape[0],image_data_YCBCR.shape[1],2), dtype=np.uint8) + pixels = image_data_YCBCR.shape[0] * image_data_YCBCR.shape[1] + image_data = np.zeros((pixels*2,1), dtype=np.uint8) + image_data_YCBCR = image_data_YCBCR.reshape((-1,1)) + for i in range(0,image_data.shape[0],4): + j = i//2 #source index + image_data[i+3,0] = image_data_YCBCR[j*3,0] + image_data[i+2,0] = image_data_YCBCR[j*3+1,0] + image_data[i+1,0] = image_data_YCBCR[j*3+3,0] + image_data[i,0] = image_data_YCBCR[j*3+5,0] + + if self.config['output_setting']['dump_format'].lower() in ['txt', 'TXT']: + np.savetxt(self.config['output_setting']['output_file'],image_data.reshape((-1,1)),fmt="%.8f") + elif self.config['output_setting']['dump_format'].lower() in ['bin', 'BIN']: + image_data.reshape((-1,1)).astype("uint8").tofile(self.config['output_setting']['output_file']) + elif self.config['output_setting']['dump_format'].lower() in ['hex', 'HEX']: + height, width, c = image_data.shape + output_line = math.floor((height * width) / 4) + image_f = image_data.reshape((height * width, c)) + f = open(self.config['output_setting']['output_file'], "w") + for i in range(output_line): + pixels = "" + for j in range(min((i+1)*4-1, image_f.shape[0]-1), i*4-1, -1): + pixels = pixels + str_fill(hex(image_f[j, 3]).lstrip("0x")) + pixels = pixels + str_fill(hex(image_f[j, 2]).lstrip("0x")) + pixels = pixels + str_fill(hex(image_f[j, 1]).lstrip("0x")) + pixels = pixels + str_fill(hex(image_f[j, 0]).lstrip("0x")) + 
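+                # each hex line packs four pixels (highest index first), channels written from index 3 down to 0 via str_fill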
f.write(pixels + "\n") + return + + def run_whole_process(self, image): + ''' + run_whole_process, according config setting to run all pre-processing + + Args: + image: [np.array/str], can be np.array or file path(bin/jpg) + + Returns: + out: [np.array] + ''' + assert (self.error_state == 0) + + image_data = self.load_image( + image, + is_raw = self.config['raw_img']["is_raw_img"], + raw_img_type = self.config['raw_img']["raw_img_type"], + raw_img_fmt = self.config['raw_img']["raw_img_fmt"], + img_in_height= self.config['raw_img']["img_in_height"], + img_in_width=self.config['raw_img']["img_in_width"]) + + if str2bool(self.config['raw_img']["is_raw_img"]): + self.set_color_conversion(source_format=self.config['raw_img']["raw_img_fmt"]) + elif isinstance(image, str): + self.set_color_conversion(source_format='RGB888') + + h_ori = image_data.shape[0] + w_ori = image_data.shape[1] + + if self.config['numerical_type'] == 'floating': + image_data = self.__run_whole_process_floating(image_data) + elif self.config['numerical_type'] == '520': + image_data = self.__run_whole_process_520(image_data) + elif self.config['numerical_type'] == '720': + image_data = self.__run_whole_process_720(image_data) + + if str2bool(self.config['output_setting']['is_dump']): + self.dump_image(image_data) + + scale = max(1.0*w_ori / image_data.shape[1], 1.0*h_ori / image_data.shape[0]) + out = {'h_ori': h_ori, 'w_ori': w_ori, "scale": scale} + return image_data, out + + def __run_whole_process_floating(self,image_data): + ''' + private function + ''' + for job in self.config['floating_setting']['job_list']: + if job.lower() in ['crop','color','resize','normalize','padding']: + image_data, _ = self.funcs[job](image_data) + + return image_data + + def __run_whole_process_520(self,image_data): + ''' + private function + ''' + # init from config + originH, originW, _ = image_data.shape + npu_img_w = self.config['model_size'][0] + npu_img_h = self.config['model_size'][1] + + if self.config['function_setting']['padding']['type'].lower() in ['center','CENTER','Center','0',0]: + pad_mode = 0 + elif self.config['function_setting']['padding']['type'].lower() in ['corner','CORNER','Corner','1',1]: + pad_mode = 1 + else: + pad_mode = 2 + + if not str2bool(self.config['function_setting']['resize']['keep_ratio']): + pad_mode = 2 + + NUM_BANK_LINE = self.config['520_setting']['NUM_BANK_LINE'] + BANK_ENTRY_CNT = self.config['520_setting']['BANK_ENTRY_CNT'] + MAX_IMG_PREPROC_ROW_NUM = self.config['520_setting']['MAX_IMG_PREPROC_ROW_NUM'] + MAX_IMG_PREPROC_COL_NUM = self.config['520_setting']['MAX_IMG_PREPROC_COL_NUM'] + + raw_fmt = self.config['function_setting']['color']['source_format'] + crop_fisrt = str2bool(self.config['520_setting']['crop_fisrt']) + keep_ratio = str2bool(self.config['function_setting']['resize']['keep_ratio']) + + # init crop + if crop_fisrt: + startW = self.config['function_setting']['crop']['start_x'] + startH = self.config['function_setting']['crop']['start_y'] + cropW = self.config['function_setting']['crop']['end_x'] - self.config['function_setting']['crop']['start_x'] + cropH = self.config['function_setting']['crop']['end_y'] - self.config['function_setting']['crop']['start_y'] + else: + startW = 0 + startH = 0 + cropW = originW + cropH = originH + + crop_num = [0] * 4 + crop_num[0] = startW #left + crop_num[1] = startH #top + crop_num[2] = originW - (startW + cropW) #right + crop_num[3] = originH - (startH + cropH) #bottom + + # calculate scaleW scaleH padW padH + if keep_ratio: + out_w = npu_img_w + 
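+            # the ratios below go through ctypes.c_float so the rounding matches the firmware's single-precision C arithmetic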
out_h = npu_img_h + orig_w = cropW + orig_h = cropH + + w_ratio = c_float(out_w * 1.0 / (orig_w * 1.0)).value + h_ratio = c_float(out_h * 1.0 / (orig_h * 1.0)).value + scale_ratio = 0.0 + scale_target_w = 0 + scale_target_h = 0 + padH = 0 + padW = 0 + + bScaleW = True if w_ratio < h_ratio else False + if bScaleW: + scale_ratio = w_ratio + scale_target_w = int(c_float(scale_ratio * orig_w + 0.5).value) + scale_target_h = int(c_float(scale_ratio * orig_h + 0.5).value) + assert (abs(scale_target_w - out_w) <= 1), "Error: scale down width cannot meet expectation\n" + padH = out_h - scale_target_h + padW = 0 + assert (padH >= 0), "Error: padH shouldn't be less than zero\n" + else: + scale_ratio = h_ratio + scale_target_w = int(c_float(scale_ratio * orig_w + 0.5).value) + scale_target_h = int(c_float(scale_ratio * orig_h + 0.5).value) + assert (abs(scale_target_h - out_h) <= 1), "Error: scale down height cannot meet expectation\n" + padW = out_w - scale_target_w + padH = 0 + assert (padW >= 0), "Error: padW shouldn't be less than zero\n" + + scaleW = out_w - padW + scaleH = out_h - padH + else: + scaleW = npu_img_w + scaleH = npu_img_h + padW = 0 + padH = 0 + + # calculate pad_top pad_bottom pad_left pad_right + if (pad_mode == 0): + # pad on both side + pad_top = padH // 2 + pad_bottom = (padH // 2) + (padH % 2) + pad_left = padW // 2 + pad_right = (padW // 2) + (padW % 2) + elif (pad_mode == 1): + # only pad right and bottom + pad_top = 0 + pad_bottom = padH + pad_left = 0 + pad_right = padW + else: + pad_top = 0 + pad_bottom = 0 + pad_left = 0 + pad_right = 0 + + if (pad_right > 127 or pad_bottom > 127): + print("Pad value larger than 127 is not supported\n") + + orig_pad_num = [0] * 4 + orig_pad_num[0] = pad_left + orig_pad_num[1] = pad_top + orig_pad_num[2] = pad_right + orig_pad_num[3] = pad_bottom + + valid_in_row = cropH + valid_in_col = cropW + out_row = scaleH + padH + out_col = scaleW + padW + + # calculate cut_total + max_row = int(math.floor(BANK_ENTRY_CNT * NUM_BANK_LINE / (out_col / 4))) + max_row = min(max_row, MAX_IMG_PREPROC_ROW_NUM) + + if (pad_mode == 0): + big_pad_row = (out_row % max_row) < (pad_bottom + 4) + if (big_pad_row): + last_row = int(pad_bottom + 4) + cut_total = int(math.ceil( float(out_row - last_row) / max_row) + 1) + else: + cut_total = int(math.ceil( float(out_row) / max_row)) + elif (pad_mode == 1): + big_pad_row = (out_row % max_row) < (pad_bottom + 4) + last_row = max_row + if (big_pad_row): + cut_total = int(math.ceil( float(out_row - last_row) / max_row) + 1) + else: + cut_total = int(math.ceil( float(out_row) / max_row)) + else: + big_pad_row = False + cut_total = int(math.ceil( float(out_row) / max_row)) + + # calculate seg_cnt + max_col = MAX_IMG_PREPROC_COL_NUM + last_col = 0 + if (out_col % max_col): + if (pad_mode == 0): + big_pad_col = (out_col % max_col) < (pad_right + 4) + if (big_pad_col): + last_col = round_up_n(pad_right + 4, 4) + seg_cnt = math.ceil( float(out_col - last_col) / max_col) + 1 + else: + seg_cnt = math.ceil( float(out_col) / max_col) + elif (pad_mode == 1): + big_pad_col = (out_col % max_col) < (pad_right + 4) + last_col = max_col + if (big_pad_col): + seg_cnt = math.ceil( float(out_col - last_col) / max_col) + 1 + else: + seg_cnt = math.ceil( float(out_col) / max_col) + else: + big_pad_col = False + seg_cnt = math.ceil( float(out_col) / max_col) + else: + big_pad_col = False + seg_cnt = math.ceil( float(out_col) / max_col) + + # start loop + if (big_pad_row): + remain_row = out_row - last_row + else: + remain_row = out_row + 
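+        # walk the output image in row bands (cut_total) and column segments (seg_cnt) sized to the 520 line buffers;
+        # each block is cropped, color-converted, resized, normalized and padded on its own, then stitched back together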
start_row = 0 + row_num = 0 + for r in range(0, cut_total): + start_row += row_num + block_start_row = cal_img_row_offset(crop_num, orig_pad_num, start_row, out_row, originH) + if (big_pad_row) and (r == (cut_total - 1)): + row_num = last_row + else: + row_num = min(max_row, remain_row) + + # due to HW only support max col = 256, we may need to process data in segments */ + if(big_pad_col): + remain_col = (out_col - last_col) + else: + remain_col = out_col + start_col = 0 + col_num = 0 + block_start_col = crop_num[0] + block_col = 0 + for c in range(0,seg_cnt): + start_col += col_num + block_start_col += block_col + if (big_pad_col) and (c == (seg_cnt - 1)): + col_num = last_col + else: + col_num = min(remain_col, MAX_IMG_PREPROC_COL_NUM) + + pad_num = get_pad_num(orig_pad_num, (c == 0), (r == 0), (c == seg_cnt - 1), (r == cut_total - 1)) + block_row = int(valid_in_row * (row_num - pad_num[1] - pad_num[3]) / (out_row - orig_pad_num[1] - orig_pad_num[3])) + block_col = int(valid_in_col * (col_num - pad_num[0] - pad_num[2]) / (out_col - orig_pad_num[0] - orig_pad_num[2])) + #/* (src_w * byte_per_pixel) should align to multiple of 4-byte and 2 cols */ + byte_per_pixel = get_byte_per_pixel(raw_fmt) + new_block_col = round_up_n(round_up_n(block_col, (4 / byte_per_pixel)), 2) + + if (new_block_col > block_col): + if byte_per_pixel == 1: + block_col = new_block_col - 4 + elif byte_per_pixel == 4: + block_col = new_block_col - 2 + else: + block_col = new_block_col - 2 + + ## + # crop + self.set_crop(start_x=block_start_col, start_y=block_start_row, end_x=block_start_col+block_col,end_y=block_start_row+block_row,align_w_to_4=False) + image_temp, _ = self.funcs['crop'](image_data) + + # color + image_temp, _ = self.funcs['color'](image_temp) + + # resize + self.set_resize(type='fixed_520',keep_ratio='no',calculate_ratio_using_CSim = 'yes', resize_w=(col_num - pad_num[0] - pad_num[2]),resize_h=(row_num - pad_num[1] - pad_num[3])) + image_temp, _ = self.funcs['resize'](image_temp) + + # normalize + image_temp, _ = self.funcs['normalize'](image_temp) + + # padding + self.set_padding(type='specific',pad_l=pad_num[0],pad_t=pad_num[1],pad_r=pad_num[2],pad_b=pad_num[3]) + image_temp, _ = self.funcs['padding'](image_temp) + + ## + remain_col -= col_num + if c == 0: + image_temp_H = image_temp + else: + image_temp_H = np.concatenate((image_temp_H, image_temp), axis=1) + + ## + remain_row -= row_num + if r == 0: + image_temp_V = image_temp_H + else: + image_temp_V = np.concatenate((image_temp_V, image_temp_H), axis=0) + + ## + image_data = image_temp_V + + # # round_w_to_16 + if str2bool(self.config['520_setting']['round_w_to_16']): + out_w_16 = round_up_n(out_col,16) + image = np.ones((out_row,out_w_16 - out_col,4)) *128 + image_data = np.concatenate((image_data, image), axis=1) + + # rotate + rotate = self.config['520_setting']['rotate'] + if not (rotate == 0): + dic = {} + dic['rotate_direction'] = rotate + rotate = Rotate.runner(**dic, b_print = str2bool(self.config['print_info'])) + image_data = rotate.run(image_data) + + return image_data + + def __run_whole_process_720(self,image_data): + ''' + private function + ''' + # init from config + crop_fisrt = str2bool(self.config['720_setting']['crop_fisrt']) + matrix_c00 = self.config['720_setting']['matrix_c00'] + matrix_c01 = self.config['720_setting']['matrix_c01'] + matrix_c02 = self.config['720_setting']['matrix_c02'] + matrix_c10 = self.config['720_setting']['matrix_c10'] + matrix_c11 = self.config['720_setting']['matrix_c11'] + matrix_c12 = 
self.config['720_setting']['matrix_c12'] + matrix_c20 = self.config['720_setting']['matrix_c20'] + matrix_c21 = self.config['720_setting']['matrix_c21'] + matrix_c22 = self.config['720_setting']['matrix_c22'] + vector_b00 = self.config['720_setting']['vector_b00'] + vector_b01 = self.config['720_setting']['vector_b01'] + vector_b02 = self.config['720_setting']['vector_b02'] + shiftvalue = self.config['720_setting']['shift'] + subvalue = self.config['720_setting']['sub'] + + #crop + if crop_fisrt: + image_data, _ = self.funcs['crop'](image_data) + + #color + image_data, _ = self.funcs['color'](image_data) + + #resize + self.set_resize(type='fixed_720',calculate_ratio_using_CSim = 'yes') + image_data, _ = self.funcs['resize'](image_data) + + #matrix + h, w, c = image_data.shape + image_f = image_data.reshape((h * w, c)) + matrix_c = np.array([[matrix_c00, matrix_c01, matrix_c02], + [matrix_c10, matrix_c11, matrix_c12], + [matrix_c20, matrix_c21, matrix_c22]]) + b = np.array([[vector_b00], [vector_b01], [vector_b02]]) + calculated_image_f = np.zeros(image_f.shape, dtype=np.uint8) + for i in range(h*w): + pt = np.swapaxes(image_f[np.newaxis, i, :], 0, 1) + matrix_pt = np.floor(np.multiply((matrix_c @ pt), 1/np.power(2, 1))) + matrix_pt.astype(int) + result = np.floor(np.multiply(np.add(matrix_pt, b), 1/np.power(2, 7))) + result.astype(int) + + result = twos_complement_pix(result) + + if shiftvalue == 1: + result = clip_pix(np.add(result, -128 * np.ones(result.shape)), -128, 127) + else: + result = clip_pix(result, 0, 255) + + result = result + np.array([[subvalue], [subvalue], [subvalue]]) + calculated_image_f[i, :] = clip_ary(np.squeeze(result)) + + image_data = calculated_image_f.reshape(image_data[:, :, 0:3].shape) + + #padding + image_data, _ = self.funcs['padding'](image_data) + + return image_data + + def run_crop(self, image_data): + ''' + @brief + run_crop, according config setting to run crop + + @param + image[np.array] : only can be np.array + + @return + np.array + ''' + self.__update_crop() + image_data, info = self.subclass['crop'].run(image_data) + return image_data, info + + def run_color_conversion(self, image_data): + ''' + @brief + run_color_conversion, according config setting to run color conversion + + @param + image[np.array] : only can be np.array + + @return + np.array + ''' + self.__update_color() + image_data, info = self.subclass['color'].run(image_data) + return image_data,info + + def run_resize(self, image_data): + ''' + @brief + run_resize, according config setting to run resize + + @param + image[np.array] : only can be np.array + + @return + np.array + ''' + self.__update_resize() + image_data,info = self.subclass['resize'].run(image_data) + return image_data,info + + def run_normalize(self, image_data): + ''' + @brief + run_normalize, according config setting to run normalize + + @param + image[np.array] : only can be np.array + + @return + np.array + ''' + self.__update_normalize() + image_data,info = self.subclass['normalize'].run(image_data) + return image_data,info + + def run_padding(self, image_data): + ''' + @brief + run_padding, according config setting to run padding + + @param + image[np.array] : only can be np.array + + @return + np.array + ''' + self.__update_padding() + image_data,info = self.subclass['padding'].run(image_data) + return image_data,info + + diff --git a/kneron/exporting/yolov5/kneron_preprocessing/__init__.py b/kneron/exporting/yolov5/kneron_preprocessing/__init__.py new file mode 100644 index 0000000..0a40017 --- /dev/null +++ 
b/kneron/exporting/yolov5/kneron_preprocessing/__init__.py @@ -0,0 +1,2 @@ +from .Flow import * +from .API import * diff --git a/kneron/exporting/yolov5/kneron_preprocessing/funcs/ColorConversion.py b/kneron/exporting/yolov5/kneron_preprocessing/funcs/ColorConversion.py new file mode 100644 index 0000000..8bfea7b --- /dev/null +++ b/kneron/exporting/yolov5/kneron_preprocessing/funcs/ColorConversion.py @@ -0,0 +1,285 @@ +import numpy as np +from PIL import Image +from .utils import signed_rounding, clip, str2bool + +format_bit = 10 +c00_yuv = 1 +c02_yuv = 1436 +c10_yuv = 1 +c11_yuv = -354 +c12_yuv = -732 +c20_yuv = 1 +c21_yuv = 1814 +c00_ycbcr = 1192 +c02_ycbcr = 1634 +c10_ycbcr = 1192 +c11_ycbcr = -401 +c12_ycbcr = -833 +c20_ycbcr = 1192 +c21_ycbcr = 2065 + +Matrix_ycbcr_to_rgb888 = np.array( + [[1.16438356e+00, 1.16438356e+00, 1.16438356e+00], + [2.99747219e-07, - 3.91762529e-01, 2.01723263e+00], + [1.59602686e+00, - 8.12968294e-01, 3.04059479e-06]]) + +Matrix_rgb888_to_ycbcr = np.array( + [[0.25678824, - 0.14822353, 0.43921569], + [0.50412941, - 0.29099216, - 0.36778824], + [0.09790588, 0.43921569, - 0.07142745]]) + +Matrix_rgb888_to_yuv = np.array( + [[ 0.29899106, -0.16877996, 0.49988381], + [ 0.5865453, -0.33110385, -0.41826072], + [ 0.11446364, 0.49988381, -0.08162309]]) + +# Matrix_rgb888_to_yuv = np.array( +# [[0.299, - 0.147, 0.615], +# [0.587, - 0.289, - 0.515], +# [0.114, 0.436, - 0.100]]) + +# Matrix_yuv_to_rgb888 = np.array( +# [[1.000, 1.000, 1.000], +# [0.000, - 0.394, 2.032], +# [1.140, - 0.581, 0.000]]) + +class runner(object): + def __init__(self): + self.set = { + 'print_info':'no', + 'model_size':[0,0], + 'numerical_type':'floating', + "source_format": "rgb888", + "out_format": "rgb888", + "options": { + "simulation": "no", + "simulation_format": "rgb888" + } + } + + def update(self, **kwargs): + # + self.set.update(kwargs) + + ## simulation + self.funs = [] + if str2bool(self.set['options']['simulation']) and self.set['source_format'].lower() in ['RGB888', 'rgb888', 'RGB', 'rgb']: + if self.set['options']['simulation_format'].lower() in ['YUV422', 'yuv422', 'YUV', 'yuv']: + self.funs.append(self._ColorConversion_RGB888_to_YUV422) + self.set['source_format'] = 'YUV422' + elif self.set['options']['simulation_format'].lower() in ['YCBCR422', 'YCbCr422', 'ycbcr422', 'YCBCR', 'YCbCr', 'ycbcr']: + self.funs.append(self._ColorConversion_RGB888_to_YCbCr422) + self.set['source_format'] = 'YCbCr422' + elif self.set['options']['simulation_format'].lower() in['RGB565', 'rgb565']: + self.funs.append(self._ColorConversion_RGB888_to_RGB565) + self.set['source_format'] = 'RGB565' + + ## to rgb888 + if self.set['source_format'].lower() in ['YUV444', 'yuv444','YUV422', 'yuv422', 'YUV', 'yuv']: + self.funs.append(self._ColorConversion_YUV_to_RGB888) + elif self.set['source_format'].lower() in ['YCBCR444', 'YCbCr444', 'ycbcr444','YCBCR422', 'YCbCr422', 'ycbcr422', 'YCBCR', 'YCbCr', 'ycbcr']: + self.funs.append(self._ColorConversion_YCbCr_to_RGB888) + elif self.set['source_format'].lower() in ['RGB565', 'rgb565']: + self.funs.append(self._ColorConversion_RGB565_to_RGB888) + elif self.set['source_format'].lower() in ['l', 'L' , 'nir', 'NIR']: + self.funs.append(self._ColorConversion_L_to_RGB888) + elif self.set['source_format'].lower() in ['RGBA8888', 'rgba8888' , 'RGBA', 'rgba']: + self.funs.append(self._ColorConversion_RGBA8888_to_RGB888) + + ## output format + if self.set['out_format'].lower() in ['L', 'l']: + self.funs.append(self._ColorConversion_RGB888_to_L) + elif 
self.set['out_format'].lower() in['RGB565', 'rgb565']: + self.funs.append(self._ColorConversion_RGB888_to_RGB565) + elif self.set['out_format'].lower() in['RGBA', 'RGBA8888','rgba','rgba8888']: + self.funs.append(self._ColorConversion_RGB888_to_RGBA8888) + elif self.set['out_format'].lower() in['YUV', 'YUV444','yuv','yuv444']: + self.funs.append(self._ColorConversion_RGB888_to_YUV444) + elif self.set['out_format'].lower() in['YUV422','yuv422']: + self.funs.append(self._ColorConversion_RGB888_to_YUV422) + elif self.set['out_format'].lower() in['YCBCR', 'YCBCR444','YCbCr','YCbCr444','ycbcr','ycbcr444']: + self.funs.append(self._ColorConversion_RGB888_to_YCbCr444) + elif self.set['out_format'].lower() in['YCBCR422','YCbCr422','ycbcr422']: + self.funs.append(self._ColorConversion_RGB888_to_YCbCr422) + + def print_info(self): + print("", + "source_format:", self.set['source_format'], + ', out_format:', self.set['out_format'], + ', simulation:', self.set['options']['simulation'], + ', simulation_format:', self.set['options']['simulation_format']) + + def run(self, image_data): + assert isinstance(image_data, np.ndarray) + # print info + if str2bool(self.set['print_info']): + self.print_info() + + # color + for _, f in enumerate(self.funs): + image_data = f(image_data) + + # output + info = {} + return image_data, info + + def _ColorConversion_RGB888_to_YUV444(self, image): + ## floating + image = image.astype('float') + image = (image @ Matrix_rgb888_to_yuv + 0.5).astype('uint8') + return image + + def _ColorConversion_RGB888_to_YUV422(self, image): + # rgb888 to yuv444 + image = self._ColorConversion_RGB888_to_YUV444(image) + + # yuv444 to yuv422 + u2 = image[:, 0::2, 1] + u4 = np.repeat(u2, 2, axis=1) + v2 = image[:, 1::2, 2] + v4 = np.repeat(v2, 2, axis=1) + image[..., 1] = u4 + image[..., 2] = v4 + return image + + def _ColorConversion_YUV_to_RGB888(self, image): + ## fixed + h, w, c = image.shape + image_f = image.reshape((h * w, c)) + image_rgb_f = np.zeros(image_f.shape, dtype=np.uint8) + + for i in range(h * w): + image_y = image_f[i, 0] *1024 + if image_f[i, 1] > 127: + image_u = -((~(image_f[i, 1] - 1)) & 0xFF) + else: + image_u = image_f[i, 1] + if image_f[i, 2] > 127: + image_v = -((~(image_f[i, 2] - 1)) & 0xFF) + else: + image_v = image_f[i, 2] + + image_r = c00_yuv * image_y + c02_yuv * image_v + image_g = c10_yuv * image_y + c11_yuv * image_u + c12_yuv * image_v + image_b = c20_yuv * image_y + c21_yuv * image_u + + image_r = signed_rounding(image_r, format_bit) + image_g = signed_rounding(image_g, format_bit) + image_b = signed_rounding(image_b, format_bit) + + image_r = image_r >> format_bit + image_g = image_g >> format_bit + image_b = image_b >> format_bit + + image_rgb_f[i, 0] = clip(image_r, 0, 255) + image_rgb_f[i, 1] = clip(image_g, 0, 255) + image_rgb_f[i, 2] = clip(image_b, 0, 255) + + image_rgb = image_rgb_f.reshape((h, w, c)) + return image_rgb + + def _ColorConversion_RGB888_to_YCbCr444(self, image): + ## floating + image = image.astype('float') + image = (image @ Matrix_rgb888_to_ycbcr + 0.5).astype('uint8') + image[:, :, 0] += 16 + image[:, :, 1] += 128 + image[:, :, 2] += 128 + + return image + + def _ColorConversion_RGB888_to_YCbCr422(self, image): + # rgb888 to ycbcr444 + image = self._ColorConversion_RGB888_to_YCbCr444(image) + + # ycbcr444 to ycbcr422 + cb2 = image[:, 0::2, 1] + cb4 = np.repeat(cb2, 2, axis=1) + cr2 = image[:, 1::2, 2] + cr4 = np.repeat(cr2, 2, axis=1) + image[..., 1] = cb4 + image[..., 2] = cr4 + return image + + def 
_ColorConversion_YCbCr_to_RGB888(self, image): + ## floating + if (self.set['numerical_type'] == 'floating'): + image = image.astype('float') + image[:, :, 0] -= 16 + image[:, :, 1] -= 128 + image[:, :, 2] -= 128 + image = ((image @ Matrix_ycbcr_to_rgb888) + 0.5).astype('uint8') + return image + + ## fixed + h, w, c = image.shape + image_f = image.reshape((h * w, c)) + image_rgb_f = np.zeros(image_f.shape, dtype=np.uint8) + + for i in range(h * w): + image_y = (image_f[i, 0] - 16) * c00_ycbcr + image_cb = image_f[i, 1] - 128 + image_cr = image_f[i, 2] - 128 + + image_r = image_y + c02_ycbcr * image_cr + image_g = image_y + c11_ycbcr * image_cb + c12_ycbcr * image_cr + image_b = image_y + c21_ycbcr * image_cb + + image_r = signed_rounding(image_r, format_bit) + image_g = signed_rounding(image_g, format_bit) + image_b = signed_rounding(image_b, format_bit) + + image_r = image_r >> format_bit + image_g = image_g >> format_bit + image_b = image_b >> format_bit + + image_rgb_f[i, 0] = clip(image_r, 0, 255) + image_rgb_f[i, 1] = clip(image_g, 0, 255) + image_rgb_f[i, 2] = clip(image_b, 0, 255) + + image_rgb = image_rgb_f.reshape((h, w, c)) + return image_rgb + + def _ColorConversion_RGB888_to_RGB565(self, image): + assert (len(image.shape)==3) + assert (image.shape[2]>=3) + + image_rgb565 = np.zeros(image.shape, dtype=np.uint8) + image_rgb = image.astype('uint8') + image_rgb565[:, :, 0] = image_rgb[:, :, 0] >> 3 + image_rgb565[:, :, 1] = image_rgb[:, :, 1] >> 2 + image_rgb565[:, :, 2] = image_rgb[:, :, 2] >> 3 + return image_rgb565 + + def _ColorConversion_RGB565_to_RGB888(self, image): + assert (len(image.shape)==3) + assert (image.shape[2]==3) + + image_rgb = np.zeros(image.shape, dtype=np.uint8) + image_rgb[:, :, 0] = image[:, :, 0] << 3 + image_rgb[:, :, 1] = image[:, :, 1] << 2 + image_rgb[:, :, 2] = image[:, :, 2] << 3 + return image_rgb + + def _ColorConversion_L_to_RGB888(self, image): + image_L = image.astype('uint8') + img = Image.fromarray(image_L).convert('RGB') + image_data = np.array(img).astype('uint8') + return image_data + + def _ColorConversion_RGB888_to_L(self, image): + image_rgb = image.astype('uint8') + img = Image.fromarray(image_rgb).convert('L') + image_data = np.array(img).astype('uint8') + return image_data + + def _ColorConversion_RGBA8888_to_RGB888(self, image): + assert (len(image.shape)==3) + assert (image.shape[2]==4) + return image[:,:,:3] + + def _ColorConversion_RGB888_to_RGBA8888(self, image): + assert (len(image.shape)==3) + assert (image.shape[2]==3) + imageA = np.concatenate((image, np.zeros((image.shape[0], image.shape[1], 1), dtype=np.uint8) ), axis=2) + return imageA diff --git a/kneron/exporting/yolov5/kneron_preprocessing/funcs/Crop.py b/kneron/exporting/yolov5/kneron_preprocessing/funcs/Crop.py new file mode 100644 index 0000000..3dcdb71 --- /dev/null +++ b/kneron/exporting/yolov5/kneron_preprocessing/funcs/Crop.py @@ -0,0 +1,145 @@ +import numpy as np +from PIL import Image +from .utils import str2int, str2float, str2bool, pad_square_to_4 +from .utils_520 import round_up_n +from .Runner_base import Runner_base, Param_base + +class General(Param_base): + type = 'center' + align_w_to_4 = False + pad_square_to_4 = False + rounding_type = 0 + crop_w = 0 + crop_h = 0 + start_x = 0. + start_y = 0. + end_x = 0. + end_y = 0. 
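+    # class-level defaults; runner.update() treats unequal start/end coordinates as a "specific" crop box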
+ def update(self, **dic): + self.type = dic['type'] + self.align_w_to_4 = str2bool(dic['align_w_to_4']) + self.rounding_type = str2int(dic['rounding_type']) + self.crop_w = str2int(dic['crop_w']) + self.crop_h = str2int(dic['crop_h']) + self.start_x = str2float(dic['start_x']) + self.start_y = str2float(dic['start_y']) + self.end_x = str2float(dic['end_x']) + self.end_y = str2float(dic['end_y']) + + def __str__(self): + str_out = [ + ', type:',str(self.type), + ', align_w_to_4:',str(self.align_w_to_4), + ', pad_square_to_4:',str(self.pad_square_to_4), + ', crop_w:',str(self.crop_w), + ', crop_h:',str(self.crop_h), + ', start_x:',str(self.start_x), + ', start_y:',str(self.start_y), + ', end_x:',str(self.end_x), + ', end_y:',str(self.end_y)] + return(' '.join(str_out)) + +class runner(Runner_base): + ## overwrite the class in Runner_base + general = General() + + def __str__(self): + return('') + + def update(self, **kwargs): + ## + super().update(**kwargs) + + ## + if (self.general.start_x != self.general.end_x) and (self.general.start_y != self.general.end_y): + self.general.type = 'specific' + elif(self.general.type != 'specific'): + if self.general.crop_w == 0 or self.general.crop_h == 0: + self.general.crop_w = self.common.model_size[0] + self.general.crop_h = self.common.model_size[1] + assert(self.general.crop_w > 0) + assert(self.general.crop_h > 0) + assert(self.general.type.lower() in ['CENTER', 'Center', 'center', 'CORNER', 'Corner', 'corner']) + else: + assert(self.general.type == 'specific') + + def run(self, image_data): + ## init + img = Image.fromarray(image_data) + w, h = img.size + + ## get range + if self.general.type.lower() in ['CENTER', 'Center', 'center']: + x1, y1, x2, y2 = self._calcuate_xy_center(w, h) + elif self.general.type.lower() in ['CORNER', 'Corner', 'corner']: + x1, y1, x2, y2 = self._calcuate_xy_corner(w, h) + else: + x1 = self.general.start_x + y1 = self.general.start_y + x2 = self.general.end_x + y2 = self.general.end_y + assert( ((x1 != x2) and (y1 != y2)) ) + + ## rounding + if self.general.rounding_type == 0: + x1 = int(np.floor(x1)) + y1 = int(np.floor(y1)) + x2 = int(np.ceil(x2)) + y2 = int(np.ceil(y2)) + else: + x1 = int(round(x1)) + y1 = int(round(y1)) + x2 = int(round(x2)) + y2 = int(round(y2)) + + if self.general.align_w_to_4: + # x1 = (x1+1) &(~3) #//+2 + # x2 = (x2+2) &(~3) #//+1 + x1 = (x1+3) &(~3) #//+2 + left = w - x2 + left = (left+3) &(~3) + x2 = w - left + + ## pad_square_to_4 + if str2bool(self.general.pad_square_to_4): + x1,x2,y1,y2 = pad_square_to_4(x1,x2,y1,y2) + + # do crop + box = (x1,y1,x2,y2) + img = img.crop(box) + + # print info + if str2bool(self.common.print_info): + self.general.start_x = x1 + self.general.start_y = y1 + self.general.end_x = x2 + self.general.end_y = y2 + self.general.crop_w = x2 - x1 + self.general.crop_h = y2 - y1 + self.print_info() + + # output + image_data = np.array(img) + info = {} + info['box'] = box + + return image_data, info + + + ## protect fun + def _calcuate_xy_center(self, w, h): + x1 = w/2 - self.general.crop_w / 2 + y1 = h/2 - self.general.crop_h / 2 + x2 = w/2 + self.general.crop_w / 2 + y2 = h/2 + self.general.crop_h / 2 + return x1, y1, x2, y2 + + def _calcuate_xy_corner(self, _1, _2): + x1 = 0 + y1 = 0 + x2 = self.general.crop_w + y2 = self.general.crop_h + return x1, y1, x2, y2 + + def do_crop(self, image_data, startW, startH, endW, endH): + return image_data[startH:endH, startW:endW, :] diff --git a/kneron/exporting/yolov5/kneron_preprocessing/funcs/Normalize.py 
b/kneron/exporting/yolov5/kneron_preprocessing/funcs/Normalize.py new file mode 100644 index 0000000..0760fba --- /dev/null +++ b/kneron/exporting/yolov5/kneron_preprocessing/funcs/Normalize.py @@ -0,0 +1,186 @@ +import numpy as np +from .utils import str2bool, str2int, str2float, clip_ary + +class runner(object): + def __init__(self): + self.set = { + 'general': { + 'print_info':'no', + 'model_size':[0,0], + 'numerical_type':'floating', + 'type': 'kneron' + }, + 'floating':{ + "scale": 1, + "bias": 0, + "mean": "", + "std": "", + }, + 'hw':{ + "radix":8, + "shift":"", + "sub":"" + } + } + return + + def update(self, **kwargs): + # + self.set.update(kwargs) + + # + if self.set['general']['numerical_type'] == '520': + if self.set['general']['type'].lower() in ['TF', 'Tf', 'tf']: + self.fun_normalize = self._chen_520 + self.shift = 7 - self.set['hw']['radix'] + self.sub = 128 + elif self.set['general']['type'].lower() in ['YOLO', 'Yolo', 'yolo']: + self.fun_normalize = self._chen_520 + self.shift = 8 - self.set['hw']['radix'] + self.sub = 0 + elif self.set['general']['type'].lower() in ['KNERON', 'Kneron', 'kneron']: + self.fun_normalize = self._chen_520 + self.shift = 8 - self.set['hw']['radix'] + self.sub = 128 + else: + self.fun_normalize = self._chen_520 + self.shift = 0 + self.sub = 0 + elif self.set['general']['numerical_type'] == '720': + self.fun_normalize = self._chen_720 + self.shift = 0 + self.sub = 0 + else: + if self.set['general']['type'].lower() in ['TORCH', 'Torch', 'torch']: + self.fun_normalize = self._normalize_torch + self.set['floating']['scale'] = 255. + self.set['floating']['mean'] = [0.485, 0.456, 0.406] + self.set['floating']['std'] = [0.229, 0.224, 0.225] + elif self.set['general']['type'].lower() in ['TF', 'Tf', 'tf']: + self.fun_normalize = self._normalize_tf + self.set['floating']['scale'] = 127.5 + self.set['floating']['bias'] = -1. + elif self.set['general']['type'].lower() in ['CAFFE', 'Caffe', 'caffe']: + self.fun_normalize = self._normalize_caffe + self.set['floating']['mean'] = [103.939, 116.779, 123.68] + elif self.set['general']['type'].lower() in ['YOLO', 'Yolo', 'yolo']: + self.fun_normalize = self._normalize_yolo + self.set['floating']['scale'] = 255. + elif self.set['general']['type'].lower() in ['KNERON', 'Kneron', 'kneron']: + self.fun_normalize = self._normalize_kneron + self.set['floating']['scale'] = 256. 
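+                # kneron mode: x / 256 - 0.5, mapping [0, 255] into roughly [-0.5, 0.496]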
+ self.set['floating']['bias'] = -0.5 + else: + self.fun_normalize = self._normalize_customized + self.set['floating']['scale'] = str2float(self.set['floating']['scale']) + self.set['floating']['bias'] = str2float(self.set['floating']['bias']) + if self.set['floating']['mean'] != None: + if len(self.set['floating']['mean']) != 3: + self.set['floating']['mean'] = None + if self.set['floating']['std'] != None: + if len(self.set['floating']['std']) != 3: + self.set['floating']['std'] = None + + + def print_info(self): + if self.set['general']['numerical_type'] == '520': + print("", + 'numerical_type', self.set['general']['numerical_type'], + ", type:", self.set['general']['type'], + ', shift:',self.shift, + ', sub:', self.sub) + else: + print("", + 'numerical_type', self.set['general']['numerical_type'], + ", type:", self.set['general']['type'], + ', scale:',self.set['floating']['scale'], + ', bias:', self.set['floating']['bias'], + ', mean:', self.set['floating']['mean'], + ', std:',self.set['floating']['std']) + + def run(self, image_data): + # print info + if str2bool(self.set['general']['print_info']): + self.print_info() + + # norm + image_data = self.fun_normalize(image_data) + + # output + info = {} + return image_data, info + + def _normalize_torch(self, x): + if len(x.shape) != 3: + return x + x = x.astype('float') + x = x / self.set['floating']['scale'] + x[..., 0] -= self.set['floating']['mean'][0] + x[..., 1] -= self.set['floating']['mean'][1] + x[..., 2] -= self.set['floating']['mean'][2] + x[..., 0] /= self.set['floating']['std'][0] + x[..., 1] /= self.set['floating']['std'][1] + x[..., 2] /= self.set['floating']['std'][2] + return x + + def _normalize_tf(self, x): + # print('_normalize_tf') + x = x.astype('float') + x = x / self.set['floating']['scale'] + x = x + self.set['floating']['bias'] + return x + + def _normalize_caffe(self, x): + if len(x.shape) != 3: + return x + x = x.astype('float') + x = x[..., ::-1] + x[..., 0] -= self.set['floating']['mean'][0] + x[..., 1] -= self.set['floating']['mean'][1] + x[..., 2] -= self.set['floating']['mean'][2] + return x + + def _normalize_yolo(self, x): + # print('_normalize_yolo') + x = x.astype('float') + x = x / self.set['floating']['scale'] + return x + + def _normalize_kneron(self, x): + # print('_normalize_kneron') + x = x.astype('float') + x = x/self.set['floating']['scale'] + x = x + self.set['floating']['bias'] + return x + + def _normalize_customized(self, x): + # print('_normalize_customized') + x = x.astype('float') + if self.set['floating']['scale'] != 0: + x = x/ self.set['floating']['scale'] + x = x + self.set['floating']['bias'] + if self.set['floating']['mean'] is not None: + x[..., 0] -= self.set['floating']['mean'][0] + x[..., 1] -= self.set['floating']['mean'][1] + x[..., 2] -= self.set['floating']['mean'][2] + if self.set['floating']['std'] is not None: + x[..., 0] /= self.set['floating']['std'][0] + x[..., 1] /= self.set['floating']['std'][1] + x[..., 2] /= self.set['floating']['std'][2] + + return x + + def _chen_520(self, x): + # print('_chen_520') + x = (x - self.sub).astype('uint8') + x = (np.right_shift(x,self.shift)) + x=x.astype('uint8') + return x + + def _chen_720(self, x): + # print('_chen_720') + if self.shift == 1: + x = x + np.array([[self.sub], [self.sub], [self.sub]]) + else: + x = x + np.array([[self.sub], [self.sub], [self.sub]]) + return x \ No newline at end of file diff --git a/kneron/exporting/yolov5/kneron_preprocessing/funcs/Padding.py 
new file mode 100644
index 0000000..e1af1c5
--- /dev/null
+++ b/kneron/exporting/yolov5/kneron_preprocessing/funcs/Padding.py
@@ -0,0 +1,187 @@
+import numpy as np
+from PIL import Image
+from .utils import str2bool, str2int, str2float
+from .Runner_base import Runner_base, Param_base
+
+class General(Param_base):
+    type = ''
+    pad_val = ''
+    padded_w = ''
+    padded_h = ''
+    pad_l = ''
+    pad_r = ''
+    pad_t = ''
+    pad_b = ''
+    padding_ch = 3
+    padding_ch_type = 'RGB'
+    def update(self, **dic):
+        self.type = dic['type']
+        self.pad_val = dic['pad_val']
+        self.padded_w = str2int(dic['padded_w'])
+        self.padded_h = str2int(dic['padded_h'])
+        self.pad_l = str2int(dic['pad_l'])
+        self.pad_r = str2int(dic['pad_r'])
+        self.pad_t = str2int(dic['pad_t'])
+        self.pad_b = str2int(dic['pad_b'])
+
+    def __str__(self):
+        str_out = [
+            ', type:', str(self.type),
+            ', pad_val:', str(self.pad_val),
+            ', pad_l:', str(self.pad_l),
+            ', pad_r:', str(self.pad_r),
+            ', pad_t:', str(self.pad_t),
+            ', pad_b:', str(self.pad_b),
+            ', padding_ch:', str(self.padding_ch)]
+        return(' '.join(str_out))
+
+class Hw(Param_base):
+    radix = 8
+    normalize_type = 'floating'
+    def update(self, **dic):
+        self.radix = dic['radix']
+        self.normalize_type = dic['normalize_type']
+
+    def __str__(self):
+        str_out = [
+            ', radix:', str(self.radix),
+            ', normalize_type:', str(self.normalize_type)]
+        return(' '.join(str_out))
+
+
+class runner(Runner_base):
+    ## overwrite the class in Runner_base
+    general = General()
+    hw = Hw()
+
+    def __str__(self):
+        return('')
+
+    def update(self, **kwargs):
+        super().update(**kwargs)
+
+        ## update pad type & pad length
+        if (self.general.pad_l != 0) or (self.general.pad_r != 0) or (self.general.pad_t != 0) or (self.general.pad_b != 0):
+            self.general.type = 'specific'
+            assert(self.general.pad_l >= 0)
+            assert(self.general.pad_r >= 0)
+            assert(self.general.pad_t >= 0)
+            assert(self.general.pad_b >= 0)
+        elif(self.general.type != 'specific'):
+            if self.general.padded_w == 0 or self.general.padded_h == 0:
+                self.general.padded_w = self.common.model_size[0]
+                self.general.padded_h = self.common.model_size[1]
+            assert(self.general.padded_w > 0)
+            assert(self.general.padded_h > 0)
+            assert(self.general.type.lower() in ['center', 'corner'])
+        else:
+            assert(self.general.type == 'specific')
+
+        ## decide pad_val & padding ch
+        # if numerical_type is floating
+        if (self.common.numerical_type == 'floating'):
+            if self.general.pad_val != 'edge':
+                self.general.pad_val = str2float(self.general.pad_val)
+            self.general.padding_ch = 3
+            self.general.padding_ch_type = 'RGB'
+        # if numerical_type is 520 or 720
+        else:
+            if self.general.pad_val == '':
+                # default pad value depends on the hardware normalize scheme
+                # (note: the 'tf' branch assumes radix <= 7, otherwise the
+                # shift count would be negative)
+                if self.hw.normalize_type.lower() == 'tf':
+                    self.general.pad_val = np.uint8(-128 >> (7 - self.hw.radix))
+                elif self.hw.normalize_type.lower() == 'yolo':
+                    self.general.pad_val = np.uint8(0 >> (8 - self.hw.radix))
+                elif self.hw.normalize_type.lower() == 'kneron':
+                    self.general.pad_val = np.uint8(-128 >> (8 - self.hw.radix))
+                else:
+                    self.general.pad_val = np.uint8(0 >> (8 - self.hw.radix))
+            else:
+                self.general.pad_val = str2int(self.general.pad_val)
+            self.general.padding_ch = 4
+            self.general.padding_ch_type = 'RGBA'
+
+    def run(self, image_data):
+        # init
+        shape = image_data.shape
+        w = shape[1]
+        h = shape[0]
+        if len(shape) < 3:
+            self.general.padding_ch = 1
+            self.general.padding_ch_type = 'L'
+        else:
+            if 
shape[2] == 3 and self.general.padding_ch == 4: + image_data = np.concatenate((image_data, np.zeros((h, w, 1), dtype=np.uint8) ), axis=2) + + ## padding + if self.general.type.lower() in ['CENTER', 'Center', 'center']: + img_pad = self._padding_center(image_data, w, h) + elif self.general.type.lower() in ['CORNER', 'Corner', 'corner']: + img_pad = self._padding_corner(image_data, w, h) + else: + img_pad = self._padding_sp(image_data, w, h) + + # print info + if str2bool(self.common.print_info): + self.print_info() + + # output + info = {} + return img_pad, info + + ## protect fun + def _padding_center(self, img, ori_w, ori_h): + # img_pad = Image.new(self.general.padding_ch_type, (self.general.padded_w, self.general.padded_h), int(self.general.pad_val[0])) + # img = Image.fromarray(img) + # img_pad.paste(img, ((self.general.padded_w-ori_w)//2, (self.general.padded_h-ori_h)//2)) + # return img_pad + padH = self.general.padded_h - ori_h + padW = self.general.padded_w - ori_w + self.general.pad_t = padH // 2 + self.general.pad_b = (padH // 2) + (padH % 2) + self.general.pad_l = padW // 2 + self.general.pad_r = (padW // 2) + (padW % 2) + if self.general.pad_l < 0 or self.general.pad_r <0 or self.general.pad_t <0 or self.general.pad_b<0: + return img + img_pad = self._padding_sp(img,ori_w,ori_h) + return img_pad + + def _padding_corner(self, img, ori_w, ori_h): + # img_pad = Image.new(self.general.padding_ch_type, (self.general.padded_w, self.general.padded_h), self.general.pad_val) + # img_pad.paste(img, (0, 0)) + self.general.pad_l = 0 + self.general.pad_r = self.general.padded_w - ori_w + self.general.pad_t = 0 + self.general.pad_b = self.general.padded_h - ori_h + if self.general.pad_l < 0 or self.general.pad_r <0 or self.general.pad_t <0 or self.general.pad_b<0: + return img + img_pad = self._padding_sp(img,ori_w,ori_h) + return img_pad + + def _padding_sp(self, img, ori_w, ori_h): + # block_t = np.zeros((self.general.pad_t, self.general.pad_l + self.general.pad_r + ori_w, self.general.padding_ch), dtype=np.float) + # block_l = np.zeros((ori_h, self.general.pad_l, self.general.padding_ch), dtype=np.float) + # block_r = np.zeros((ori_h, self.general.pad_r, self.general.padding_ch), dtype=np.float) + # block_b = np.zeros((self.general.pad_b, self.general.pad_l + self.general.pad_r + ori_w, self.general.padding_ch), dtype=np.float) + # for i in range(self.general.padding_ch): + # block_t[:, :, i] = np.ones(block_t[:, :, i].shape, dtype=np.float) * self.general.pad_val + # block_l[:, :, i] = np.ones(block_l[:, :, i].shape, dtype=np.float) * self.general.pad_val + # block_r[:, :, i] = np.ones(block_r[:, :, i].shape, dtype=np.float) * self.general.pad_val + # block_b[:, :, i] = np.ones(block_b[:, :, i].shape, dtype=np.float) * self.general.pad_val + # padded_image_hor = np.concatenate((block_l, img, block_r), axis=1) + # padded_image = np.concatenate((block_t, padded_image_hor, block_b), axis=0) + # return padded_image + if self.general.padding_ch == 1: + pad_range = ( (self.general.pad_t, self.general.pad_b),(self.general.pad_l, self.general.pad_r) ) + else: + pad_range = ((self.general.pad_t, self.general.pad_b),(self.general.pad_l, self.general.pad_r),(0,0)) + + if isinstance(self.general.pad_val, str): + if self.general.pad_val == 'edge': + padded_image = np.pad(img, pad_range, mode="edge") + else: + padded_image = np.pad(img, pad_range, mode="constant",constant_values=0) + else: + padded_image = np.pad(img, pad_range, mode="constant",constant_values=self.general.pad_val) + + return 
padded_image + diff --git a/kneron/exporting/yolov5/kneron_preprocessing/funcs/Resize.py b/kneron/exporting/yolov5/kneron_preprocessing/funcs/Resize.py new file mode 100644 index 0000000..8e948b9 --- /dev/null +++ b/kneron/exporting/yolov5/kneron_preprocessing/funcs/Resize.py @@ -0,0 +1,237 @@ +import numpy as np +import cv2 +from PIL import Image +from .utils import str2bool, str2int +from ctypes import c_float +from .Runner_base import Runner_base, Param_base + +class General(Param_base): + type = 'bilinear' + keep_ratio = True + zoom = True + calculate_ratio_using_CSim = True + resize_w = 0 + resize_h = 0 + resized_w = 0 + resized_h = 0 + def update(self, **dic): + self.type = dic['type'] + self.keep_ratio = str2bool(dic['keep_ratio']) + self.zoom = str2bool(dic['zoom']) + self.calculate_ratio_using_CSim = str2bool(dic['calculate_ratio_using_CSim']) + self.resize_w = str2int(dic['resize_w']) + self.resize_h = str2int(dic['resize_h']) + + def __str__(self): + str_out = [ + ', type:',str(self.type), + ', keep_ratio:',str(self.keep_ratio), + ', zoom:',str(self.zoom), + ', calculate_ratio_using_CSim:',str(self.calculate_ratio_using_CSim), + ', resize_w:',str(self.resize_w), + ', resize_h:',str(self.resize_h), + ', resized_w:',str(self.resized_w), + ', resized_h:',str(self.resized_h)] + return(' '.join(str_out)) + +class Hw(Param_base): + resize_bit = 12 + def update(self, **dic): + pass + + def __str__(self): + str_out = [ + ', resize_bit:',str(self.resize_bit)] + return(' '.join(str_out)) + +class runner(Runner_base): + ## overwrite the class in Runner_base + general = General() + hw = Hw() + + def __str__(self): + return('') + + def update(self, **kwargs): + super().update(**kwargs) + + ## if resize size has not been assigned, then it will take model size as resize size + if self.general.resize_w == 0 or self.general.resize_h == 0: + self.general.resize_w = self.common.model_size[0] + self.general.resize_h = self.common.model_size[1] + assert(self.general.resize_w > 0) + assert(self.general.resize_h > 0) + + ## + if self.common.numerical_type == '520': + self.general.type = 'fixed_520' + elif self.common.numerical_type == '720': + self.general.type = 'fixed_720' + assert(self.general.type.lower() in ['BILINEAR', 'Bilinear', 'bilinear', 'BICUBIC', 'Bicubic', 'bicubic', 'FIXED', 'Fixed', 'fixed', 'FIXED_520', 'Fixed_520', 'fixed_520', 'FIXED_720', 'Fixed_720', 'fixed_720','CV', 'cv', 'opencv', 'OpenCV', 'CV2', 'cv2']) + + + def run(self, image_data): + ## init + ori_w = image_data.shape[1] + ori_h = image_data.shape[0] + info = {} + + ## + if self.general.keep_ratio: + self.general.resized_w, self.general.resized_h = self.calcuate_scale_keep_ratio(self.general.resize_w,self.general.resize_h, ori_w, ori_h, self.general.calculate_ratio_using_CSim) + else: + self.general.resized_w = int(self.general.resize_w) + self.general.resized_h = int(self.general.resize_h) + assert(self.general.resized_w > 0) + assert(self.general.resized_h > 0) + + ## + if (self.general.resized_w > ori_w) or (self.general.resized_h > ori_h): + if not self.general.zoom: + info['size'] = (ori_w,ori_h) + if str2bool(self.common.print_info): + print('no resize') + self.print_info() + return image_data, info + + ## resize + if self.general.type.lower() in ['BILINEAR', 'Bilinear', 'bilinear']: + image_data = self.do_resize_bilinear(image_data, self.general.resized_w, self.general.resized_h) + elif self.general.type.lower() in ['BICUBIC', 'Bicubic', 'bicubic']: + image_data = self.do_resize_bicubic(image_data, 
self.general.resized_w, self.general.resized_h) + elif self.general.type.lower() in ['CV', 'cv', 'opencv', 'OpenCV', 'CV2', 'cv2']: + image_data = self.do_resize_cv2(image_data, self.general.resized_w, self.general.resized_h) + elif self.general.type.lower() in ['FIXED', 'Fixed', 'fixed', 'FIXED_520', 'Fixed_520', 'fixed_520', 'FIXED_720', 'Fixed_720', 'fixed_720']: + image_data = self.do_resize_fixed(image_data, self.general.resized_w, self.general.resized_h, self.hw.resize_bit, self.general.type) + + + # output + info['size'] = (self.general.resized_w, self.general.resized_h) + + # print info + if str2bool(self.common.print_info): + self.print_info() + + return image_data, info + + def calcuate_scale_keep_ratio(self, tar_w, tar_h, ori_w, ori_h, calculate_ratio_using_CSim): + if not calculate_ratio_using_CSim: + scale_w = tar_w * 1.0 / ori_w*1.0 + scale_h = tar_h * 1.0 / ori_h*1.0 + scale = scale_w if scale_w < scale_h else scale_h + new_w = int(round(ori_w * scale)) + new_h = int(round(ori_h * scale)) + return new_w, new_h + + ## calculate_ratio_using_CSim + scale_w = c_float(tar_w * 1.0 / (ori_w * 1.0)).value + scale_h = c_float(tar_h * 1.0 / (ori_h * 1.0)).value + scale_ratio = 0.0 + scale_target_w = 0 + scale_target_h = 0 + padH = 0 + padW = 0 + + bScaleW = True if scale_w < scale_h else False + if bScaleW: + scale_ratio = scale_w + scale_target_w = int(c_float(scale_ratio * ori_w + 0.5).value) + scale_target_h = int(c_float(scale_ratio * ori_h + 0.5).value) + assert (abs(scale_target_w - tar_w) <= 1), "Error: scale down width cannot meet expectation\n" + padH = tar_h - scale_target_h + padW = 0 + assert (padH >= 0), "Error: padH shouldn't be less than zero\n" + else: + scale_ratio = scale_h + scale_target_w = int(c_float(scale_ratio * ori_w + 0.5).value) + scale_target_h = int(c_float(scale_ratio * ori_h + 0.5).value) + assert (abs(scale_target_h - tar_h) <= 1), "Error: scale down height cannot meet expectation\n" + padW = tar_w - scale_target_w + padH = 0 + assert (padW >= 0), "Error: padW shouldn't be less than zero\n" + new_w = tar_w - padW + new_h = tar_h - padH + return new_w, new_h + + def do_resize_bilinear(self, image_data, resized_w, resized_h): + img = Image.fromarray(image_data) + img = img.resize((resized_w, resized_h), Image.BILINEAR) + image_data = np.array(img).astype('uint8') + return image_data + + def do_resize_bicubic(self, image_data, resized_w, resized_h): + img = Image.fromarray(image_data) + img = img.resize((resized_w, resized_h), Image.BICUBIC) + image_data = np.array(img).astype('uint8') + return image_data + + def do_resize_cv2(self, image_data, resized_w, resized_h): + image_data = cv2.resize(image_data, (resized_w, resized_h)) + image_data = np.array(image_data) + # image_data = np.array(image_data).astype('uint8') + return image_data + + def do_resize_fixed(self, image_data, resized_w, resized_h, resize_bit, type): + if len(image_data.shape) < 3: + m, n = image_data.shape + tmp = np.zeros((m,n,3), dtype=np.uint8) + tmp[:,:,0] = image_data + image_data = tmp + c = 3 + gray = True + else: + m, n, c = image_data.shape + gray = False + + resolution = 1 << resize_bit + + # Width + ratio = int(((n - 1) << resize_bit) / (resized_w - 1)) + ratio_cnt = 0 + src_x = 0 + resized_image_w = np.zeros((m, resized_w, c), dtype=np.uint8) + + for dst_x in range(resized_w): + while ratio_cnt > resolution: + ratio_cnt = ratio_cnt - resolution + src_x = src_x + 1 + mul1 = np.ones((m, c)) * (resolution - ratio_cnt) + mul2 = np.ones((m, c)) * ratio_cnt + resized_image_w[:, 
dst_x, :] = np.multiply(np.multiply( + image_data[:, src_x, :], mul1) + np.multiply(image_data[:, src_x + 1, :], mul2), 1/resolution) + ratio_cnt = ratio_cnt + ratio + + # Height + ratio = int(((m - 1) << resize_bit) / (resized_h - 1)) + ## NPU HW special case 2 , only on 520 + if type.lower() in ['FIXED_520', 'Fixed_520', 'fixed_520']: + if (((ratio * (resized_h - 1)) % 4096 == 0) and ratio != 4096): + ratio -= 1 + + ratio_cnt = 0 + src_x = 0 + resized_image = np.zeros( + (resized_h, resized_w, c), dtype=np.uint8) + for dst_x in range(resized_h): + while ratio_cnt > resolution: + ratio_cnt = ratio_cnt - resolution + src_x = src_x + 1 + + mul1 = np.ones((resized_w, c)) * (resolution - ratio_cnt) + mul2 = np.ones((resized_w, c)) * ratio_cnt + + ## NPU HW special case 1 , both on 520 / 720 + if (((dst_x > 0) and ratio_cnt == resolution) and (ratio != resolution)): + if type.lower() in ['FIXED_520', 'Fixed_520', 'fixed_520','FIXED_720', 'Fixed_720', 'fixed_720' ]: + resized_image[dst_x, :, :] = np.multiply(np.multiply( + resized_image_w[src_x+1, :, :], mul1) + np.multiply(resized_image_w[src_x + 2, :, :], mul2), 1/resolution) + else: + resized_image[dst_x, :, :] = np.multiply(np.multiply( + resized_image_w[src_x, :, :], mul1) + np.multiply(resized_image_w[src_x + 1, :, :], mul2), 1/resolution) + + ratio_cnt = ratio_cnt + ratio + + if gray: + resized_image = resized_image[:,:,0] + + return resized_image diff --git a/kneron/exporting/yolov5/kneron_preprocessing/funcs/Rotate.py b/kneron/exporting/yolov5/kneron_preprocessing/funcs/Rotate.py new file mode 100644 index 0000000..63f882f --- /dev/null +++ b/kneron/exporting/yolov5/kneron_preprocessing/funcs/Rotate.py @@ -0,0 +1,45 @@ +import numpy as np +from .utils import str2bool, str2int + +class runner(object): + def __init__(self, *args, **kwargs): + self.set = { + 'operator': '', + "rotate_direction": 0, + + } + self.update(*args, **kwargs) + + def update(self, *args, **kwargs): + self.set.update(kwargs) + self.rotate_direction = str2int(self.set['rotate_direction']) + + # print info + if str2bool(self.set['b_print']): + self.print_info() + + def print_info(self): + print("", + 'rotate_direction', self.rotate_direction,) + + + def run(self, image_data): + image_data = self._rotate(image_data) + return image_data + + def _rotate(self,img): + if self.rotate_direction == 1 or self.rotate_direction == 2: + col, row, unit = img.shape + pInBuf = img.reshape((-1,1)) + pOutBufTemp = np.zeros((col* row* unit)) + for r in range(row): + for c in range(col): + for u in range(unit): + if self.rotate_direction == 1: + pOutBufTemp[unit * (c * row + (row - r - 1))+u] = pInBuf[unit * (r * col + c)+u] + elif self.rotate_direction == 2: + pOutBufTemp[unit * (row * (col - c - 1) + r)+u] = pInBuf[unit * (r * col + c)+u] + + img = pOutBufTemp.reshape((col,row,unit)) + + return img diff --git a/kneron/exporting/yolov5/kneron_preprocessing/funcs/Runner_base.py b/kneron/exporting/yolov5/kneron_preprocessing/funcs/Runner_base.py new file mode 100644 index 0000000..7bedbcf --- /dev/null +++ b/kneron/exporting/yolov5/kneron_preprocessing/funcs/Runner_base.py @@ -0,0 +1,59 @@ +from abc import ABCMeta, abstractmethod + +class Param_base(object): + @abstractmethod + def update(self,**dic): + raise NotImplementedError("Must override") + + def load_dic(self, key, **dic): + if key in dic: + param = eval('self.'+key) + param = dic[key] + + def __str__(self): + str_out = [] + return(' '.join(str_out)) + + +class Common(Param_base): + print_info = False + model_size = [0,0] + 
numerical_type = 'floating' + + def update(self, **dic): + self.print_info = dic['print_info'] + self.model_size = dic['model_size'] + self.numerical_type = dic['numerical_type'] + + def __str__(self): + str_out = ['numerical_type:',str(self.numerical_type)] + return(' '.join(str_out)) + +class Runner_base(metaclass=ABCMeta): + common = Common() + general = Param_base() + floating = Param_base() + hw = Param_base() + + def update(self, **kwargs): + ## update param + self.common.update(**kwargs['common']) + self.general.update(**kwargs['general']) + assert(self.common.numerical_type.lower() in ['floating', '520', '720']) + if (self.common.numerical_type == 'floating'): + if (self.floating.__class__.__name__ != 'Param_base'): + self.floating.update(**kwargs['floating']) + else: + if (self.hw.__class__.__name__ != 'Param_base'): + self.hw.update(**kwargs['hw']) + + def print_info(self): + if (self.common.numerical_type == 'floating'): + print(self, self.common, self.general, self.floating) + else: + print(self, self.common, self.general, self.hw) + + + + + diff --git a/kneron/exporting/yolov5/kneron_preprocessing/funcs/__init__.py b/kneron/exporting/yolov5/kneron_preprocessing/funcs/__init__.py new file mode 100644 index 0000000..0b46298 --- /dev/null +++ b/kneron/exporting/yolov5/kneron_preprocessing/funcs/__init__.py @@ -0,0 +1,2 @@ +from . import ColorConversion, Padding, Resize, Crop, Normalize, Rotate + diff --git a/kneron/exporting/yolov5/kneron_preprocessing/funcs/utils.py b/kneron/exporting/yolov5/kneron_preprocessing/funcs/utils.py new file mode 100644 index 0000000..a1e509a --- /dev/null +++ b/kneron/exporting/yolov5/kneron_preprocessing/funcs/utils.py @@ -0,0 +1,372 @@ +import numpy as np +from PIL import Image +import struct + +def pad_square_to_4(x_start, x_end, y_start, y_end): + w_int = x_end - x_start + h_int = y_end - y_start + pad = w_int - h_int + if pad > 0: + pad_s = (pad >> 1) &(~3) + pad_e = pad - pad_s + y_start -= pad_s + y_end += pad_e + else:#//pad <=0 + pad_s = -(((pad) >> 1) &(~3)) + pad_e = (-pad) - pad_s + x_start -= pad_s + x_end += pad_e + return x_start, x_end, y_start, y_end + +def str_fill(value): + if len(value) == 1: + value = "0" + value + elif len(value) == 0: + value = "00" + + return value + +def clip_ary(value): + list_v = [] + for i in range(len(value)): + v = value[i] % 256 + list_v.append(v) + + return list_v + +def str2bool(v): + if isinstance(v,bool): + return v + return v.lower() in ('TRUE', 'True', 'true', '1', 'T', 't', 'Y', 'YES', 'y', 'yes') + + +def str2int(s): + if s == "": + s = 0 + s = int(s) + return s + +def str2float(s): + if s == "": + s = 0 + s = float(s) + return s + +def clip(value, mini, maxi): + if value < mini: + result = mini + elif value > maxi: + result = maxi + else: + result = value + + return result + + +def clip_ary(value): + list_v = [] + for i in range(len(value)): + v = value[i] % 256 + list_v.append(v) + + return list_v + + +def signed_rounding(value, bit): + if value < 0: + value = value - (1 << (bit - 1)) + else: + value = value + (1 << (bit - 1)) + + return value + +def hex_loader(data_folder,**kwargs): + format_mode = kwargs['raw_img_fmt'] + src_h = kwargs['img_in_height'] + src_w = kwargs['img_in_width'] + + if format_mode in ['YUV444', 'yuv444', 'YCBCR444', 'YCbCr444', 'ycbcr444']: + output = hex_yuv444(data_folder,src_h,src_w) + elif format_mode in ['RGB565', 'rgb565']: + output = hex_rgb565(data_folder,src_h,src_w) + elif format_mode in ['YUV422', 'yuv422', 'YCBCR422', 'YCbCr422', 'ycbcr422']: + output = 
hex_yuv422(data_folder,src_h,src_w) + + return output + +def hex_rgb565(hex_folder,src_h,src_w): + pix_per_line = 8 + byte_per_line = 16 + + f = open(hex_folder) + pixel_r = [] + pixel_g = [] + pixel_b = [] + + # Ignore the first line + f.readline() + input_line = int((src_h * src_w)/pix_per_line) + for i in range(input_line): + readline = f.readline() + for j in range(int(byte_per_line/2)-1, -1, -1): + data1 = int(readline[(j * 4 + 0):(j * 4 + 2)], 16) + data0 = int(readline[(j * 4 + 2):(j * 4 + 4)], 16) + r = ((data1 & 0xf8) >> 3) + g = (((data0 & 0xe0) >> 5) + ((data1 & 0x7) << 3)) + b = (data0 & 0x1f) + pixel_r.append(r) + pixel_g.append(g) + pixel_b.append(b) + + ary_r = np.array(pixel_r, dtype=np.uint8) + ary_g = np.array(pixel_g, dtype=np.uint8) + ary_b = np.array(pixel_b, dtype=np.uint8) + output = np.concatenate((ary_r[:, None], ary_g[:, None], ary_b[:, None]), axis=1) + output = output.reshape((src_h, src_w, 3)) + + return output + +def hex_yuv444(hex_folder,src_h,src_w): + pix_per_line = 4 + byte_per_line = 16 + + f = open(hex_folder) + byte0 = [] + byte1 = [] + byte2 = [] + byte3 = [] + + # Ignore the first line + f.readline() + input_line = int((src_h * src_w)/pix_per_line) + for i in range(input_line): + readline = f.readline() + for j in range(byte_per_line-1, -1, -1): + data = int(readline[(j*2):(j*2+2)], 16) + if (j+1) % 4 == 0: + byte0.append(data) + elif (j+2) % 4 == 0: + byte1.append(data) + elif (j+3) % 4 == 0: + byte2.append(data) + elif (j+4) % 4 == 0: + byte3.append(data) + # ary_a = np.array(byte0, dtype=np.uint8) + ary_v = np.array(byte1, dtype=np.uint8) + ary_u = np.array(byte2, dtype=np.uint8) + ary_y = np.array(byte3, dtype=np.uint8) + output = np.concatenate((ary_y[:, None], ary_u[:, None], ary_v[:, None]), axis=1) + output = output.reshape((src_h, src_w, 3)) + + return output + +def hex_yuv422(hex_folder,src_h,src_w): + pix_per_line = 8 + byte_per_line = 16 + f = open(hex_folder) + pixel_y = [] + pixel_u = [] + pixel_v = [] + + # Ignore the first line + f.readline() + input_line = int((src_h * src_w)/pix_per_line) + for i in range(input_line): + readline = f.readline() + for j in range(int(byte_per_line/4)-1, -1, -1): + data3 = int(readline[(j * 8 + 0):(j * 8 + 2)], 16) + data2 = int(readline[(j * 8 + 2):(j * 8 + 4)], 16) + data1 = int(readline[(j * 8 + 4):(j * 8 + 6)], 16) + data0 = int(readline[(j * 8 + 6):(j * 8 + 8)], 16) + pixel_y.append(data3) + pixel_y.append(data1) + pixel_u.append(data2) + pixel_u.append(data2) + pixel_v.append(data0) + pixel_v.append(data0) + + ary_y = np.array(pixel_y, dtype=np.uint8) + ary_u = np.array(pixel_u, dtype=np.uint8) + ary_v = np.array(pixel_v, dtype=np.uint8) + output = np.concatenate((ary_y[:, None], ary_u[:, None], ary_v[:, None]), axis=1) + output = output.reshape((src_h, src_w, 3)) + + return output + +def bin_loader(data_folder,**kwargs): + format_mode = kwargs['raw_img_fmt'] + src_h = kwargs['img_in_height'] + src_w = kwargs['img_in_width'] + if format_mode in ['YUV','yuv','YUV444', 'yuv444', 'YCBCR','YCbCr','ycbcr','YCBCR444', 'YCbCr444', 'ycbcr444']: + output = bin_yuv444(data_folder,src_h,src_w) + elif format_mode in ['RGB565', 'rgb565']: + output = bin_rgb565(data_folder,src_h,src_w) + elif format_mode in ['NIR', 'nir','NIR888', 'nir888']: + output = bin_nir(data_folder,src_h,src_w) + elif format_mode in ['YUV422', 'yuv422', 'YCBCR422', 'YCbCr422', 'ycbcr422']: + output = bin_yuv422(data_folder,src_h,src_w) + elif format_mode in ['RGB888','rgb888']: + output = np.fromfile(data_folder, dtype='uint8') + output = 
output.reshape(src_h,src_w,3) + elif format_mode in ['RGBA8888','rgba8888', 'RGBA' , 'rgba']: + output_temp = np.fromfile(data_folder, dtype='uint8') + output_temp = output_temp.reshape(src_h,src_w,4) + output = output_temp[:,:,0:3] + + return output + +def bin_yuv444(in_img_path,src_h,src_w): + # load bin + struct_fmt = '1B' + struct_len = struct.calcsize(struct_fmt) + struct_unpack = struct.Struct(struct_fmt).unpack_from + + row = src_h + col = src_w + pixels = row*col + + raw = [] + with open(in_img_path, "rb") as f: + while True: + data = f.read(struct_len) + if not data: break + s = struct_unpack(data) + raw.append(s[0]) + + + raw = raw[:pixels*4] + + # + output = np.zeros((pixels * 3), dtype=np.uint8) + cnt = 0 + for i in range(0, pixels*4, 4): + #Y + output[cnt] = raw[i+3] + #U + cnt += 1 + output[cnt] = raw[i+2] + #V + cnt += 1 + output[cnt] = raw[i+1] + + cnt += 1 + + output = output.reshape((src_h,src_w,3)) + return output + +def bin_yuv422(in_img_path,src_h,src_w): + # load bin + struct_fmt = '1B' + struct_len = struct.calcsize(struct_fmt) + struct_unpack = struct.Struct(struct_fmt).unpack_from + + row = src_h + col = src_w + pixels = row*col + + raw = [] + with open(in_img_path, "rb") as f: + while True: + data = f.read(struct_len) + if not data: break + s = struct_unpack(data) + raw.append(s[0]) + + + raw = raw[:pixels*2] + + # + output = np.zeros((pixels * 3), dtype=np.uint8) + cnt = 0 + for i in range(0, pixels*2, 4): + #Y0 + output[cnt] = raw[i+3] + #U0 + cnt += 1 + output[cnt] = raw[i+2] + #V0 + cnt += 1 + output[cnt] = raw[i] + #Y1 + cnt += 1 + output[cnt] = raw[i+1] + #U1 + cnt += 1 + output[cnt] = raw[i+2] + #V1 + cnt += 1 + output[cnt] = raw[i] + + cnt += 1 + + output = output.reshape((src_h,src_w,3)) + return output + +def bin_rgb565(in_img_path,src_h,src_w): + # load bin + struct_fmt = '1B' + struct_len = struct.calcsize(struct_fmt) + struct_unpack = struct.Struct(struct_fmt).unpack_from + + row = src_h + col = src_w + pixels = row*col + + rgba565 = [] + with open(in_img_path, "rb") as f: + while True: + data = f.read(struct_len) + if not data: break + s = struct_unpack(data) + rgba565.append(s[0]) + + + rgba565 = rgba565[:pixels*2] + + # rgb565_bin to numpy_array + output = np.zeros((pixels * 3), dtype=np.uint8) + cnt = 0 + for i in range(0, pixels*2, 2): + temp = rgba565[i] + temp2 = rgba565[i+1] + #R-5 + output[cnt] = (temp2 >>3) + + #G-6 + cnt += 1 + output[cnt] = ((temp & 0xe0) >> 5) + ((temp2 & 0x07) << 3) + + #B-5 + cnt += 1 + output[cnt] = (temp & 0x1f) + + cnt += 1 + + output = output.reshape((src_h,src_w,3)) + return output + +def bin_nir(in_img_path,src_h,src_w): + # load bin + struct_fmt = '1B' + struct_len = struct.calcsize(struct_fmt) + struct_unpack = struct.Struct(struct_fmt).unpack_from + + nir = [] + with open(in_img_path, "rb") as f: + while True: + data = f.read(struct_len) + if not data: break + s = struct_unpack(data) + nir.append(s[0]) + + nir = nir[:src_h*src_w] + pixels = len(nir) + # nir_bin to numpy_array + output = np.zeros((len(nir) * 3), dtype=np.uint8) + for i in range(0, pixels): + output[i*3]=nir[i] + output[i*3+1]=nir[i] + output[i*3+2]=nir[i] + + output = output.reshape((src_h,src_w,3)) + return output diff --git a/kneron/exporting/yolov5/kneron_preprocessing/funcs/utils_520.py b/kneron/exporting/yolov5/kneron_preprocessing/funcs/utils_520.py new file mode 100644 index 0000000..27bd860 --- /dev/null +++ b/kneron/exporting/yolov5/kneron_preprocessing/funcs/utils_520.py @@ -0,0 +1,50 @@ +import math + +def round_up_16(num): + return 
((num + (16 - 1)) & ~(16 - 1))
+
+def round_up_n(num, n):
+    if (num > 0):
+        temp = float(num) / n
+        return math.ceil(temp) * n
+    else:
+        return -math.ceil(float(-num) / n) * n
+
+def cal_img_row_offset(crop_num, pad_num, start_row, out_row, orig_row):
+    # map an output-row offset back to the corresponding source-image row,
+    # accounting for the crop (crop_num) and pad (pad_num) amounts
+    scaled_img_row = int(out_row - (pad_num[1] + pad_num[3]))
+    if ((start_row - pad_num[1]) > 0):
+        img_str_row = int((start_row - pad_num[1]))
+    else:
+        img_str_row = 0
+    valid_row = int(orig_row - (crop_num[1] + crop_num[3]))
+    img_str_row = int(valid_row * img_str_row / scaled_img_row)
+    return int(img_str_row + crop_num[1])
+
+def get_pad_num(pad_num_orig, left, up, right, bottom):
+    pad_num = [0]*4
+    for i in range(0, 4):
+        pad_num[i] = pad_num_orig[i]
+
+    if not (left):
+        pad_num[0] = 0
+    if not (up):
+        pad_num[1] = 0
+    if not (right):
+        pad_num[2] = 0
+    if not (bottom):
+        pad_num[3] = 0
+
+    return pad_num
+
+def get_byte_per_pixel(raw_fmt):
+    # raw_fmt is lower-cased before comparison; rgb888 frames are counted
+    # as 4 bytes per pixel here
+    if raw_fmt.lower() in ['rgb888', 'rgb']:
+        return 4
+    elif raw_fmt.lower() in ['yuv', 'yuv422']:
+        return 2
+    elif raw_fmt.lower() in ['rgb565']:
+        return 2
+    elif raw_fmt.lower() in ['nir888', 'nir']:
+        return 1
+    else:
+        return -1
\ No newline at end of file
diff --git a/kneron/exporting/yolov5/kneron_preprocessing/funcs/utils_720.py b/kneron/exporting/yolov5/kneron_preprocessing/funcs/utils_720.py
new file mode 100644
index 0000000..8d1a046
--- /dev/null
+++ b/kneron/exporting/yolov5/kneron_preprocessing/funcs/utils_720.py
@@ -0,0 +1,42 @@
+import numpy as np
+from PIL import Image
+
+def twos_complement(value):
+    # interpret a 16-bit word as a signed integer
+    value = int(value)
+    msb = (value & 0x8000) >> 15
+    if msb == 1:
+        if (((~value) & 0xFFFF) + 1) >= 0xFFFF:
+            result = ((~value) & 0xFFFF)
+        else:
+            result = (((~value) & 0xFFFF) + 1)
+        result = result * (-1)
+    else:
+        result = value
+
+    return result
+
+
+def twos_complement_pix(value):
+    h, _ = value.shape
+    for i in range(h):
+        value[i, 0] = twos_complement(value[i, 0])
+
+    return value
+
+def clip(value, mini, maxi):
+    if value < mini:
+        result = mini
+    elif value > maxi:
+        result = maxi
+    else:
+        result = value
+
+    return result
+
+def clip_pix(value, mini, maxi):
+    h, _ = value.shape
+    for i in range(h):
+        value[i, 0] = clip(value[i, 0], mini, maxi)
+
+    return value
\ No newline at end of file
diff --git a/kneron/exporting/yolov5/quantize_yolov5.py b/kneron/exporting/yolov5/quantize_yolov5.py
new file mode 100644
index 0000000..1d21586
--- /dev/null
+++ b/kneron/exporting/yolov5/quantize_yolov5.py
@@ -0,0 +1,45 @@
+import os
+import numpy as np
+import torch
+import ktc  # Kneron Toolchain
+from yolov5_preprocess import Yolov5_preprocess  # use your own preprocessing
+import kneron_preprocessing
+
+# Select the device
+device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+
+# Set the image size (must match training)
+imgsz_h, imgsz_w = 640, 640
+
+# Quantization dataset directory (make sure this folder exists)
+data_path = "/data50"
+img_list = []
+
+# Path to the ONNX model (make sure this path is valid inside the Docker container)
+onnx_model_path = "/workspace/yolov5/latest.opt.onnx"
+
+# Initialize the Kneron ModelConfig object
+km = ktc.ModelConfig(20008, "0001", "720", onnx_model=onnx_model_path)
+
+# Walk through data50 and preprocess every image
+for root, _, files in os.walk(data_path):
+    for f in files:
+        fullpath = os.path.join(root, f)
+
+        # Apply the same preprocessing as training
+        img_data, _ = Yolov5_preprocess(fullpath, device, imgsz_h, imgsz_w)
+
+        # Make sure the data is a NumPy array
+        img_data = img_data.cpu().numpy()
+
+        print(f"Processed: {fullpath}")
+        img_list.append(img_data)
+
+# Stack into a single NumPy array
+img_list = np.array(img_list)
+
+# Run the BIE fixed-point quantization analysis
+bie_model_path = km.analysis({"input": img_list})
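+# NOTE (illustrative): analysis() takes a dict mapping each model input name to
+# the calibration samples collected above; the "input" key is assumed to match
+# the ONNX model's input tensor name. A quick way to check that assumption
+# (standard onnx API, not part of this script):
+#   import onnx
+#   print([i.name for i in onnx.load(onnx_model_path).graph.input])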
+
+# Print the completion message
+print("\n✅ Fixed-point analysis done! BIE model saved to:", bie_model_path)
diff --git a/kneron/exporting/yolov5/readme.txt b/kneron/exporting/yolov5/readme.txt
new file mode 100644
index 0000000..7a99a24
--- /dev/null
+++ b/kneron/exporting/yolov5/readme.txt
@@ -0,0 +1,68 @@
+(1)yolov5_app.py for plotting model inference results
+cd applications
+python yolov5_app.py
+
+(2)yolov5_evaluation.py for evaluating model mAP at hw_repo
+cd applications
+python yolov5_evaluation.py
+
+#mAP @ yolov5s_v2_op9_sig_batch1_input05_640x640_nearest_convert.onnx (with nearest upsampling)
+ Average Precision (AP) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.346
+ Average Precision (AP) @[ IoU=0.50      | area=   all | maxDets=100 ] = 0.533
+ Average Precision (AP) @[ IoU=0.75      | area=   all | maxDets=100 ] = 0.372
+ Average Precision (AP) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.196
+ Average Precision (AP) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.391
+ Average Precision (AP) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.442
+ Average Recall    (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=  1 ] = 0.279
+ Average Recall    (AR) @[ IoU=0.50:0.95 | area=   all | maxDets= 10 ] = 0.456
+ Average Recall    (AR) @[ IoU=0.50:0.95 | area=   all | maxDets=100 ] = 0.503
+ Average Recall    (AR) @[ IoU=0.50:0.95 | area= small | maxDets=100 ] = 0.320
+ Average Recall    (AR) @[ IoU=0.50:0.95 | area=medium | maxDets=100 ] = 0.557
+ Average Recall    (AR) @[ IoU=0.50:0.95 | area= large | maxDets=100 ] = 0.618
+
+(3)yolov5_compare_pth_onnx.py for comparing the results of the pytorch model and the onnx model
+cd applications
+python yolov5_compare_pth_onnx.py
+
+(4)the v2 model is at the links below.
+10.200.210.221:/mnt/models/Object_models/YOLOv5/yolov5s_v2_state_dict_input05.pt
+10.200.210.221:/mnt/models/Object_models/YOLOv5/yolov5s_v2_op9_sig_batch1_input05_640x640_nearest_convert.onnx
+
+(5)parameter settings
+(5.1)In order to get a high mAP on coco val2017, please use
+101620_yolov5_init_params.json
+{
+    "model_path": "/mnt/models/Object_models/YOLOv5/yolov5s_v2_state_dict_input05.pt",
+    "grid20_path": "/mnt/models/Object_models/YOLOv5/20_640x640.npy",
+    "grid40_path": "/mnt/models/Object_models/YOLOv5/40_640x640.npy",
+    "grid80_path": "/mnt/models/Object_models/YOLOv5/80_640x640.npy",
+    "num_classes": 80,
+    "imgsz_h": 640,
+    "imgsz_w": 640,
+    "conf_thres": 0.001,
+    "iou_thres": 0.65,
+    "top_k_num": 3000
+}
+
+(5.2)For video usage scenarios, please use
+102320_yolov5_init_params.json
+{
+    "model_path": "/mnt/models/Object_models/YOLOv5/yolov5s_v2_state_dict_input05.pt",
+    "grid20_path": "/mnt/models/Object_models/YOLOv5/20_640x352.npy",
+    "grid40_path": "/mnt/models/Object_models/YOLOv5/40_640x352.npy",
+    "grid80_path": "/mnt/models/Object_models/YOLOv5/80_640x352.npy",
+    "num_classes": 80,
+    "imgsz_h": 352,
+    "imgsz_w": 640,
+    "conf_thres": 0.3,
+    "iou_thres": 0.5,
+    "top_k_num": 3000
+}
+
+(5.3)The differences between the above settings are
+(5.3.1) Video uses a (640w*352h) input to run faster.
+Coco contains both tall and wide images, so a (640w*640h) input works better.
+
+(5.3.2) Testing coco val2017 with the official yolov5 settings, the confidence threshold is low ("conf_thres": 0.001) and the NMS iou threshold is high ("iou_thres": 0.65), which gives a better mAP.
+But running video needs to be set to "conf_thres": 0.3, so that there are not too many false positives, and the iou setting of NMS "iou_thres": 0.5 is more friendly to close objects diff --git a/kneron/exporting/yolov5/yolo_v2.py b/kneron/exporting/yolov5/yolo_v2.py new file mode 100644 index 0000000..a675a40 --- /dev/null +++ b/kneron/exporting/yolov5/yolo_v2.py @@ -0,0 +1,164 @@ +import argparse +from copy import deepcopy +import torch +#from experimental import * +from .common import * +#from .common_v3 import * +from pathlib import Path +import math +import yaml + +class Detect(nn.Module): + def __init__(self, nc=80, anchors=(), ch=()): # detection layer + super(Detect, self).__init__() + self.stride = None # strides computed during build + self.nc = nc # number of classes + self.no = nc + 5 # number of outputs per anchor + self.nl = len(anchors) # number of detection layers + self.na = len(anchors[0]) // 2 # number of anchors + self.grid = [torch.zeros(1)] * self.nl # init grid + a = torch.tensor(anchors).float().view(self.nl, -1, 2) + self.register_buffer('anchors', a) # shape(nl,na,2) + self.register_buffer('anchor_grid', a.clone().view(self.nl, 1, -1, 1, 1, 2)) # shape(nl,1,na,1,1,2) + self.m = nn.ModuleList(nn.Conv2d(x, self.no * self.na, 1) for x in ch) # output conv + self.export = False # onnx export + + def forward(self, x): + # x = x.copy() # for profiling + z = [] # inference output + self.training |= self.export + for i in range(self.nl): + x[i] = self.m[i](x[i]) # conv + + # bs, _, ny, nx = x[i].shape # x(bs,255,20,20) to x(bs,3,20,20,85) + # x[i] = x[i].view(bs, self.na, self.no, ny, nx).permute(0, 1, 3, 4, 2).contiguous() + x[i] = x[i].sigmoid() + + + + # return x if self.training else (torch.cat(z, 1), x) + return x + + @staticmethod + def _make_grid(nx=20, ny=20): + yv, xv = torch.meshgrid([torch.arange(ny), torch.arange(nx)]) + return torch.stack((xv, yv), 2).view((1, 1, ny, nx, 2)).float() + +class Model(nn.Module): + def __init__(self, cfg='yolov5s.yaml', ch=3, nc=None): # model, input channels, number of classes + super(Model, self).__init__() + with open(cfg) as f: + self.yaml = yaml.load(f, Loader=yaml.FullLoader) # model dict + + # Define model + if nc and nc != self.yaml['nc']: + print('Overriding %s nc=%g with nc=%g' % (cfg, self.yaml['nc'], nc)) + self.yaml['nc'] = nc # override yaml value + self.model, self.save = parse_model(deepcopy(self.yaml), ch=[ch]) # model, savelist, ch_out + # print([x.shape for x in self.forward(torch.zeros(1, ch, 64, 64))]) + + # Build strides, anchors + m = self.model[-1] # Detect() + if isinstance(m, Detect): + s = 128 # 2x min stride + #m.stride = torch.tensor([s / x.shape[-2] for x in self.forward(torch.zeros(1, ch, s, s))]) # forward + # FocusNoSliceCat + m.stride = torch.tensor([8.0,16.0,32.0]) # forward + m.anchors /= m.stride.view(-1, 1, 1) + check_anchor_order(m) + self.stride = m.stride + self._initialize_biases() # only run once + # print('Strides: %s' % m.stride.tolist()) + + # Init weights, biases + initialize_weights(self) + + + def forward(self, x, augment=False, profile=False): + y, dt = [], [] # outputs + for m in self.model: + if m.f != -1: # if not from previous layer + x = y[m.f] if isinstance(m.f, int) else [x if j == -1 else y[j] for j in m.f] # from earlier layers + x = m(x) # run + y.append(x if m.i in self.save else None) # save output + return x + + def _initialize_biases(self, cf=None): # initialize biases into Detect(), cf is class frequency + # cf = 
torch.bincount(torch.tensor(np.concatenate(dataset.labels, 0)[:, 0]).long(), minlength=nc) + 1. + m = self.model[-1] # Detect() module + for mi, s in zip(m.m, m.stride): #  from + b = mi.bias.view(m.na, -1) # conv.bias(255) to (3,85) + b[:, 4] += math.log(8 / (640 / s) ** 2) # obj (8 objects per 640 image) + b[:, 5:] += math.log(0.6 / (m.nc - 0.99)) if cf is None else torch.log(cf / cf.sum()) # cls + mi.bias = torch.nn.Parameter(b.view(-1), requires_grad=True) + +def make_divisible(x, divisor): + # Returns x evenly divisble by divisor + return math.ceil(x / divisor) * divisor + +def check_anchor_order(m): + # Check anchor order against stride order for YOLOv5 Detect() module m, and correct if necessary + a = m.anchor_grid.prod(-1).view(-1) # anchor area + da = a[-1] - a[0] # delta a + ds = m.stride[-1] - m.stride[0] # delta s + if da.sign() != ds.sign(): # same order + print('Reversing anchor order') + m.anchors[:] = m.anchors.flip(0) + m.anchor_grid[:] = m.anchor_grid.flip(0) + +def initialize_weights(model): + for m in model.modules(): + t = type(m) + if t is nn.Conv2d: + pass # nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + elif t is nn.BatchNorm2d: + m.eps = 1e-3 + m.momentum = 0.03 + elif t in [nn.LeakyReLU, nn.ReLU, nn.ReLU6]: + m.inplace = True + +def parse_model(d, ch): # model_dict, input_channels(3) #original + #print('\n%3s%18s%3s%10s %-40s%-30s' % ('', 'from', 'n', 'params', 'module', 'arguments')) + anchors, nc, gd, gw = d['anchors'], d['nc'], d['depth_multiple'], d['width_multiple'] + na = (len(anchors[0]) // 2) if isinstance(anchors, list) else anchors # number of anchors + no = na * (nc + 5) # number of outputs = anchors * (classes + 5) + + layers, save, c2 = [], [], ch[-1] # layers, savelist, ch out + for i, (f, n, m, args) in enumerate(d['backbone'] + d['head']): # from, number, module, args + m = eval(m) if isinstance(m, str) else m # eval strings + for j, a in enumerate(args): + try: + args[j] = eval(a) if isinstance(a, str) else a # eval strings + except: + pass + + n = max(round(n * gd), 1) if n > 1 else n # depth gain + if m in [nn.Conv2d, Conv, Bottleneck, SPP, DWConv, Focus, BottleneckCSP]: + #print('*m',m) + c1, c2 = ch[f], args[0] + c2 = make_divisible(c2 * gw, 8) if c2 != no else c2 + args = [c1, c2, *args[1:]] + if m in [BottleneckCSP]: + args.insert(2, n) + n = 1 + elif m is nn.BatchNorm2d: + args = [ch[f]] + elif m is Concat: + c2 = sum([ch[-1 if x == -1 else x + 1] for x in f]) + elif m is Detect: + args.append([ch[x + 1] for x in f]) + if isinstance(args[1], int): # number of anchors + args[1] = [list(range(args[1] * 2))] * len(f) + else: + c2 = ch[f] + + m_ = nn.Sequential(*[m(*args) for _ in range(n)]) if n > 1 else m(*args) # module + t = str(m)[8:-2].replace('__main__.', '') # module type + np = sum([x.numel() for x in m_.parameters()]) # number params + m_.i, m_.f, m_.type, m_.np = i, f, t, np # attach index, 'from' index, type, number params + #print('%3s%18s%3s%10.0f %-40s%-30s' % (i, f, n, np, t, args)) # print + save.extend(x % i for x in ([f] if isinstance(f, int) else f) if x != -1) # append to savelist + layers.append(m_) + ch.append(c2) + return nn.Sequential(*layers), sorted(save) + diff --git a/kneron/exporting/yolov5/yolov5_postprocess.py b/kneron/exporting/yolov5/yolov5_postprocess.py new file mode 100644 index 0000000..064d322 --- /dev/null +++ b/kneron/exporting/yolov5/yolov5_postprocess.py @@ -0,0 +1,334 @@ +# coding: utf-8 +import torch +import torchvision +import time +import numpy as np +import sys 
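+# Overview (informal): this module decodes the three sigmoid-activated YOLOv5
+# heads into boxes, runs NMS, and rescales the boxes to the original image.
+# A minimal usage sketch, assuming grids/anchors are prepared as in
+# yolov5_runner.py (the variable names below are illustrative):
+#
+#   grids = [grid80, grid40, grid20]          # torch tensors loaded from .npy
+#   anchors = [[10, 13, 16, 30, 33, 23],
+#              [30, 61, 62, 45, 59, 119],
+#              [116, 90, 156, 198, 373, 326]]
+#   dets = Yolov5_postprocess_sig(pred, img.shape, im0.shape, 0.001, 0.65,
+#                                 3000, grids, 80, anchors, 0.0, False)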
+np.set_printoptions(threshold=sys.maxsize) +def box_iou(box1, box2): + # https://github.com/pytorch/vision/blob/master/torchvision/ops/boxes.py + """ + Return intersection-over-union (Jaccard index) of boxes. + Both sets of boxes are expected to be in (x1, y1, x2, y2) format. + Arguments: + box1 (Tensor[N, 4]) + box2 (Tensor[M, 4]) + Returns: + iou (Tensor[N, M]): the NxM matrix containing the pairwise + IoU values for every element in boxes1 and boxes2 + """ + + def box_area(box): + # box = 4xn + return (box[2] - box[0]) * (box[3] - box[1]) + + area1 = box_area(box1.t()) + area2 = box_area(box2.t()) + + # inter(N,M) = (rb(N,M,2) - lt(N,M,2)).clamp(0).prod(2) + inter = (torch.min(box1[:, None, 2:], box2[:, 2:]) - torch.max(box1[:, None, :2], box2[:, :2])).clamp(0).prod(2) + return inter / (area1[:, None] + area2 - inter) # iou = inter / (area1 + area2 - inter) + +def xywh2xyxy(x): + # Convert nx4 boxes from [x, y, w, h] to [x1, y1, x2, y2] where xy1=top-left, xy2=bottom-right + y = torch.zeros_like(x) if isinstance(x, torch.Tensor) else np.zeros_like(x) + y[:, 0] = x[:, 0] - x[:, 2] / 2 # top left x + y[:, 1] = x[:, 1] - x[:, 3] / 2 # top left y + y[:, 2] = x[:, 0] + x[:, 2] / 2 # bottom right x + y[:, 3] = x[:, 1] + x[:, 3] / 2 # bottom right y + return y + +def non_max_suppression(prediction, conf_thres=0.1, iou_thres=0.6, top_k_num=3000, merge=False, classes=None, agnostic=False): + """Performs Non-Maximum Suppression (NMS) on inference results + + Returns: + detections with shape: nx6 (x1, y1, x2, y2, conf, cls) + """ + # print('conf_thres',conf_thres) + if prediction.dtype is torch.float16: + prediction = prediction.float() # to FP32 + + nc = prediction[0].shape[1] - 5 # number of classes + xc = prediction[..., 4] > conf_thres # candidates + + # Settings + min_wh, max_wh = 2, 4096 # (pixels) minimum and maximum box width and height + max_det = 300 # maximum number of detections per image + time_limit = 10.0 # seconds to quit after + multi_label = nc > 1 # multiple labels per box (adds 0.5ms/img) + + t = time.time() + output = [None] * prediction.shape[0] + for xi, x in enumerate(prediction): # image index, image inference + # Apply constraints + # x[((x[..., 2:4] < min_wh) | (x[..., 2:4] > max_wh)).any(1), 4] = 0 # width-height + x = x[xc[xi]] # confidence + + # If none remain process next image + if not x.shape[0]: + continue + + # Compute conf + x[:, 5:] *= x[:, 4:5] # conf = obj_conf * cls_conf + + # Box (center x, center y, width, height) to (x1, y1, x2, y2) + box = xywh2xyxy(x[:, :4]) + + # Detections matrix nx6 (xyxy, conf, cls) + if multi_label: + i, j = (x[:, 5:] > conf_thres).nonzero().t() + x = torch.cat((box[i], x[i, j + 5, None], j[:, None].float()), 1) + else: # best class only + conf, j = x[:, 5:].max(1, keepdim=True) + x = torch.cat((box, conf, j.float()), 1)[conf.view(-1) > conf_thres] + + + + # If none remain process next image + n = x.shape[0] # number of boxes + if not n: + continue + + # Sort by confidence + # x = x[x[:, 4].argsort(descending=True)] + + # Batched NMS + c = x[:, 5:6] * (0 if agnostic else max_wh) # classes + boxes, scores = x[:, :4] + c, x[:, 4] # boxes (offset by class), scores + # # Sort by confidence + ind_Sort_by_confidence = x[:, 4].argsort(descending=True) + boxes = boxes[ind_Sort_by_confidence][:top_k_num] # + scores = scores[ind_Sort_by_confidence][:top_k_num] # + x = x[ind_Sort_by_confidence][:top_k_num] # + # cross classes nms + i = torchvision.ops.boxes.nms(boxes, scores, iou_thres) + if i.shape[0] > max_det: # limit detections + i = 
i[:max_det] + # if merge and (1 < n < 3E3): # Merge NMS (boxes merged using weighted mean) + # update boxes as boxes(i,4) = weights(i,n) * boxes(n,4) + # iou = box_iou(boxes[i], boxes) > iou_thres # iou matrix + # weights = iou * scores[None] # box weights + # x[i, :4] = torch.mm(weights, x[:, :4]).float() / weights.sum(1, keepdim=True) # merged boxes + # if redundant: + # i = i[iou.sum(1) > 1] # require redundancy + + output[xi] = x[i] + if (time.time() - t) > time_limit: + break # time limit exceeded + + return output + +def non_max_suppression_kneron(prediction, conf_thres=0.1, iou_thres=0.6, top_k_num=3000, merge=False, classes=None, agnostic=False): + """Performs Non-Maximum Suppression (NMS) on inference results + + Returns: + detections with shape: nx6 (x1, y1, x2, y2, conf, cls) + """ + if prediction.dtype is torch.float16: + prediction = prediction.float() # to FP32 + + nc = prediction[0].shape[1] - 5 # number of classes + xc = prediction[..., 4] > conf_thres # candidates + + # Settings + min_wh, max_wh = 2, 4096 # (pixels) minimum and maximum box width and height + max_det = 300 # maximum number of detections per image + time_limit = 10.0 # seconds to quit after + multi_label = nc > 1 # multiple labels per box (adds 0.5ms/img) + + t = time.time() + output = [None] * prediction.shape[0] + for xi, x in enumerate(prediction): # image index, image inference + # Apply constraints + # x[((x[..., 2:4] < min_wh) | (x[..., 2:4] > max_wh)).any(1), 4] = 0 # width-height + x = x[xc[xi]] # confidence + + # If none remain process next image + if not x.shape[0]: + continue + + # Compute conf + x[:, 5:] *= x[:, 4:5] # conf = obj_conf * cls_conf + + # Box (center x, center y, width, height) to (x1, y1, x2, y2) + box = xywh2xyxy(x[:, :4]) + + # Detections matrix nx6 (xyxy, conf, cls) + if multi_label: + i, j = (x[:, 5:] > conf_thres).nonzero().t() + x = torch.cat((box[i], x[i, j + 5, None], j[:, None].float()), 1) + else: # best class only + conf, j = x[:, 5:].max(1, keepdim=True) + x = torch.cat((box, conf, j.float()), 1)[conf.view(-1) > conf_thres] + + + + # If none remain process next image + n = x.shape[0] # number of boxes + if not n: + continue + + # Sort by confidence + # x = x[x[:, 4].argsort(descending=True)] + + # Batched NMS + c = x[:, 5:6] * (0 if agnostic else max_wh) # classes + boxes, scores = x[:, :4] + c, x[:, 4] # boxes (offset by class), scores + # Sort by confidence + ind_Sort_by_confidence = x[:, 4].argsort(descending=True) + boxes = boxes[ind_Sort_by_confidence][:top_k_num] # + scores = scores[ind_Sort_by_confidence][:top_k_num] # + x = x[ind_Sort_by_confidence][:top_k_num] # + # cross classes nms + i = torchvision.ops.boxes.nms(boxes, scores, iou_thres) + if i.shape[0] > max_det: # limit detections + i = i[:max_det] + + + output[xi] = x[i] + if (time.time() - t) > time_limit: + break # time limit exceeded + + return output + +def clip_coords(boxes, img_shape): + # Clip bounding xyxy bounding boxes to image shape (height, width) + boxes[:, 0].clamp_(0, img_shape[1]) # x1 + boxes[:, 1].clamp_(0, img_shape[0]) # y1 + boxes[:, 2].clamp_(0, img_shape[1]) # x2 + boxes[:, 3].clamp_(0, img_shape[0]) # y2 + +def scale_coords_ori(img1_shape, coords, img0_shape, ratio_pad=None): + # Rescale coords (xyxy) from img1_shape to img0_shape + if ratio_pad is None: # calculate from img0_shape + gain = min(img1_shape[0] / img0_shape[0], img1_shape[1] / img0_shape[1]) # gain = old / new + pad = (img1_shape[1] - img0_shape[1] * gain) / 2, (img1_shape[0] - img0_shape[0] * gain) / 2 # wh padding + 
else: + gain = ratio_pad[0][0] + pad = ratio_pad[1] + + coords[:, [0, 2]] -= pad[0] # x padding + coords[:, [1, 3]] -= pad[1] # y padding + coords[:, :4] /= gain + clip_coords(coords, img0_shape) + return coords + +def scale_coords(img1_shape, coords, img0_shape, ratio_pad=None): + # Rescale coords (xyxy) from img1_shape to img0_shape + if ratio_pad is None: # calculate from img0_shape + gain = min(img1_shape[0] / img0_shape[0], img1_shape[1] / img0_shape[1]) # gain = old / new + #pad = (img1_shape[1] - img0_shape[1] * gain) / 2, (img1_shape[0] - img0_shape[0] * gain) / 2 # wh padding + else: + gain = ratio_pad[0][0] + #pad = ratio_pad[1] + + # coords[:, [0, 2]] -= pad[0] # x padding + # coords[:, [1, 3]] -= pad[1] # y padding + coords[:, :4] /= gain + clip_coords(coords, img0_shape) + return coords + +def scale_coords_test(img1_shape, coords, img0_shape, ratio_pad=None): + + coords[:, 0] /= (img1_shape[1] / img0_shape[1]) + coords[:, 2] /= (img1_shape[1] / img0_shape[1]) + coords[:, 1] /= (img1_shape[0] / img0_shape[0]) + coords[:, 3] /= (img1_shape[0] / img0_shape[0]) + clip_coords(coords, img0_shape) + return coords + +def classes_mapping(det, num_classes): + det[:, 5] = det[:, 5] + 1.0 + +def Yolov5_postprocess(pred, img_shape, im0_shape, conf_thres, iou_thres, top_k_num, num_classes, vanish_point, e2e_coco) : + classes, agnostic_nms = None, False# + img_h = im0_shape[0] + vanish_y2 = vanish_point * float(img_h) + # Apply NMS + pred = non_max_suppression(pred, conf_thres, iou_thres, top_k_num, classes=classes, agnostic=agnostic_nms) + #return pred + dets = [] + for i, det in enumerate(pred): # detections per image + gn = torch.tensor(im0_shape)[[1, 0, 1, 0]] # normalization gain whwh + if det is not None and len(det): + # Rescale boxes from img_size to im0 size + det[:, :4] = scale_coords(img_shape[2:], det[:, :4], im0_shape).round() + det = det[det[:,3]>=vanish_y2] + # (x1,y1,x2,y2) -> (x1,y1,w,h) for public_field.py + det[:, 2] = det[:, 2] - det[:, 0] + det[:, 3] = det[:, 3] - det[:, 1] + # classes(0~79) -> classes(1~80) for public_field.py + if e2e_coco: + classes_mapping(det, num_classes) + det = det.cpu().numpy() + dets.append(det) + + if dets and len(dets) > 0: + dets = np.asarray(dets) + dets = np.squeeze(dets, axis=0) # remove outer [] + dets = dets.tolist() + + return dets + +def make_grid(nx=20, ny=20): + yv, xv = torch.meshgrid([torch.arange(ny), torch.arange(nx)]) + grids = torch.stack((xv, yv), 2).view((1, 1, ny, nx, 2)).float() + return grids + +def Yolov5_postprocess_onnx_sig(out,img_shape, im0_shape, conf_thres, iou_thres, top_k_num, grids, num_classes, anchors,vanish_point, e2e_coco) : + nc = num_classes # number of classes + no = nc + 5 # number of outputs per anchor + nl = len(anchors) # number of detection layers + na = len(anchors[0]) // 2 # number of anchors + a = torch.tensor(anchors).float().view(3, -1, 2) + anchor_grid = a.clone().view(3, 1, -1, 1, 1, 2) + stride = torch.tensor([ 8., 16., 32.]) + z = [] + for i in range(nl): + x = torch.from_numpy(out[i]) + # print('x.shape',x.shape) + bs, _, ny, nx = x.shape # x(bs,3,20,20,85) + x = x.view(bs, na, no, ny, nx).permute(0, 1, 3, 4, 2).contiguous() + # grid_r = make_grid(nx, ny) ##grid + # grid_r = grid_r.numpy() ##grid + # file_name = str(i)+'.npy' ##grid + # np.save(file_name,grid_r) ##grid + grid = grids[i]# + #y = x.sigmoid() + y = x + y[..., 0:2] = (y[..., 0:2] * 2. 
- 0.5 + grid) * stride[i] # xy + y[..., 2:4] = (y[..., 2:4] * 2) ** 2 * anchor_grid[i] # wh + z.append(y.view(bs, -1, no)) + + pred = torch.cat(z, 1) + return Yolov5_postprocess(pred, img_shape, im0_shape, conf_thres, iou_thres, top_k_num, num_classes,vanish_point, e2e_coco) + +def Yolov5_postprocess_sig(out,img_shape, im0_shape, conf_thres, iou_thres, top_k_num, grids, num_classes, anchors,vanish_point, e2e_coco) : + nc = num_classes # number of classes + no = nc + 5 # number of outputs per anchor + nl = len(anchors) # number of detection layers + na = len(anchors[0]) // 2 # number of anchors + a = torch.tensor(anchors).float().view(3, -1, 2) + anchor_grid = a.clone().view(3, 1, -1, 1, 1, 2).to(out[0].device) + stride = torch.tensor([ 8., 16., 32.]).to(out[0].device) + z = [] + for i in range(nl): + x = out[i] + bs, _, ny, nx = x.shape # x(bs,255,20,20) to x(bs,3,20,20,85) + # print('x.shape',x.shape) + x = x.view(bs, na, no, ny, nx).permute(0, 1, 3, 4, 2).contiguous() + + # grid_r = make_grid(nx, ny) ##grid + # grid_r = grid_r.numpy() ##grid + # file_name = str(i)+'.npy' ##grid + # np.save(file_name,grid_r) ##grid + + grid = grids[i].to(out[0].device) # + #y = x.sigmoid() + y = x + y[..., 0:2] = (y[..., 0:2] * 2. - 0.5 + grid) * stride[i] # xy + y[..., 2:4] = (y[..., 2:4] * 2) ** 2 * anchor_grid[i] # wh + z.append(y.view(bs, -1, no)) + # exit(0) + pred = torch.cat(z, 1) + return Yolov5_postprocess(pred, img_shape, im0_shape, conf_thres, iou_thres, top_k_num, num_classes,vanish_point, e2e_coco) \ No newline at end of file diff --git a/kneron/exporting/yolov5/yolov5_preprocess.py b/kneron/exporting/yolov5/yolov5_preprocess.py new file mode 100644 index 0000000..a4abf12 --- /dev/null +++ b/kneron/exporting/yolov5/yolov5_preprocess.py @@ -0,0 +1,160 @@ +# coding: utf-8 +import torch +import cv2 +import numpy as np +import math +import time +from . 
import kneron_preprocessing +kneron_preprocessing.API.set_default_as_520() +torch.backends.cudnn.deterministic = True +img_formats = ['.bmp', '.jpg', '.jpeg', '.png', '.tif', '.tiff', '.dng'] +def make_divisible(x, divisor): + # Returns x evenly divisble by divisor + return math.ceil(x / divisor) * divisor + +def check_img_size(img_size, s=32): + # Verify img_size is a multiple of stride s + new_size = make_divisible(img_size, int(s)) # ceil gs-multiple + if new_size != img_size: + print('WARNING: --img-size %g must be multiple of max stride %g, updating to %g' % (img_size, s, new_size)) + return new_size + +def letterbox_ori(img, new_shape=(640, 640), color=(0, 0, 0), auto=True, scaleFill=False, scaleup=True): + # Resize image to a 32-pixel-multiple rectangle https://github.com/ultralytics/yolov3/issues/232 + shape = img.shape[:2] # current shape [height, width] + if isinstance(new_shape, int): + new_shape = (new_shape, new_shape) + + # Scale ratio (new / old) + r = min(new_shape[0] / shape[0], new_shape[1] / shape[1]) + if not scaleup: # only scale down, do not scale up (for better test mAP) + r = min(r, 1.0) + + # Compute padding + ratio = r, r # width, height ratios + new_unpad = int(round(shape[1] * r)), int(round(shape[0] * r)) # width, height + dw, dh = new_shape[1] - new_unpad[0], new_shape[0] - new_unpad[1] # wh padding + + dw /= 2 # divide padding into 2 sides + dh /= 2 + + if shape[::-1] != new_unpad: # resize + img = cv2.resize(img, new_unpad, interpolation=cv2.INTER_LINEAR) + #img = kneron_preprocessing.API.resize(img,size=new_unpad, keep_ratio = False) + + top, bottom = int(round(dh - 0.1)), int(round(dh + 0.1)) + left, right = int(round(dw - 0.1)), int(round(dw + 0.1)) + # top, bottom = int(0), int(round(dh + 0.1)) + # left, right = int(0), int(round(dw + 0.1)) + img = cv2.copyMakeBorder(img, top, bottom, left, right, cv2.BORDER_CONSTANT, value=color) # add border + #img = kneron_preprocessing.API.pad(img, left, right, top, bottom, 0) + + return img, ratio, (dw, dh) + +def letterbox(img, new_shape=(640, 640), color=(0, 0, 0), auto=True, scaleFill=False, scaleup=True): + # Resize image to a 32-pixel-multiple rectangle https://github.com/ultralytics/yolov3/issues/232 + shape = img.shape[:2] # current shape [height, width] + if isinstance(new_shape, int): + new_shape = (new_shape, new_shape) + + # Scale ratio (new / old) + r = min(new_shape[0] / shape[0], new_shape[1] / shape[1]) + if not scaleup: # only scale down, do not scale up (for better test mAP) + r = min(r, 1.0) + + # Compute padding + ratio = r, r # width, height ratios + new_unpad = int(round(shape[1] * r)), int(round(shape[0] * r)) # width, height + dw, dh = new_shape[1] - new_unpad[0], new_shape[0] - new_unpad[1] # wh padding + + # dw /= 2 # divide padding into 2 sides + # dh /= 2 + + if shape[::-1] != new_unpad: # resize + #img = cv2.resize(img, new_unpad, interpolation=cv2.INTER_LINEAR) + img = kneron_preprocessing.API.resize(img,size=new_unpad, keep_ratio = False) + + # top, bottom = int(round(dh - 0.1)), int(round(dh + 0.1)) + # left, right = int(round(dw - 0.1)), int(round(dw + 0.1)) + top, bottom = int(0), int(round(dh + 0.1)) + left, right = int(0), int(round(dw + 0.1)) + #img = cv2.copyMakeBorder(img, top, bottom, left, right, cv2.BORDER_CONSTANT, value=color) # add border + img = kneron_preprocessing.API.pad(img, left, right, top, bottom, 0) + + return img, ratio, (dw, dh) + +def letterbox_test(img, new_shape=(640, 640), color=(0, 0, 0), auto=True, scaleFill=False, scaleup=True): + + ratio = 1.0, 1.0 + dw, 
+def letterbox_test(img, new_shape=(640, 640), color=(0, 0, 0), auto=True, scaleFill=False, scaleup=True):
+    # Fixed-size resize used for debugging: no aspect-ratio preservation, no padding
+    ratio = 1.0, 1.0
+    dw, dh = 0, 0
+    img = kneron_preprocessing.API.resize(img, size=(480, 256), keep_ratio=False, type='bilinear')
+    return img, ratio, (dw, dh)
+
+def LoadImages(path, img_size):  #_rgb # for inference
+    if isinstance(path, str):
+        img0 = cv2.imread(path)  # BGR
+    else:
+        img0 = path  # BGR
+
+    # Padded resize
+    img = letterbox(img0, new_shape=img_size)[0]
+    # Convert
+    img = img[:, :, ::-1].transpose(2, 0, 1)  # BGR to RGB, to 3x416x416
+    img = np.ascontiguousarray(img)
+    return img, img0
+
+def LoadImages_yyy(path, img_size):  #_yyy # for inference
+    if isinstance(path, str):
+        img0 = cv2.imread(path)  # BGR
+    else:
+        img0 = path  # BGR
+
+    # Keep only the luma (Y) channel and replicate it into all three channels
+    yvu = cv2.cvtColor(img0, cv2.COLOR_BGR2YCrCb)
+    y, v, u = cv2.split(yvu)
+    img0 = np.stack((y,) * 3, axis=-1)
+
+    # Padded resize
+    img = letterbox(img0, new_shape=img_size)[0]
+
+    # Convert
+    img = img[:, :, ::-1].transpose(2, 0, 1)  # BGR to RGB, to 3x416x416
+    img = np.ascontiguousarray(img)
+    return img, img0
+
+def LoadImages_yuv420(path, img_size):  #_yuv420 # for inference
+    if isinstance(path, str):
+        img0 = cv2.imread(path)  # BGR
+    else:
+        img0 = path  # BGR
+    # Round H/W down to even values, then round-trip through YUV420 (I420) to
+    # reproduce the chroma subsampling a sensor pipeline would introduce
+    img_h, img_w = img0.shape[:2]
+    img_h = (img_h // 2) * 2
+    img_w = (img_w // 2) * 2
+    img = img0[:img_h, :img_w, :]
+    yuv = cv2.cvtColor(img, cv2.COLOR_BGR2YUV_I420)
+    img0 = cv2.cvtColor(yuv, cv2.COLOR_YUV2BGR_I420)  # yuv420
+
+    # Padded resize
+    img = letterbox(img0, new_shape=img_size)[0]
+
+    # Convert
+    img = img[:, :, ::-1].transpose(2, 0, 1)  # BGR to RGB, to 3x416x416
+    img = np.ascontiguousarray(img)
+    return img, img0
+
+def Yolov5_preprocess(image_path, device, imgsz_h, imgsz_w):
+    model_stride_max = 32
+    imgsz_h = check_img_size(imgsz_h, s=model_stride_max)  # check img_size
+    imgsz_w = check_img_size(imgsz_w, s=model_stride_max)  # check img_size
+    img, im0 = LoadImages(image_path, img_size=(imgsz_h, imgsz_w))
+    img = kneron_preprocessing.API.norm(img)  # path1
+    #print('img', img.shape)
+    img = torch.from_numpy(img).to(device)  # path1, path2
+    # img = img.float()  # uint8 to fp16/32  # path2
+    # img /= 255.0  # 256.0 - 0.5  # 0 - 255 to -0.5 - 0.5  # path2
+
+    if img.ndimension() == 3:
+        img = img.unsqueeze(0)
+
+    return img, im0
+
diff --git a/kneron/exporting/yolov5/yolov5_runner.py b/kneron/exporting/yolov5/yolov5_runner.py
new file mode 100644
index 0000000..db7ddc4
--- /dev/null
+++ b/kneron/exporting/yolov5/yolov5_runner.py
@@ -0,0 +1,91 @@
+import torch
+torch.set_printoptions(precision=10)
+torch.set_printoptions(threshold=99999999999)
+torch.backends.cudnn.deterministic = True
+from .yolov5_preprocess import *
+from .yolov5_postprocess import *
+from .yolo_v2 import Model as Model_v2
+import onnxruntime
+import time
+import os
+from collections import Counter
+import torch.nn.functional as F
+import random
+from pathlib import Path
+
+class Yolov5Runner:
+    def __init__(self, model_path, yaml_path, grid20_path, grid40_path, grid80_path, num_classes, imgsz_h, imgsz_w, conf_thres, iou_thres, top_k_num, vanish_point, **kwargs):  #is_onnx,
+        """
+        inputs:
+            model_path: str, path to the model weights (.pt state dict or .onnx)
+        """
+        self.model_path = model_path
+        self.imgsz_h = imgsz_h
+        self.imgsz_w = imgsz_w
+        self.conf_thres = conf_thres
+        self.iou_thres = iou_thres
+        self.top_k_num = top_k_num
+        self.vanish_point = vanish_point
+        self.num_classes = num_classes
+        self.DEVICE = torch.device("cpu")  #torch.device('cuda:0')#
+        self.grid20 = torch.from_numpy(np.load(grid20_path))
+        self.grid40 = torch.from_numpy(np.load(grid40_path))
+        self.grid80 = torch.from_numpy(np.load(grid80_path))
+        self.grids = [self.grid80, self.grid40, self.grid20]
+        if
'onnx' not in model_path: + self.yolov5_model = Model_v2(yaml_path, nc=num_classes) + self.yolov5_model.load_state_dict(torch.load(model_path, map_location=self.DEVICE))#,strict=False) + self.yolov5_model.float().eval() + self.yolov5_model.to(self.DEVICE) + self.yolov5_model.eval() + else: + #onnxruntime.set_default_logger_severity(0) + self.sess = onnxruntime.InferenceSession(model_path) + # self.sess.set_providers(['CUDAExecutionProvider']) + self.input_name = self.sess.get_inputs()[0].name + self.onnx_batch_size = self.sess.get_inputs()[0].shape[0] + self.onnx_img_size_h = self.sess.get_inputs()[0].shape[2] + self.onnx_img_size_w = self.sess.get_inputs()[0].shape[3] + + self.anchors = [[10, 13, 16, 30, 33, 23], [30, 61, 62, 45, 59, 119], [116, 90, 156, 198, 373, 326]] #yolov5 + print('self.vanish_point',self.vanish_point) + self.e2e_coco = kwargs.get('e2e_coco', False) + + def run(self, img_path): + """ + inputs : + img_path : path of the image + outputs : + dets : list + """ + self.yolov5_model.eval() + with torch.no_grad(): + img, im0 = Yolov5_preprocess(img_path, self.DEVICE, self.imgsz_h, self.imgsz_w) + if next(self.yolov5_model.parameters()).is_cuda: + img = img.type(torch.cuda.FloatTensor) + else: + img = img.type(torch.FloatTensor) + pred = self.yolov5_model(img, augment=False) + img_shape, im0_shape = img.shape, im0.shape + dets = Yolov5_postprocess_sig(pred,img_shape, im0_shape, self.conf_thres, self.iou_thres, self.top_k_num, self.grids, self.num_classes, self.anchors,self.vanish_point) + return dets + + def run_onnx(self, img_path): + """ + inputs : + img_path : path of the image + outputs : + dets : list + """ + with torch.no_grad(): + img, im0 = Yolov5_preprocess(img_path, self.DEVICE, self.imgsz_h, self.imgsz_w) + np_images = np.array(img.cpu()) + np_images = np_images.astype(np.float32) + pred_onnx = self.sess.run(None, {self.input_name: np_images }) + img_shape, im0_shape = img.shape, im0.shape + # print('img_shape',img_shape) + # print('im0_shape', im0_shape) + dets_onnx = Yolov5_postprocess_onnx_sig(pred_onnx,img_shape, im0_shape, self.conf_thres, self.iou_thres, self.top_k_num, self.grids, self.num_classes, self.anchors,self.vanish_point, self.e2e_coco) + return dets_onnx + + diff --git a/kneron/exporting/yolov5_export.py b/kneron/exporting/yolov5_export.py new file mode 100644 index 0000000..4f59bbd --- /dev/null +++ b/kneron/exporting/yolov5_export.py @@ -0,0 +1,80 @@ +import os +import torch +import sys +import yaml +import argparse + +from yolov5.yolov5_runner import Yolov5Runner + +def save_weight(num_classes): + current_path=os.getcwd() + par_path = os.path.dirname(current_path) + sys.path.append(os.path.join(par_path, 'yolov5')) + from models.yolo import Model + num_classes = num_classes + device=torch.device('cpu') + ckpt = torch.load(path, map_location=device) + model = Model(yaml_path, nc=num_classes) + ckpt['model'] = {k: v for k, v in ckpt['model'].float().state_dict().items() if k in model.state_dict() and model.state_dict()[k].shape == v.shape} + model.load_state_dict(ckpt['model']) + torch.save(model.state_dict(),pt_path,_use_new_zipfile_serialization=False) + +def export_onnx(input_h, input_w, num_classes): + + onnx_batch_size, onnx_img_h, onnx_img_w = 1, input_h, input_w + yolov5_model = Yolov5Runner(model_path=pt_path, yaml_path=yaml_path, grid20_path=grid20_path, grid40_path=grid40_path, grid80_path=grid80_path, num_classes=num_classes, imgsz_h=onnx_img_h, imgsz_w=onnx_img_w, conf_thres=0.001, iou_thres=0.65, top_k_num=3000, vanish_point=0.0) + + # 
Input
+    img = torch.zeros((onnx_batch_size, 3, onnx_img_h, onnx_img_w))
+    # img = img.type(torch.cuda.FloatTensor)
+
+    # Load PyTorch model
+    model = yolov5_model.yolov5_model
+    model.eval()
+    model.model[-1].export = True  # set Detect() layer export=True
+    y = model(img)  # dry run
+
+    # ONNX export
+    try:
+        import onnx
+        print('\nStarting ONNX export with onnx %s...' % onnx.__version__)
+        print('****onnx file****', onnx_export_file)
+        torch.onnx.export(model, img, onnx_export_file, verbose=False, opset_version=11, keep_initializers_as_inputs=True, input_names=['images'], output_names=['classes', 'boxes'] if y is None else ['output'])
+        # Checks
+        onnx_model = onnx.load(onnx_export_file)  # load onnx model
+        onnx.checker.check_model(onnx_model)  # check onnx model
+        print(onnx.helper.printable_graph(onnx_model.graph))  # print a human readable model
+        print('ONNX export success, saved as %s' % onnx_export_file)
+    except Exception as e:
+        print('ONNX export failure: %s' % e)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--data', type=str, default='../yolov5/data/pretrained_paths_520.yaml', help='the path to pretrained model paths yaml file')
+
+    args = parser.parse_args()
+
+    with open(args.data) as f:
+        data_dict = yaml.load(f, Loader=yaml.FullLoader)  # data dict
+
+    os.environ["CUDA_VISIBLE_DEVICES"] = '0'
+    num_classes = data_dict['nc']
+    input_w = data_dict['input_w']
+    input_h = data_dict['input_h']
+    grid_dir = data_dict['grid_dir']
+    grid20_path = data_dict['grid20_path']
+    grid40_path = data_dict['grid40_path']
+    grid80_path = data_dict['grid80_path']
+    path = data_dict['path']
+    pt_path = data_dict['pt_path']
+    yaml_path = data_dict['yaml_path']
+    onnx_export_file = data_dict['onnx_export_file']
+    save_weight(num_classes)
+    export_onnx(input_h, input_w, num_classes)
+
diff --git a/kneron/inference.py b/kneron/inference.py
new file mode 100644
index 0000000..0bdc54c
--- /dev/null
+++ b/kneron/inference.py
@@ -0,0 +1,64 @@
+import os
+import sys
+import argparse
+import yaml
+import cv2
+import numpy as np
+
+def draw(img_path, bboxes, save_path=None, names=None):
+
+    img = cv2.imread(img_path)
+    for bbox in bboxes:
+        l, t, w, h, score, class_id = bbox
+        if names is not None:
+            class_id = names[int(class_id)]
+        img = cv2.rectangle(img, (int(l), int(t)), (int(l + w), int(t + h)), (0, 255, 0), 6)
+        text = "{}".format(class_id) + " {}".format(np.round(score, 3))
+        img = cv2.putText(img, text, (int(l), int(t) - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2)
+    if save_path is None:
+        save_path = img_path
+    output_file = os.path.join(save_path, "output.jpg")  # make sure the output has an image extension
+    cv2.imwrite(output_file, img)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--img-path', type=str, default=None, help='path to image')
+    parser.add_argument('--save-path', type=str, default=None, help='path to save image')
+    parser.add_argument('--data', type=str, default='data/pretrained_paths_520.yaml', help='the path to pretrained model paths yaml file')
+    parser.add_argument('--conf_thres', type=float, default=0.3, help='confidence threshold')
+    parser.add_argument('--iou_thres', type=float, default=0.5, help='iou threshold for NMS')
+    parser.add_argument('--onnx', help='inference onnx model', action='store_true')
+
+    args = parser.parse_args()
+
+    par_path = os.path.abspath(os.path.join(os.getcwd(), os.pardir))
+    sys.path.append(par_path)
+    sys.path.append(os.path.join(par_path, 'exporting'))
+
+    from yolov5.yolov5_runner import
Yolov5Runner + + with open(args.data) as f: + data_dict = yaml.load(f, Loader=yaml.FullLoader) # data dict + + num_classes = data_dict['nc'] + input_w = data_dict['input_w'] + input_h = data_dict['input_h'] + grid_dir = data_dict['grid_dir'] + grid20_path = data_dict['grid20_path'] + grid40_path = data_dict['grid40_path'] + grid80_path = data_dict['grid80_path'] + path = data_dict['path'] + + + if args.onnx: + yolov5_model = Yolov5Runner(model_path=data_dict['onnx_export_file'], yaml_path=data_dict['yaml_path'], grid20_path=grid20_path, grid40_path=grid40_path, grid80_path=grid80_path, num_classes=num_classes, imgsz_h=input_h, imgsz_w=input_w, conf_thres=args.conf_thres, iou_thres=args.iou_thres, top_k_num=3000, vanish_point=0.0) + bboxes = yolov5_model.run_onnx(args.img_path) + else: + yolov5_model = Yolov5Runner(model_path=data_dict['pt_path'], yaml_path=data_dict['yaml_path'], grid20_path=grid20_path, grid40_path=grid40_path, grid80_path=grid80_path, num_classes=num_classes, imgsz_h=input_h, imgsz_w=input_w, conf_thres=args.conf_thres, iou_thres=args.iou_thres, top_k_num=3000, vanish_point=0.0) + bboxes = yolov5_model.run(args.img_path) + + print(bboxes) + + if args.save_path is not None: + draw(args.img_path, bboxes, save_path = args.save_path, names = data_dict['names']) diff --git a/kneron/inference_e2e.py b/kneron/inference_e2e.py new file mode 100644 index 0000000..7050fba --- /dev/null +++ b/kneron/inference_e2e.py @@ -0,0 +1,53 @@ +import os +import sys +import argparse +import yaml +from tqdm import tqdm +import json + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--img-path', type=str, help='Path to the dataset directory.') + parser.add_argument('--params', type=str, help='Path to the init params file.') + parser.add_argument('--save-path', type=str, help='Path to save output in json.') + + args = parser.parse_args() + + par_path = os.path.abspath(os.path.join(os.getcwd(), os.pardir)) + sys.path.append(par_path) + sys.path.append(os.path.join(par_path, 'exporting') ) + + from yolov5.yolov5_runner import Yolov5Runner + + with open(args.params, "r", encoding="utf-8") as f: + params_dict = yaml.load(f, Loader=yaml.FullLoader) # data dict + + num_classes = params_dict['nc'] + input_w = params_dict['input_w'] + input_h = params_dict['input_h'] + grid20_path = params_dict['grid20_path'] + grid40_path = params_dict['grid40_path'] + grid80_path = params_dict['grid80_path'] + conf_thres = params_dict['conf_thres'] + iou_thres = params_dict['iou_thres'] + model_type = params_dict['model_type'] + e2e_coco = params_dict['e2e_coco'] + + if model_type == 'onnx': + yolov5_model = Yolov5Runner(model_path=params_dict['onnx_path'], yaml_path=params_dict['model_yaml_path'], grid20_path=grid20_path, grid40_path=grid40_path, grid80_path=grid80_path, num_classes=num_classes, imgsz_h=input_h, imgsz_w=input_w, conf_thres=conf_thres, iou_thres=iou_thres, top_k_num=3000, vanish_point=0.0, e2e_coco=e2e_coco) + else: + yolov5_model = Yolov5Runner(model_path=params_dict['pt_path'], yaml_path=params_dict['model_yaml_path'], grid20_path=grid20_path, grid40_path=grid40_path, grid80_path=grid80_path, num_classes=num_classes, imgsz_h=input_h, imgsz_w=input_w, conf_thres=conf_thres, iou_thres=iou_thres, top_k_num=3000, vanish_point=0.0, e2e_coco=e2e_coco) + + img_list = os.listdir(args.img_path) + results = [] + for img_name in tqdm(img_list): + if img_name.split('.')[-1] not in ['png', 'jpg']: + continue + img_path = os.path.join(args.img_path, img_name) + if model_type 
== 'onnx':
+            bboxes = yolov5_model.run_onnx(img_path)
+        else:
+            bboxes = yolov5_model.run(img_path)
+        results.append({'img_path': img_path, 'bbox': bboxes})
+    with open(args.save_path, 'w') as fp:
+        json.dump(results, fp)
diff --git a/kneron/ktc720.py b/kneron/ktc720.py
new file mode 100644
index 0000000..999cdb6
--- /dev/null
+++ b/kneron/ktc720.py
@@ -0,0 +1,72 @@
+import ktc
+import numpy as np
+import os
+import onnx
+from PIL import Image
+import torch
+from yolov5_preprocess import Yolov5_preprocess
+import kneron_preprocessing
+
+onnx_path = 'runs/train/exp24/weights/best_simplified.onnx'
+m = onnx.load(onnx_path)
+m = ktc.onnx_optimizer.onnx2onnx_flow(m)
+onnx.save(m, 'latest.opt.onnx')
+km = ktc.ModelConfig(20008, "0001", "720", onnx_model=m)
+eval_result = km.evaluate()
+print("\nNpu performance evaluation result:\n" + str(eval_result))
+
+device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+
+imgsz_h, imgsz_w = 640, 640
+
+data_path = "data50"
+files_found = [f for _, _, files in os.walk(data_path) for f in files if f.lower().endswith((".jpg", ".jpeg", ".png", ".bmp"))]
+
+if not files_found:
+    raise FileNotFoundError(f"❌ Error: No images found in {data_path}! Please check your dataset.")
+
+print(f"✅ Found {len(files_found)} images in {data_path}")
+
+# Get the ONNX model's input name (the key must match the ONNX input name)
+input_name = m.graph.input[0].name
+# Holds the preprocessed image tensors
+img_list = []
+
+# Walk data50 and preprocess every image
+for root, _, files in os.walk(data_path):
+    for f in files:
+        fullpath = os.path.join(root, f)
+
+        # Only process image files
+        if not f.lower().endswith((".jpg", ".jpeg", ".png", ".bmp")):
+            print(f"⚠️ Skipping non-image file: {fullpath}")
+            continue
+
+        # Try to preprocess the image
+        try:
+            img_data, _ = Yolov5_preprocess(fullpath, device, imgsz_h, imgsz_w)
+            img_data = img_data.cpu().numpy()
+            print(f"✅ Processed: {fullpath}")
+            img_list.append(img_data)
+        except Exception as e:
+            print(f"❌ Failed to process {fullpath}: {e}")
+
+# Make sure img_list is not empty
+if not img_list:
+    raise ValueError("❌ Error: No valid images were processed! Please check the image paths and formats.")
+
+# Run BIE quantization
+bie_model_path = km.analysis({input_name: img_list})
+
+# Confirm the BIE model was generated
+if not os.path.exists(bie_model_path):
+    raise RuntimeError(f"❌ Error: BIE model was not generated! Please check your quantization process.")
+
+print("\n✅ Fixed-point analysis done! BIE model saved to:", bie_model_path)
+
+# `km` must already be initialized and the .bie model generated before compiling
+nef_model_path = ktc.compile([km])
+
+print("\n✅ Compile done! NEF file saved to:", nef_model_path)
\ No newline at end of file
diff --git a/kneron/oldquantize_yolov5.py b/kneron/oldquantize_yolov5.py
new file mode 100644
index 0000000..89947b7
--- /dev/null
+++ b/kneron/oldquantize_yolov5.py
@@ -0,0 +1,33 @@
+import os
+import numpy as np
+import torch
+from yolov5_preprocess import Yolov5_preprocess  # reuse the training-time preprocessing
+import kneron_preprocessing
+
+# Select device
+device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+
+# Image size (must match training)
+imgsz_h, imgsz_w = 640, 640
+
+# Quantization dataset directory (make sure this folder exists)
+data_path = "/data50"
+img_list = []
+
+# Walk voc_data50 and preprocess every image
+for root, _, files in os.walk(data_path):
+    for f in files:
+        fullpath = os.path.join(root, f)
+
+        # Apply the same preprocessing as training
+        img_data, _ = Yolov5_preprocess(fullpath, device, imgsz_h, imgsz_w)
+
+        print(f"Processed: {fullpath}")
+        img_list.append(img_data)
+
+# Convert to a NumPy array
+img_list = np.array(img_list)
+
+# Run BIE fixed-point analysis
+# NOTE: `km` is never defined in this legacy script; a ktc.ModelConfig must be
+# created first (see onnx2nef520.py for the full flow)
+bie_model_path = km.analysis({"input": img_list})
+print("\nFixed-point analysis done. Saved bie model to '" + str(bie_model_path) + "'")
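# [Editor's note] Minimal sketch of the toolchain flow that the onnx2nef* scripts
# below all follow; the calls mirror those used in this repo (ktc.onnx_optimizer,
# ktc.ModelConfig, km.analysis, ktc.compile). The paths, the 20008 model id and
# the empty calibration list are placeholders, not toolchain defaults.
import onnx
import ktc

m = ktc.onnx_optimizer.onnx2onnx_flow(onnx.load("best_simplified.onnx"))
km = ktc.ModelConfig(20008, "0001", "720", onnx_model=m)   # platform: "520" / "630" / "720"
print(km.evaluate())                                       # NPU performance estimate
img_list = []                                              # fill with preprocessed calibration tensors
bie_path = km.analysis({m.graph.input[0].name: img_list})  # fixed-point (BIE) quantization
nef_path = ktc.compile([km])                               # compile the .bie into a .nef binary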
diff --git a/kneron/onnx2nef520.py b/kneron/onnx2nef520.py
new file mode 100644
index 0000000..bb0c5a1
--- /dev/null
+++ b/kneron/onnx2nef520.py
@@ -0,0 +1,110 @@
+import ktc
+import numpy as np
+import os
+import onnx
+import shutil
+from PIL import Image
+import torch
+from yolov5_preprocess import Yolov5_preprocess
+import kneron_preprocessing
+
+# ONNX model location
+onnx_dir = 'runs/train/exp24/weights/'
+onnx_path = os.path.join(onnx_dir, 'best_no_sigmoid.onnx')
+
+# Make sure the target directory exists
+os.makedirs(onnx_dir, exist_ok=True)
+
+# Load and optimize the ONNX model
+m = onnx.load(onnx_path)
+m = ktc.onnx_optimizer.onnx2onnx_flow(m)
+opt_onnx_path = os.path.join(onnx_dir, 'latest.opt.onnx')
+onnx.save(m, opt_onnx_path)
+
+km = ktc.ModelConfig(20008, "0001", "520", onnx_model=m)
+eval_result = km.evaluate()
+print("\nNpu performance evaluation result:\n" + str(eval_result))
+
+device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+
+imgsz_h, imgsz_w = 640, 640
+
+data_path = "datacoin"
+files_found = [f for _, _, files in os.walk(data_path) for f in files if f.lower().endswith((".jpg", ".jpeg", ".png", ".bmp"))]
+
+if not files_found:
+    raise FileNotFoundError(f"\u274c Error: No images found in {data_path}! Please check your dataset.")
+
+print(f"\u2705 Found {len(files_found)} images in {data_path}")
+
+# Get the ONNX model's input name (the key must match the ONNX input name)
+input_name = m.graph.input[0].name
+
+# Holds the preprocessed image tensors
+img_list = []
+
+# Walk the dataset and preprocess every image
+for root, _, files in os.walk(data_path):
+    for f in files:
+        fullpath = os.path.join(root, f)
+
+        # Only process image files
+        if not f.lower().endswith((".jpg", ".jpeg", ".png", ".bmp")):
+            print(f"\u26a0\ufe0f Skipping non-image file: {fullpath}")
+            continue
+
+        # Try to preprocess the image
+        try:
+            img_data, _ = Yolov5_preprocess(fullpath, device, imgsz_h, imgsz_w)
+            img_data = img_data.cpu().numpy()
+            print(f"\u2705 Processed: {fullpath}")
+            img_list.append(img_data)
+        except Exception as e:
+            print(f"\u274c Failed to process {fullpath}: {e}")
+
+# Make sure img_list is not empty
+if not img_list:
+    raise ValueError("\u274c Error: No valid images were processed! Please check the image paths and formats.")
+
+# Run BIE quantization
+bie_model_path = km.analysis({input_name: img_list})
+
+# Copy the BIE file into the target directory (shutil.copy works across drives)
+bie_save_path = os.path.join(onnx_dir, os.path.basename(bie_model_path))
+shutil.copy(bie_model_path, bie_save_path)
+
+# Confirm the BIE model was generated
+if not os.path.exists(bie_save_path):
+    raise RuntimeError(f"\u274c Error: BIE model was not generated! Please check your quantization process.")
+
+print("\n\u2705 Fixed-point analysis done! BIE model saved to:", bie_save_path)
+
+# `km` must already be initialized and the .bie model generated before compiling
+nef_model_path = ktc.compile([km])
+# Make sure ktc.compile() returned a valid path and that the .nef file exists
+if not nef_model_path or not os.path.exists(nef_model_path):
+    raise RuntimeError(f"❌ Error: NEF model was not generated at {nef_model_path}! Please check your compilation process.")
+
+# Copy the NEF file into the target directory
+nef_save_path = os.path.join(onnx_dir, os.path.basename(nef_model_path))
+shutil.copy(nef_model_path, nef_save_path)
+
+if not os.path.exists(nef_save_path):
+    raise RuntimeError(f"\u274c Error: NEF model was not generated! Please check your compilation process.")
+
+print("\n\u2705 Compile done! NEF file saved to:", nef_save_path)
diff --git a/kneron/onnx2nef630.py b/kneron/onnx2nef630.py
new file mode 100644
index 0000000..dda13a8
--- /dev/null
+++ b/kneron/onnx2nef630.py
@@ -0,0 +1,110 @@
+import ktc
+import numpy as np
+import os
+import onnx
+import shutil
+from PIL import Image
+import torch
+from yolov5_preprocess import Yolov5_preprocess
+import kneron_preprocessing
+
+# ONNX model location
+onnx_dir = 'runs/train/exp29/weights/'
+onnx_path = os.path.join(onnx_dir, 'best_simplified.onnx')
+
+# Make sure the target directory exists
+os.makedirs(onnx_dir, exist_ok=True)
+
+# Load and optimize the ONNX model
+m = onnx.load(onnx_path)
+m = ktc.onnx_optimizer.onnx2onnx_flow(m)
+opt_onnx_path = os.path.join(onnx_dir, 'latest.opt.onnx')
+onnx.save(m, opt_onnx_path)
+
+km = ktc.ModelConfig(20008, "0001", "630", onnx_model=m)
+eval_result = km.evaluate()
+print("\nNpu performance evaluation result:\n" + str(eval_result))
+
+device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+
+imgsz_h, imgsz_w = 640, 640
+
+data_path = "data4"
+files_found = [f for _, _, files in os.walk(data_path) for f in files if f.lower().endswith((".jpg", ".jpeg", ".png", ".bmp"))]
+
+if not files_found:
+    raise FileNotFoundError(f"\u274c Error: No images found in {data_path}! Please check your dataset.")
+
+print(f"\u2705 Found {len(files_found)} images in {data_path}")
+
+# Get the ONNX model's input name (the key must match the ONNX input name)
+input_name = m.graph.input[0].name
+
+# Holds the preprocessed image tensors
+img_list = []
+
+# Walk the dataset and preprocess every image
+for root, _, files in os.walk(data_path):
+    for f in files:
+        fullpath = os.path.join(root, f)
+
+        # Only process image files
+        if not f.lower().endswith((".jpg", ".jpeg", ".png", ".bmp")):
+            print(f"\u26a0\ufe0f Skipping non-image file: {fullpath}")
+            continue
+
+        # Try to preprocess the image
+        try:
+            img_data, _ = Yolov5_preprocess(fullpath, device, imgsz_h, imgsz_w)
+            img_data = img_data.cpu().numpy()
+            print(f"\u2705 Processed: {fullpath}")
+            img_list.append(img_data)
+        except Exception as e:
+            print(f"\u274c Failed to process {fullpath}: {e}")
+
+# Make sure img_list is not empty
+if not img_list:
+    raise ValueError("\u274c Error: No valid images were processed! Please check the image paths and formats.")
+
+# Run BIE quantization
+bie_model_path = km.analysis({input_name: img_list})
+
+# Copy the BIE file into the target directory (shutil.copy works across drives)
+bie_save_path = os.path.join(onnx_dir, os.path.basename(bie_model_path))
+shutil.copy(bie_model_path, bie_save_path)
+
+# Confirm the BIE model was generated
+if not os.path.exists(bie_save_path):
+    raise RuntimeError(f"\u274c Error: BIE model was not generated! Please check your quantization process.")
+
+print("\n\u2705 Fixed-point analysis done! BIE model saved to:", bie_save_path)
+# `km` must already be initialized and the .bie model generated before compiling
+nef_model_path = ktc.compile([km])
+
+# Make sure ktc.compile() returned a valid path and that the .nef file exists
+if not nef_model_path or not os.path.exists(nef_model_path):
+    raise RuntimeError(f"❌ Error: NEF model was not generated at {nef_model_path}! Please check your compilation process.")
+
+# Copy the NEF file into the target directory
+nef_save_path = os.path.join(onnx_dir, os.path.basename(nef_model_path))
+shutil.copy(nef_model_path, nef_save_path)
+
+if not os.path.exists(nef_save_path):
+    raise RuntimeError(f"\u274c Error: NEF model was not generated! Please check your compilation process.")
+
+print("\n\u2705 Compile done! NEF file saved to:", nef_save_path)
diff --git a/kneron/onnx2nef720.py b/kneron/onnx2nef720.py
new file mode 100644
index 0000000..89496dd
--- /dev/null
+++ b/kneron/onnx2nef720.py
@@ -0,0 +1,110 @@
+import ktc
+import numpy as np
+import os
+import onnx
+import shutil
+from PIL import Image
+import torch
+from yolov5_preprocess import Yolov5_preprocess
+import kneron_preprocessing
+
+# ONNX model location
+onnx_dir = 'runs/train/exp73/weights/'
+onnx_path = os.path.join(onnx_dir, 'best_simplified.onnx')
+
+# Make sure the target directory exists
+os.makedirs(onnx_dir, exist_ok=True)
+
+# Load and optimize the ONNX model
+m = onnx.load(onnx_path)
+m = ktc.onnx_optimizer.onnx2onnx_flow(m)
+opt_onnx_path = os.path.join(onnx_dir, 'latest.opt.onnx')
+onnx.save(m, opt_onnx_path)
+
+km = ktc.ModelConfig(20008, "0001", "720", onnx_model=m)
+eval_result = km.evaluate()
+print("\nNpu performance evaluation result:\n" + str(eval_result))
+
+device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+
+imgsz_h, imgsz_w = 640, 640
+
+data_path = "data50"
+files_found = [f for _, _, files in os.walk(data_path) for f in files if f.lower().endswith((".jpg", ".jpeg", ".png", ".bmp"))]
+
+if not files_found:
+    raise FileNotFoundError(f"\u274c Error: No images found in {data_path}! Please check your dataset.")
+
+print(f"\u2705 Found {len(files_found)} images in {data_path}")
+
+# Get the ONNX model's input name (the key must match the ONNX input name)
+input_name = m.graph.input[0].name
+
+# Holds the preprocessed image tensors
+img_list = []
+
+# Walk the dataset and preprocess every image
+for root, _, files in os.walk(data_path):
+    for f in files:
+        fullpath = os.path.join(root, f)
+
+        # Only process image files
+        if not f.lower().endswith((".jpg", ".jpeg", ".png", ".bmp")):
+            print(f"\u26a0\ufe0f Skipping non-image file: {fullpath}")
+            continue
+
+        # Try to preprocess the image
+        try:
+            img_data, _ = Yolov5_preprocess(fullpath, device, imgsz_h, imgsz_w)
+            img_data = img_data.cpu().numpy()
+            print(f"\u2705 Processed: {fullpath}")
+            img_list.append(img_data)
+        except Exception as e:
+            print(f"\u274c Failed to process {fullpath}: {e}")
+
+# Make sure img_list is not empty
+if not img_list:
+    raise ValueError("\u274c Error: No valid images were processed! Please check the image paths and formats.")
+# Run BIE quantization
+bie_model_path = km.analysis({input_name: img_list})
+
+# Copy the BIE file into the target directory (shutil.copy works across drives)
+bie_save_path = os.path.join(onnx_dir, os.path.basename(bie_model_path))
+shutil.copy(bie_model_path, bie_save_path)
+
+# Confirm the BIE model was generated
+if not os.path.exists(bie_save_path):
+    raise RuntimeError(f"\u274c Error: BIE model was not generated! Please check your quantization process.")
+
+print("\n\u2705 Fixed-point analysis done! BIE model saved to:", bie_save_path)
+
+# `km` must already be initialized and the .bie model generated before compiling
+nef_model_path = ktc.compile([km])
+
+# Make sure ktc.compile() returned a valid path and that the .nef file exists
+if not nef_model_path or not os.path.exists(nef_model_path):
+    raise RuntimeError(f"❌ Error: NEF model was not generated at {nef_model_path}! Please check your compilation process.")
+
+# Copy the NEF file into the target directory
+nef_save_path = os.path.join(onnx_dir, os.path.basename(nef_model_path))
+shutil.copy(nef_model_path, nef_save_path)
+
+if not os.path.exists(nef_save_path):
+    raise RuntimeError(f"\u274c Error: NEF model was not generated! Please check your compilation process.")
+
+print("\n\u2705 Compile done! NEF file saved to:", nef_save_path)
diff --git a/kneron/onnx2nefSTDC630.py b/kneron/onnx2nefSTDC630.py
new file mode 100644
index 0000000..dd5bb44
--- /dev/null
+++ b/kneron/onnx2nefSTDC630.py
@@ -0,0 +1,110 @@
+import ktc
+import numpy as np
+import os
+import onnx
+import shutil
+from PIL import Image
+import torch
+from yolov5_preprocess import Yolov5_preprocess
+import kneron_preprocessing
+
+# ONNX model location
+onnx_dir = 'work_dirs/kn_stdc1_in1k-pre_512x1024_80k_cityscapes/'
+onnx_path = os.path.join(onnx_dir, 'latest.onnx')
+
+# Make sure the target directory exists
+os.makedirs(onnx_dir, exist_ok=True)
+
+# Load and optimize the ONNX model
+m = onnx.load(onnx_path)
+m = ktc.onnx_optimizer.onnx2onnx_flow(m)
+opt_onnx_path = os.path.join(onnx_dir, 'latest.opt.onnx')
+onnx.save(m, opt_onnx_path)
+
+km = ktc.ModelConfig(20008, "0001", "630", onnx_model=m)
+eval_result = km.evaluate()
+print("\nNpu performance evaluation result:\n" + str(eval_result))
+
+device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+
+imgsz_h, imgsz_w = 640, 640
+
+data_path = "data50"
+files_found = [f for _, _, files in os.walk(data_path) for f in files if f.lower().endswith((".jpg", ".jpeg", ".png", ".bmp"))]
+
+if not files_found:
+    raise FileNotFoundError(f"\u274c Error: No images found in {data_path}! Please check your dataset.")
+
+print(f"\u2705 Found {len(files_found)} images in {data_path}")
+
+# Get the ONNX model's input name (the key must match the ONNX input name)
+input_name = m.graph.input[0].name
+
+# Holds the preprocessed image tensors
+img_list = []
+
+# Walk the dataset and preprocess every image
+for root, _, files in os.walk(data_path):
+    for f in files:
+        fullpath = os.path.join(root, f)
+
+        # Only process image files
+        if not f.lower().endswith((".jpg", ".jpeg", ".png", ".bmp")):
+            print(f"\u26a0\ufe0f Skipping non-image file: {fullpath}")
+            continue
+
+        # Try to preprocess the image
+        try:
+            img_data, _ = Yolov5_preprocess(fullpath, device, imgsz_h, imgsz_w)
+            img_data = img_data.cpu().numpy()
+            print(f"\u2705 Processed: {fullpath}")
+            img_list.append(img_data)
+        except Exception as e:
+            print(f"\u274c Failed to process {fullpath}: {e}")
+
+# Make sure img_list is not empty
+if not img_list:
+    raise ValueError("\u274c Error: No valid images were processed! Please check the image paths and formats.")
+
+# Run BIE quantization
+bie_model_path = km.analysis({input_name: img_list})
+
+# Copy the BIE file into the target directory (shutil.copy works across drives)
+bie_save_path = os.path.join(onnx_dir, os.path.basename(bie_model_path))
+shutil.copy(bie_model_path, bie_save_path)
+
+# Confirm the BIE model was generated
+if not os.path.exists(bie_save_path):
+    raise RuntimeError(f"\u274c Error: BIE model was not generated! Please check your quantization process.")
+
+print("\n\u2705 Fixed-point analysis done! BIE model saved to:", bie_save_path)
+
+# `km` must already be initialized and the .bie model generated before compiling
+nef_model_path = ktc.compile([km])
+
+# Make sure ktc.compile() returned a valid path and that the .nef file exists
+if not nef_model_path or not os.path.exists(nef_model_path):
+    raise RuntimeError(f"❌ Error: NEF model was not generated at {nef_model_path}! Please check your compilation process.")
+
+# Copy the NEF file into the target directory
+nef_save_path = os.path.join(onnx_dir, os.path.basename(nef_model_path))
+shutil.copy(nef_model_path, nef_save_path)
+
+if not os.path.exists(nef_save_path):
+    raise RuntimeError(f"\u274c Error: NEF model was not generated! Please check your compilation process.")
+
+print("\n\u2705 Compile done! NEF file saved to:", nef_save_path)
diff --git a/kneron/preprocessing/API.py b/kneron/preprocessing/API.py
new file mode 100644
index 0000000..3630caa
--- /dev/null
+++ b/kneron/preprocessing/API.py
@@ -0,0 +1,684 @@
+# -*- coding: utf-8 -*-
+
+import numpy as np
+import os
+from .funcs.utils import str2int, str2bool
+from . import Flow
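# [Editor's note] module layout, summarized from the code below: three Flow
# singletons are kept (floating, 520 and 720 numerical types) plus a mutable
# `default` dict; set_default_as_520()/set_default_as_floating() flip the crop
# and resize defaults so the generic crop()/resize() wrappers emulate the
# chosen hardware path.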
+
+flow = Flow()
+flow.set_numerical_type('floating')
+flow_520 = Flow()
+flow_520.set_numerical_type('520')
+flow_720 = Flow()
+flow_720.set_numerical_type('720')
+
+DEFAULT = None
+default = {
+    'crop': {
+        'align_w_to_4': False
+    },
+    'resize': {
+        'type': 'bilinear',
+        'calculate_ratio_using_CSim': False
+    }
+}
+
+def set_default_as_520():
+    """
+    Set some default parameters to the 520 setting
+
+    crop.align_w_to_4 = True
+    resize.type = 'fixed_520'
+    resize.calculate_ratio_using_CSim = True
+    """
+    global default
+    default['crop']['align_w_to_4'] = True
+    default['resize']['type'] = 'fixed_520'
+    default['resize']['calculate_ratio_using_CSim'] = True
+    return
+
+def set_default_as_floating():
+    """
+    Set some default parameters to the floating setting
+
+    crop.align_w_to_4 = False
+    resize.type = 'bilinear'
+    resize.calculate_ratio_using_CSim = False
+    """
+    global default
+    default['crop']['align_w_to_4'] = False
+    default['resize']['type'] = 'bilinear'
+    default['resize']['calculate_ratio_using_CSim'] = False
+    pass
+
+def print_info_on():
+    """
+    Turn printing of information on.
+    """
+    flow.set_print_info(True)
+    flow_520.set_print_info(True)
+
+def print_info_off():
+    """
+    Turn printing of information off.
+    """
+    flow.set_print_info(False)
+    flow_520.set_print_info(False)
+
+def load_image(image):
+    """
+    load_image function
+    Load an image and output it as an rgb888-format np.array
+
+    Args:
+        image: [np.array/str], can be an np.array or an image file path
+
+    Returns:
+        out: [np.array], rgb888 format
+
+    Examples:
+    """
+    image = flow.load_image(image, is_raw=False)
+    return image
+
+def load_bin(image, fmt=None, size=None):
+    """
+    load_bin function
+    Load a bin file and output it as an rgb888-format np.array
+
+    Args:
+        image: [str], bin file path
+        fmt: [str], "rgb888" / "rgb565" / "nir"
+        size: [tuple], (image_w, image_h)
+
+    Returns:
+        out: [np.array], rgb888 format
+
+    Examples:
+        >>> image_data = kneron_preprocessing.API.load_bin(image, 'rgb565', (raw_w, raw_h))
+    """
+    assert isinstance(size, tuple)
+    assert isinstance(fmt, str)
+    # assert (fmt.lower() in ['rgb888', "rgb565", "nir", 'RGB888', "RGB565", "NIR", 'NIR888', 'nir888'])
+
+    image = flow.load_image(image, is_raw=True, raw_img_type='bin', raw_img_fmt=fmt, img_in_width=size[0], img_in_height=size[1])
+    flow.set_color_conversion(source_format=fmt, out_format='rgb888')
+    image, _ = flow.funcs['color'](image)
+    return image
+
+def load_hex(file, fmt=None, size=None):
+    """
+    load_hex function
+    Load a hex file and output it as an rgb888-format np.array
+
+    Args:
+        image: [str], hex file path
+        fmt: [str], "rgb888" / "yuv444" / "ycbcr444" / "yuv422" / "ycbcr422" / "rgb565"
+        size: [tuple], (image_w, image_h)
+
+    Returns:
+        out: [np.array], rgb888 format
+
+    Examples:
+        >>> image_data = kneron_preprocessing.API.load_hex(image, 'rgb565', (raw_w, raw_h))
+    """
+    assert isinstance(size, tuple)
+    assert isinstance(fmt, str)
+    assert (fmt.lower() in ['rgb888', "yuv444", "ycbcr444", "yuv422", "ycbcr422", "rgb565"])
+
+    image = flow.load_image(file, is_raw=True, raw_img_type='hex', raw_img_fmt=fmt, img_in_width=size[0], img_in_height=size[1])
+    flow.set_color_conversion(source_format=fmt, out_format='rgb888')
+    image, _ = flow.funcs['color'](image)
+    return image
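# [Editor's note] illustrative round trip, not part of the original file; the
# file names and sizes are hypothetical:
# >>> raw = load_bin('frame.bin', fmt='rgb565', size=(640, 480))       # -> rgb888 array
# >>> dump_image(raw, 'frame.txt', file_fmt='txt', image_fmt='RGB888') # dump_image below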
+
+def dump_image(image, output=None, file_fmt='txt', image_fmt='rgb888', order=0):
+    """
+    dump_image function
+
+    Dump to txt, bin or hex; the default is txt.
+    The image format is one of RGB888, RGBA8888, RGB565, NIR, YUV444, YCbCr444, YUV422, YCbCr422; the default is RGB888.
+
+    Args:
+        image: [np.array/str], can be an np.array or an image file path
+        output: [str], dump file path
+        file_fmt: [str], "bin" / "txt" / "hex", dump file format, default is txt
+        image_fmt: [str], RGB888 / RGBA8888 / RGB565 / NIR / YUV444 / YCbCr444 / YUV422 / YCbCr422, default is RGB888
+
+    Examples:
+        >>> kneron_preprocessing.API.dump_image(image_data, out_path, fmt='bin')
+    """
+    if isinstance(image, str):
+        image = load_image(image)
+
+    assert isinstance(image, np.ndarray)
+    if output is None:
+        return
+
+    flow.set_output_setting(is_dump=False, dump_format=file_fmt, image_format=image_fmt, output_file=output)
+    flow.dump_image(image)
+    return
+
+def convert(image, out_fmt='RGB888', source_fmt='RGB888'):
+    """
+    color convert
+
+    Args:
+        image: [np.array], input
+        out_fmt: [str], "rgb888" / "rgba8888" / "rgb565" / "yuv" / "ycbcr" / "yuv422" / "ycbcr422"
+        source_fmt: [str], "rgb888" / "rgba8888" / "rgb565" / "yuv" / "ycbcr" / "yuv422" / "ycbcr422"
+
+    Returns:
+        out: [np.array]
+
+    Examples:
+
+    """
+    flow.set_color_conversion(source_format=source_fmt, out_format=out_fmt, simulation=False)
+    image, _ = flow.funcs['color'](image)
+    return image
+
+def get_crop_range(box, align_w_to_4=DEFAULT, pad_square_to_4=False, rounding_type=0):
+    """
+    Get the exact crop box according to the different settings
+
+    Args:
+        box: [tuple], (x1, y1, x2, y2)
+        align_w_to_4: [bool], align the crop width to a multiple of 4 or not, default False
+        pad_square_to_4: [bool], pad to a square (aligned to 4) or not, default False
+        rounding_type: [int], 0 -> x1, y1 take floor, x2, y2 take ceil; 1 -> all take rounding
+
+    Returns:
+        out: [tuple, 4], (crop_x1, crop_y1, crop_x2, crop_y2)
+
+    Examples:
+        >>> kneron_preprocessing.API.get_crop_range((272, 145, 461, 341), align_w_to_4=True, pad_square_to_4=True)
+        (272, 145, 460, 341)
+    """
+    if box is None:
+        return (0, 0, 0, 0)
+    if align_w_to_4 is None:
+        align_w_to_4 = default['crop']['align_w_to_4']
+
+    flow.set_crop(type='specific', start_x=box[0], start_y=box[1], end_x=box[2], end_y=box[3], align_w_to_4=align_w_to_4, pad_square_to_4=pad_square_to_4, rounding_type=rounding_type)
+    image = np.zeros((1, 1, 3)).astype('uint8')
+    _, info = flow.funcs['crop'](image)
+
+    return info['box']
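# [Editor's note] the width alignment in practice, using the values from the
# crop() docstring below: the requested width 461 - 272 = 189 is shrunk to 188
# (a multiple of 4) when align_w_to_4=True:
# >>> get_crop_range((272, 145, 461, 341), align_w_to_4=True)
# (272, 145, 460, 341)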
+
+def crop(image, box=None, align_w_to_4=DEFAULT, pad_square_to_4=False, rounding_type=0, info_out={}):
+    """
+    crop function
+
+    Crop with an explicit box.
+
+    Args:
+        image: [np.array], input
+        box: [tuple], (x1, y1, x2, y2)
+        align_w_to_4: [bool], align the crop width to a multiple of 4 or not, default False
+        pad_square_to_4: [bool], pad to a square (aligned to 4) or not, default False
+        rounding_type: [int], 0 -> x1, y1 take floor, x2, y2 take ceil; 1 -> all take rounding
+        info_out: [dict], the final crop box is saved into info_out['box']
+
+    Returns:
+        out: [np.array]
+
+    Examples:
+        >>> info = {}
+        >>> image_data = kneron_preprocessing.API.crop(image_data, (272, 145, 461, 341), align_w_to_4=True, info_out=info)
+        >>> info['box']
+        (272, 145, 460, 341)
+
+        >>> info = {}
+        >>> image_data = kneron_preprocessing.API.crop(image_data, (272, 145, 461, 341), pad_square_to_4=True, info_out=info)
+        >>> info['box']
+        (268, 145, 464, 341)
+    """
+    assert isinstance(image, np.ndarray)
+    if box is None:
+        return image
+    if align_w_to_4 is None:
+        align_w_to_4 = default['crop']['align_w_to_4']
+
+    flow.set_crop(type='specific', start_x=box[0], start_y=box[1], end_x=box[2], end_y=box[3], align_w_to_4=align_w_to_4, pad_square_to_4=pad_square_to_4, rounding_type=rounding_type)
+    image, info = flow.funcs['crop'](image)
+
+    info_out['box'] = info['box']
+    return image
+
+def crop_center(image, range=None, align_w_to_4=DEFAULT, pad_square_to_4=False, rounding_type=0, info_out={}):
+    """
+    crop function
+
+    Center crop with the given range.
+
+    Args:
+        image: [np.array], input
+        range: [tuple], (crop_w, crop_h)
+        align_w_to_4: [bool], align the crop width to a multiple of 4 or not, default False
+        pad_square_to_4: [bool], pad to a square (aligned to 4) or not, default False
+        rounding_type: [int], 0 -> x1, y1 take floor, x2, y2 take ceil; 1 -> all take rounding
+        info_out: [dict], the final crop box is saved into info_out['box']
+
+    Returns:
+        out: [np.array]
+
+    Examples:
+        >>> info = {}
+        >>> image_data = kneron_preprocessing.API.crop_center(image_data, (102, 40), align_w_to_4=True, info_out=info)
+        >>> info['box']
+        (268, 220, 372, 260)
+
+        >>> info = {}
+        >>> image_data = kneron_preprocessing.API.crop_center(image_data, (102, 40), pad_square_to_4=True, info_out=info)
+        >>> info['box']
+        (269, 192, 371, 294)
+    """
+    assert isinstance(image, np.ndarray)
+    if range is None:
+        return image
+    if align_w_to_4 is None:
+        align_w_to_4 = default['crop']['align_w_to_4']
+
+    flow.set_crop(type='center', crop_w=range[0], crop_h=range[1], align_w_to_4=align_w_to_4, pad_square_to_4=pad_square_to_4, rounding_type=rounding_type)
+    image, info = flow.funcs['crop'](image)
+
+    info_out['box'] = info['box']
+    return image
+
+def crop_corner(image, range=None, align_w_to_4=DEFAULT, pad_square_to_4=False, rounding_type=0, info_out={}):
+    """
+    crop function
+
+    Corner crop with the given range.
+
+    Args:
+        image: [np.array], input
+        range: [tuple], (crop_w, crop_h)
+        align_w_to_4: [bool], align the crop width to a multiple of 4 or not, default False
+        pad_square_to_4: [bool], pad to a square (aligned to 4) or not, default False
+        rounding_type: [int], 0 -> x1, y1 take floor, x2, y2 take ceil; 1 -> all take rounding
+        info_out: [dict], the final crop box is saved into info_out['box']
+
+    Returns:
+        out: [np.array]
+
+    Examples:
+        >>> info = {}
+        >>> image_data = kneron_preprocessing.API.crop_corner(image_data, (102, 40), align_w_to_4=True, info_out=info)
+        >>> info['box']
+        (0, 0, 104, 40)
+
+        >>> info = {}
+        >>> image_data = kneron_preprocessing.API.crop_corner(image_data, (102, 40), pad_square_to_4=True, info_out=info)
+        >>> info['box']
+        (0, -28, 102, 74)
+    """
+    assert isinstance(image, np.ndarray)
+    if range is None:
+        return image
+    if align_w_to_4 is None:
+        align_w_to_4 = default['crop']['align_w_to_4']
+
+    flow.set_crop(type='corner', crop_w=range[0], crop_h=range[1], align_w_to_4=align_w_to_4, pad_square_to_4=pad_square_to_4)
+    image, info = flow.funcs['crop'](image)
+
+    info_out['box'] = info['box']
+    return image
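# [Editor's note] worked example of keep_ratio for the resize() below, assuming a
# 100x50 (w x h) input and size=(56, 56): the scale is min(56/100, 56/50) = 0.56,
# so the output is 56x28 and pad_center()/pad_corner() can grow it back to 56x56.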
+
+def resize(image, size=None, keep_ratio=True, zoom=True, type=DEFAULT, calculate_ratio_using_CSim=DEFAULT, info_out={}):
+    """
+    resize function
+
+    The resize type can be 'bilinear' or 'bilicubic' as a floating type, or 'fixed' / 'fixed_520' / 'fixed_720' as a fixed type.
+    The fixed_520/fixed_720 types add extra logic to simulate 520/720 hardware bugs.
+
+    Args:
+        image: [np.array], input
+        size: [tuple], (input_w, input_h)
+        keep_ratio: [bool], keep the aspect ratio or not, default True
+        zoom: [bool], allow resize to zoom the image or not, default True
+        type: [str], "bilinear" / "bilicubic" / "cv2" / "fixed" / "fixed_520" / "fixed_720"
+        calculate_ratio_using_CSim: [bool], calculate the ratio and scale using the CSim function and C float, default False
+        info_out: [dict], the final scaled size (w, h) is saved into info_out['size']
+
+    Returns:
+        out: [np.array]
+
+    Examples:
+        >>> info = {}
+        >>> image_data = kneron_preprocessing.API.resize(image_data, size=(56, 56), type='fixed', info_out=info)
+        >>> info['size']
+        (54, 56)
+    """
+    assert isinstance(image, np.ndarray)
+    if size is None:
+        return image
+    if type is None:
+        type = default['resize']['type']
+    if calculate_ratio_using_CSim is None:
+        calculate_ratio_using_CSim = default['resize']['calculate_ratio_using_CSim']
+
+    flow.set_resize(resize_w=size[0], resize_h=size[1], type=type, keep_ratio=keep_ratio, zoom=zoom, calculate_ratio_using_CSim=calculate_ratio_using_CSim)
+    image, info = flow.funcs['resize'](image)
+    info_out['size'] = info['size']
+
+    return image
+
+def pad(image, pad_l=0, pad_r=0, pad_t=0, pad_b=0, pad_val=0):
+    """
+    pad function
+
+    Pad with explicit left, right, top and bottom pad sizes.
+
+    Args:
+        image: [np.array], input
+        pad_l: [int], pad size from the left, default 0
+        pad_r: [int], pad size from the right, default 0
+        pad_t: [int], pad size from the top, default 0
+        pad_b: [int], pad size from the bottom, default 0
+        pad_val: [float], the padding value, default 0
+
+    Returns:
+        out: [np.array]
+
+    Examples:
+        >>> image_data = kneron_preprocessing.API.pad(image_data, 20, 40, 20, 40, -0.5)
+    """
+    assert isinstance(image, np.ndarray)
+
+    flow.set_padding(type='specific', pad_l=pad_l, pad_r=pad_r, pad_t=pad_t, pad_b=pad_b, pad_val=pad_val)
+    image, _ = flow.funcs['padding'](image)
+    return image
+
+def pad_center(image, size=None, pad_val=0):
+    """
+    pad function
+
+    Center pad up to the given size.
+
+    Args:
+        image: [np.array], input
+        size: [tuple], (padded_size_w, padded_size_h)
+        pad_val: [float], the padding value, default 0
+
+    Returns:
+        out: [np.array]
+
+    Examples:
+        >>> image_data = kneron_preprocessing.API.pad_center(image_data, size=(56, 56), pad_val=-0.5)
+    """
+    assert isinstance(image, np.ndarray)
+    if size is None:
+        return image
+    assert ((image.shape[0] <= size[1]) & (image.shape[1] <= size[0]))
+
+    flow.set_padding(type='center', padded_w=size[0], padded_h=size[1], pad_val=pad_val)
+    image, _ = flow.funcs['padding'](image)
+    return image
+
+def pad_corner(image, size=None, pad_val=0):
+    """
+    pad function
+
+    Corner pad up to the given size.
+
+    Args:
+        image: [np.array], input
+        size: [tuple], (padded_size_w, padded_size_h)
+        pad_val: [float], the padding value, default 0
+
+    Returns:
+        out: [np.array]
+
+    Examples:
+        >>> image_data = kneron_preprocessing.API.pad_corner(image_data, size=(56, 56), pad_val=-0.5)
+    """
+    assert isinstance(image, np.ndarray)
+    if size is None:
+        return image
+    assert ((image.shape[0] <= size[1]) & (image.shape[1] <= size[0]))
+
+    flow.set_padding(type='corner', padded_w=size[0], padded_h=size[1], pad_val=pad_val)
+    image, _ = flow.funcs['padding'](image)
+    return image
+
+def norm(image, scale=256., bias=-0.5, mean=None, std=None):
+    """
+    norm function
+
+    x = x / scale + bias
+    x[0,1,2] = x - mean[0,1,2]
+    x[0,1,2] = x / std[0,1,2]
+
+    Args:
+        image: [np.array], input
+        scale: [float], default = 256
+        bias: [float], default = -0.5
+        mean: [tuple, 3], default = None
+        std: [tuple, 3], default = None
+
+    Returns:
+        out: [np.array]
+
+    Examples:
+        >>> image_data = kneron_preprocessing.API.norm(image_data)
+        >>> image_data = kneron_preprocessing.API.norm(image_data, mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
+    """
+    assert isinstance(image, np.ndarray)
+
+    flow.set_normalize(type='specific', scale=scale, bias=bias, mean=mean, std=std)
+    image, _ = flow.funcs['normalize'](image)
+    return image
+
+def inproc_520(image, raw_fmt='rgb565', raw_size=None, npu_size=None, crop_box=None, pad_mode=0, norm='kneron', gray=False, rotate=0, radix=8, bit_width=8, round_w_to_16=True, NUM_BANK_LINE=32, BANK_ENTRY_CNT=512, MAX_IMG_PREPROC_ROW_NUM=511, MAX_IMG_PREPROC_COL_NUM=256):
+    """
+    inproc_520
+
+    Args:
+        image: [np.array], input
+        raw_fmt: [str], format of a raw (bin) input, default 'rgb565'
+        raw_size: [tuple], (w, h) of the raw input when `image` is a bin file path
+        npu_size: [tuple], (w, h) model input size; if None the input is returned unchanged
+        crop_box: [tuple], (x1, y1, x2, y2); if None, crop is skipped
+        pad_mode: [int], 0: pad 2 sides, 1: pad 1 side, 2: no pad, default = 0
+        norm: [str], default = 'kneron'
+        rotate: [int], 0 / 1 / 2, default = 0
+        radix: [int], default = 8
+        bit_width: [int], default = 8
+        round_w_to_16: [bool], default = True
+        gray: [bool], default = False
+
+    Returns:
+        out: [np.array]
+
+    Examples:
+        >>> image_data = kneron_preprocessing.API.inproc_520(image_data, npu_size=(56, 56), crop_box=(272, 145, 460, 341), pad_mode=1)
+    """
+    # assert isinstance(image, np.ndarray)
+
+    if (not isinstance(image, np.ndarray)):
+        flow_520.set_raw_img(is_raw_img='yes', raw_img_type='bin', raw_img_fmt=raw_fmt, img_in_width=raw_size[0], img_in_height=raw_size[1])
+    else:
+        flow_520.set_raw_img(is_raw_img='no')
+        flow_520.set_color_conversion(source_format='rgb888')
+
+    if npu_size is None:
+        return image
+
+    flow_520.set_model_size(w=npu_size[0], h=npu_size[1])
+
+    ## Crop
+    if crop_box != None:
+        flow_520.set_crop(start_x=crop_box[0], start_y=crop_box[1], end_x=crop_box[2], end_y=crop_box[3])
+        crop_fisrt = True  # note: the 'crop_fisrt' spelling matches the 520_setting config key
+    else:
+        crop_fisrt = False
+
+    ## Color
+    if gray:
+        flow_520.set_color_conversion(out_format='l', simulation='no')
+    else:
+        flow_520.set_color_conversion(out_format='rgb888', simulation='no')
+
+    ## Resize & Pad
+    pad_mode = str2int(pad_mode)
+    if (pad_mode == 0):
+        pad_type = 'center'
+        resize_keep_ratio = 'yes'
+    elif (pad_mode == 1):
+        pad_type = 'corner'
+        resize_keep_ratio = 'yes'
+    else:
+        pad_type = 'center'
+        resize_keep_ratio = 'no'
+
+    flow_520.set_resize(keep_ratio=resize_keep_ratio)
+    flow_520.set_padding(type=pad_type)
+
+    ## Norm
+    flow_520.set_normalize(type=norm)
+
+    ## 520 inproc
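    # [Editor's note] recap of the branches above: pad_mode 0 keeps the aspect
    # ratio and pads both sides ('center'); pad_mode 1 keeps it and pads one
    # side ('corner'); any other value stretches to the model size with
    # keep_ratio off, which makes the 'center' padding a no-op.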
+    flow_520.set_520_setting(radix=radix, bit_width=bit_width, rotate=rotate, crop_fisrt=crop_fisrt, round_w_to_16=round_w_to_16, NUM_BANK_LINE=NUM_BANK_LINE, BANK_ENTRY_CNT=BANK_ENTRY_CNT, MAX_IMG_PREPROC_ROW_NUM=MAX_IMG_PREPROC_ROW_NUM, MAX_IMG_PREPROC_COL_NUM=MAX_IMG_PREPROC_COL_NUM)
+    image_data, _ = flow_520.run_whole_process(image)
+
+    return image_data
+
+def inproc_720(image, raw_fmt='rgb565', raw_size=None, npu_size=None, crop_box=None, pad_mode=0, norm='kneron', gray=False):
+    """
+    inproc_720
+
+    Args:
+        image: [np.array], input
+        raw_fmt: [str], format of a raw (bin) input, default 'rgb565'
+        raw_size: [tuple], (w, h) of the raw input when `image` is a bin file path
+        npu_size: [tuple], (w, h) model input size; if None the input is returned unchanged
+        crop_box: [tuple], (x1, y1, x2, y2); if None, crop is skipped
+        pad_mode: [int], 0: pad 2 sides, 1: pad 1 side, 2: no pad, default = 0
+        norm: [str], default = 'kneron'
+        gray: [bool], default = False
+
+    Returns:
+        out: [np.array]
+
+    Examples:
+        >>> image_data = kneron_preprocessing.API.inproc_720(image_data, npu_size=(56, 56), crop_box=(272, 145, 460, 341), pad_mode=1)
+    """
+    # assert isinstance(image, np.ndarray)
+
+    if (not isinstance(image, np.ndarray)):
+        flow_720.set_raw_img(is_raw_img='yes', raw_img_type='bin', raw_img_fmt=raw_fmt, img_in_width=raw_size[0], img_in_height=raw_size[1])
+    else:
+        flow_720.set_raw_img(is_raw_img='no')
+        flow_720.set_color_conversion(source_format='rgb888')
+
+    if npu_size is None:
+        return image
+
+    flow_720.set_model_size(w=npu_size[0], h=npu_size[1])
+
+    ## Crop
+    if crop_box != None:
+        flow_720.set_crop(start_x=crop_box[0], start_y=crop_box[1], end_x=crop_box[2], end_y=crop_box[3])
+        crop_fisrt = True
+    else:
+        crop_fisrt = False
+
+    ## Color
+    if gray:
+        flow_720.set_color_conversion(out_format='l', simulation='no')
+    else:
+        flow_720.set_color_conversion(out_format='rgb888', simulation='no')
+
+    ## Resize & Pad
+    pad_mode = str2int(pad_mode)
+    if (pad_mode == 0):
+        pad_type = 'center'
+        resize_keep_ratio = 'yes'
+    elif (pad_mode == 1):
+        pad_type = 'corner'
+        resize_keep_ratio = 'yes'
+    else:
+        pad_type = 'center'
+        resize_keep_ratio = 'no'
+
+    flow_720.set_resize(keep_ratio=resize_keep_ratio)
+    flow_720.set_padding(type=pad_type)
+
+    ## 720 inproc
+    # flow_720.set_720_setting(radix=radix, bit_width=bit_width, rotate=rotate, crop_fisrt=crop_fisrt, round_w_to_16=round_w_to_16, NUM_BANK_LINE=NUM_BANK_LINE, BANK_ENTRY_CNT=BANK_ENTRY_CNT, MAX_IMG_PREPROC_ROW_NUM=MAX_IMG_PREPROC_ROW_NUM, MAX_IMG_PREPROC_COL_NUM=MAX_IMG_PREPROC_COL_NUM)
+    image_data, _ = flow_720.run_whole_process(image)
+
+    return image_data
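# [Editor's note] observation on the 720 path above: set_720_setting is commented
# out and set_normalize is never called, so the `norm` argument is not wired into
# the flow here; the 720 path relies on the Flow defaults for those settings.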
+
+def bit_match(data1, data2):
+    """
+    bit_match function
+
+    Check whether data1 is equal to data2.
+
+    Args:
+        data1: [np.array / str], can be an array or a txt/bin file
+        data2: [np.array / str], can be an array or a txt/bin file
+
+    Returns:
+        out1: [bool], match or not
+        out2: [np.array], if not matched, the positions of the mismatched data
+
+    Examples:
+        >>> result, mismatched = kneron_preprocessing.API.bit_match(data1, data2)
+    """
+    if isinstance(data1, str):
+        if os.path.splitext(data1)[1] == '.bin':
+            data1 = np.fromfile(data1, dtype='uint8')
+        elif os.path.splitext(data1)[1] == '.txt':
+            data1 = np.loadtxt(data1)
+
+    assert isinstance(data1, np.ndarray)
+
+    if isinstance(data2, str):
+        if os.path.splitext(data2)[1] == '.bin':
+            data2 = np.fromfile(data2, dtype='uint8')
+        elif os.path.splitext(data2)[1] == '.txt':
+            data2 = np.loadtxt(data2)
+
+    assert isinstance(data2, np.ndarray)
+
+
+    data1 = data1.reshape((-1, 1))
+    data2 = data2.reshape((-1, 1))
+
+    if not (len(data1) == len(data2)):
+        print('error len')
+        return False, np.zeros((1))
+    else:
+        # NOTE: only positions where data2 - data1 > 0 are flagged; uint8
+        # wrap-around still catches data1 > data2, but for float (txt) inputs
+        # mismatches in that direction are missed
+        ans = data2 - data1
+        if len(np.where(ans > 0)[0]) > 0:
+            print('error', np.where(ans > 0)[0])
+            return False, np.where(ans > 0)[0]
+        else:
+            print('pass')
+            return True, np.zeros((1))
+
+def cpr_to_crp(x_start, x_end, y_start, y_end, pad_l, pad_r, pad_t, pad_b, rx_start, rx_end, ry_start, ry_end):
+    """
+    Convert the parameters of a crop->pad->resize flow to the HW crop->resize->padding flow
+
+    Args:
+
+    Returns:
+
+    Examples:
+
+    """
+    # NOTE: pad_l is overwritten before the remaining ratios are computed, so the
+    # pad_r/pad_t/pad_b scaling below uses the already-scaled pad_l value
+    pad_l = round(pad_l * (rx_end - rx_start) / (x_end - x_start + pad_l + pad_r))
+    pad_r = round(pad_r * (rx_end - rx_start) / (x_end - x_start + pad_l + pad_r))
+    pad_t = round(pad_t * (ry_end - ry_start) / (y_end - y_start + pad_t + pad_b))
+    pad_b = round(pad_b * (ry_end - ry_start) / (y_end - y_start + pad_t + pad_b))
+
+    rx_start += pad_l
+    rx_end -= pad_r
+    ry_start += pad_t
+    ry_end -= pad_b
+
+    return x_start, x_end, y_start, y_end, pad_l, pad_r, pad_t, pad_b, rx_start, rx_end, ry_start, ry_end
\ No newline at end of file
diff --git a/kneron/preprocessing/Cflow.py b/kneron/preprocessing/Cflow.py
new file mode 100644
index 0000000..02fffe1
--- /dev/null
+++ b/kneron/preprocessing/Cflow.py
@@ -0,0 +1,172 @@
+import numpy as np
+import argparse
+import kneron_preprocessing
+
+def main_(args):
+    image = args.input_file
+    filefmt = args.file_fmt
+    if filefmt == 'bin':
+        raw_format = args.raw_format
+        raw_w = args.input_width
+        raw_h = args.input_height
+
+        image_data = kneron_preprocessing.API.load_bin(image, raw_format, (raw_w, raw_h))
+    else:
+        image_data = kneron_preprocessing.API.load_image(image)
+
+    npu_w = args.width
+    npu_h = args.height
+
+    crop_first = True if args.crop_first == "True" else False
+    if crop_first:
+        x1 = args.x_pos
+        y1 = args.y_pos
+        x2 = args.crop_w + x1
+        y2 = args.crop_h + y1
+        crop_box = [x1, y1, x2, y2]
+    else:
+        crop_box = None
+
+    pad_mode = args.pad_mode
+    norm_mode = args.norm_mode
+    bitwidth = args.bitwidth
+    radix = args.radix
+    rotate = args.rotate_mode
+
+    ##
+    image_data = kneron_preprocessing.API.inproc_520(image_data, npu_size=(npu_w, npu_h), crop_box=crop_box, pad_mode=pad_mode, norm=norm_mode, rotate=rotate, radix=radix, bit_width=bitwidth)
+
+    output_file = args.output_file
+    kneron_preprocessing.API.dump_image(image_data, output_file, 'bin', 'rgba')
+
+    return
+
+
+if __name__ == "__main__":
+    argparser = argparse.ArgumentParser(
+        description="preprocessing"
+    )
+
+    argparser.add_argument(
+        '-i',
+        '--input_file',
+        help="input file name"
+    )
+
+    argparser.add_argument(
+        '-ff',
+        '--file_fmt',
+        help="input file format, jpg or bin"
+    )
help="input file image format, rgb or rgb565 or nir" + ) + + argparser.add_argument( + '-i_w', + '--input_width', + type=int, + help="input image width" + ) + + argparser.add_argument( + '-i_h', + '--input_height', + type=int, + help="input image height" + ) + + argparser.add_argument( + '-o', + '--output_file', + help="output file name" + ) + + argparser.add_argument( + '-s_w', + '--width', + type=int, + help="output width for npu input", + ) + + argparser.add_argument( + '-s_h', + '--height', + type=int, + help="output height for npu input", + ) + + argparser.add_argument( + '-c_f', + '--crop_first', + help="crop first True or False", + ) + + argparser.add_argument( + '-x', + '--x_pos', + type=int, + help="left up coordinate x", + ) + + argparser.add_argument( + '-y', + '--y_pos', + type=int, + help="left up coordinate y", + ) + + argparser.add_argument( + '-c_w', + '--crop_w', + type=int, + help="crop width", + ) + + argparser.add_argument( + '-c_h', + '--crop_h', + type=int, + help="crop height", + ) + + argparser.add_argument( + '-p_m', + '--pad_mode', + type=int, + help=" 0: pad 2 sides, 1: pad 1 side, 2: no pad.", + ) + + argparser.add_argument( + '-n_m', + '--norm_mode', + help="normalizaton mode: yolo, kneron, tf." + ) + + argparser.add_argument( + '-r_m', + '--rotate_mode', + type=int, + help="rotate mode:0,1,2" + ) + + argparser.add_argument( + '-bw', + '--bitwidth', + type=int, + help="Int for bitwidth" + ) + + argparser.add_argument( + '-r', + '--radix', + type=int, + help="Int for radix" + ) + + args = argparser.parse_args() + main_(args) \ No newline at end of file diff --git a/kneron/preprocessing/Flow.py b/kneron/preprocessing/Flow.py new file mode 100644 index 0000000..bab0041 --- /dev/null +++ b/kneron/preprocessing/Flow.py @@ -0,0 +1,1226 @@ +import numpy as np +from PIL import Image +import json +import math +import sys +from .funcs import * +from .funcs.utils import str2bool, bin_loader, hex_loader, str_fill, clip_ary +from .funcs.utils_520 import round_up_16, round_up_n, cal_img_row_offset, get_pad_num, get_byte_per_pixel +from .funcs.utils_720 import twos_complement_pix, clip_pix +from ctypes import c_float + + +class Flow(object): + # class function + def __init__(self, config_path = ''): + ''' + @brief: + Class name: Flow + Constructor with config_path + + @param: + config_path[str]: json file path or empty, init this class with json file. If empty, will use default setting. 
+ ''' + # init config + self.__init_config() + + # update config with joson file + try: + with open(config_path, encoding='utf-8') as f: + self.config = json.load(f) + except IOError: + pass + + # print info + if str2bool(self.config['print_info']): + print("pre-processing type:", self.config['type_name'],", model_size:",self.config['model_size'],", numerical_type",self.config['numerical_type']) + + # init funcs + self.error_state = 0 + self.subclass = {} + self.subclass['color'] = ColorConversion.runner() + self.subclass['resize'] = Resize.runner() + self.subclass['crop'] = Crop.runner() + self.subclass['padding'] = Padding.runner() + self.subclass['normalize'] = Normalize.runner() + + self.funcs = {} + self.funcs['crop'] = self.run_crop + self.funcs['color'] = self.run_color_conversion + self.funcs['resize'] = self.run_resize + self.funcs['normalize'] = self.run_normalize + self.funcs['padding'] = self.run_padding + + return + + def __init_config(self): + ''' + private function + ''' + self.config = { + "_comment": "PreProcessing", + "type_name": "default", + "numerical_type": "floating", + "print_info":"no", + "model_size": [ + 56, + 56 + ], + "raw_img":{ + "is_raw_img": "no", + "raw_img_type": "bin", + "raw_img_fmt": "rgb565", + "img_in_width": 640, + "img_in_height": 480 + }, + "output_setting":{ + "is_dump": "no", + "dump_format":"bin", + "output_file":"default.bin", + "image_format":"RGB888" + }, + "520_setting":{ + "radix": 8, + "bit_width": 8, + "rotate": 0, + "crop_fisrt": "no", + "NUM_BANK_LINE": 32, + "BANK_ENTRY_CNT": 512, + "MAX_IMG_PREPROC_ROW_NUM": 511, + "MAX_IMG_PREPROC_COL_NUM": 256, + "round_w_to_16": "no" + }, + "720_setting":{ + "radix": 8, + "shift":0, + "sub":0, + "bit_width": 8, + "rotate": 0, + "crop_fisrt": "no", + "matrix_c00": 1, + "matrix_c01": 0, + "matrix_c02": 0, + "matrix_c10": 0, + "matrix_c11": 1, + "matrix_c12": 0, + "matrix_c20": 0, + "matrix_c21": 0, + "matrix_c22": 1, + "vector_b00": 0, + "vector_b01": 0, + "vector_b02": 0 + }, + "floating_setting":{ + "job_list":[ + "color", + "crop", + "resize", + "padding", + "normalize", + ] + }, + "function_setting": { + "color": { + "out_format": "rgb888", + "options": { + "simulation": "no", + "simulation_format": "" + } + }, + "crop": { + "type": "corner", + "align_w_to_4":"no", + "pad_square_to_4":"no", + "rounding_type":0, + "crop_w": "", + "crop_h": "", + "start_x": "", + "start_y": "", + "end_x": "", + "end_y": "" + }, + "resize": { + "type": "fixed", + "keep_ratio": "yes", + "calculate_ratio_using_CSim": "yes", + "zoom": "yes", + "resize_w": "", + "resize_h": "", + }, + "padding": { + "type": "corner", + "pad_val": "", + "padded_w": "", + "padded_h": "", + "pad_l": "", + "pad_r": "", + "pad_t": "", + "pad_b": "" + }, + "normalize": { + "type": "kneron", + "scale": "", + "bias": "", + "mean": "", + "std": "" + } + } + } + return + + def __update_color(self): + ''' + private function + ''' + # + dic = self.config['function_setting']['color'] + dic['model_size'] = self.config['model_size'] + dic['print_info'] = self.config['print_info'] + self.subclass['color'].update(**dic) + + return + + def __update_crop(self): + ''' + private function + ''' + dic = {} + # common + dic['common'] = {} + dic['common']['print_info'] = self.config['print_info'] + dic['common']['model_size'] = self.config['model_size'] + dic['common']['numerical_type'] = self.config['numerical_type'] + + # general + dic['general'] = {} + dic['general']['type'] = self.config['function_setting']['crop']['type'] + dic['general']['align_w_to_4'] 
= self.config['function_setting']['crop']['align_w_to_4'] + dic['general']['pad_square_to_4'] = self.config['function_setting']['crop']['pad_square_to_4'] + dic['general']['rounding_type'] = self.config['function_setting']['crop']['rounding_type'] + dic['general']['crop_w'] = self.config['function_setting']['crop']['crop_w'] + dic['general']['crop_h'] = self.config['function_setting']['crop']['crop_h'] + dic['general']['start_x'] = self.config['function_setting']['crop']['start_x'] + dic['general']['start_y'] = self.config['function_setting']['crop']['start_y'] + dic['general']['end_x'] = self.config['function_setting']['crop']['end_x'] + dic['general']['end_y'] = self.config['function_setting']['crop']['end_y'] + + # floating + dic['floating'] = {} + + # hw + dic['hw'] = {} + + + self.subclass['crop'].update(**dic) + return + + def __update_resize(self): + ''' + private function + ''' + dic = {} + # common + dic['common'] = {} + dic['common']['print_info'] = self.config['print_info'] + dic['common']['model_size'] = self.config['model_size'] + dic['common']['numerical_type'] = self.config['numerical_type'] + + # general + dic['general'] = {} + dic['general']['type'] = self.config['function_setting']['resize']['type'] + dic['general']['keep_ratio'] = self.config['function_setting']['resize']['keep_ratio'] + dic['general']['zoom'] = self.config['function_setting']['resize']['zoom'] + dic['general']['calculate_ratio_using_CSim'] = self.config['function_setting']['resize']['calculate_ratio_using_CSim'] + dic['general']['resize_w'] = self.config['function_setting']['resize']['resize_w'] + dic['general']['resize_h'] = self.config['function_setting']['resize']['resize_h'] + + # floating + dic['floating'] = {} + + # hw + dic['hw'] = {} + + self.subclass['resize'].update(**dic) + return + + def __update_normalize(self): + ''' + private function + ''' + dic = {} + # general + dic['general'] = {} + dic['general']['print_info'] = self.config['print_info'] + dic['general']['model_size'] = self.config['model_size'] + dic['general']['numerical_type'] = self.config['numerical_type'] + dic['general']['type'] = self.config['function_setting']['normalize']['type'] + + # floating + dic['floating'] = {} + dic['floating']['scale'] = self.config['function_setting']['normalize']['scale'] + dic['floating']['bias'] = self.config['function_setting']['normalize']['bias'] + dic['floating']['mean'] = self.config['function_setting']['normalize']['mean'] + dic['floating']['std'] = self.config['function_setting']['normalize']['std'] + + # hw + dic['hw'] = {} + if self.config['numerical_type'] == '520': + dic['hw']['radix'] = self.config['520_setting']['radix'] + if self.config['numerical_type'] == '720': + dic['hw']['radix'] = self.config['720_setting']['radix'] + + self.subclass['normalize'].update(**dic) + return + + def __update_padding(self): + ''' + private function + ''' + dic = {} + # common + dic['common'] = {} + dic['common']['print_info'] = self.config['print_info'] + dic['common']['model_size'] = self.config['model_size'] + dic['common']['numerical_type'] = self.config['numerical_type'] + + # general + dic['general'] = {} + dic['general']['type'] = self.config['function_setting']['padding']['type'] + dic['general']['pad_val'] = self.config['function_setting']['padding']['pad_val'] + dic['general']['padded_w'] = self.config['function_setting']['padding']['padded_w'] + dic['general']['padded_h'] = self.config['function_setting']['padding']['padded_h'] + dic['general']['pad_l'] = 
self.config['function_setting']['padding']['pad_l']
+        dic['general']['pad_r'] = self.config['function_setting']['padding']['pad_r']
+        dic['general']['pad_t'] = self.config['function_setting']['padding']['pad_t']
+        dic['general']['pad_b'] = self.config['function_setting']['padding']['pad_b']
+
+        # floating
+        dic['floating'] = {}
+
+        # hw
+        dic['hw'] = {}
+        if self.config['numerical_type'] == '520':
+            dic['hw']['radix'] = self.config['520_setting']['radix']
+            dic['hw']['normalize_type'] = self.config['function_setting']['normalize']['type']
+        elif self.config['numerical_type'] == '720':
+            dic['hw']['radix'] = self.config['720_setting']['radix']
+            dic['hw']['normalize_type'] = self.config['function_setting']['normalize']['type']
+
+        self.subclass['padding'].update(**dic)
+        return
+
+    def set_numerical_type(self, type = ''):
+        '''
+        set_numerical_type
+
+        Set the preprocessing numerical type; "floating", "520" and "720" are supported.
+
+        Args:
+            type: [str], "520" / "720" / "floating"
+        '''
+        if not (type.lower() in ['520', '720', 'floating']):
+            type = 'floating'
+        self.config['numerical_type'] = type
+        return
+
+    def set_print_info(self, print_info = ''):
+        '''
+        Turn printed information on or off.
+
+        Args:
+            print_info: [str], "yes" / "no"
+        '''
+        self.config['print_info'] = print_info
+        return
+
+    def set_model_size(self, w, h):
+        '''
+        set_model_size, set the output image size (the npu input size)
+
+        Args:
+            w: [int]
+            h: [int]
+        '''
+        if w <= 0 or h <= 0:
+            return
+        self.config['model_size'][0] = w
+        self.config['model_size'][1] = h
+
+        return
+
+    def set_raw_img(self, is_raw_img='', raw_img_type = '', raw_img_fmt='', img_in_width='',img_in_height=''):
+        '''
+        Declare whether the input is a raw file.
+
+        Supported raw formats: rgb888, rgb565, nir, yuv and ycbcr.
+
+        Args:
+            is_raw_img: [str], "yes" / "no", is a raw file or not
+            raw_img_type: [str], "bin" / "hex", the raw file container; bin and hex files are supported.
+            raw_img_fmt: [str], "rgb888" / "rgb565" / "nir" / "ycbcr422" / "ycbcr444" / "yuv422" / "yuv444", the raw image format.
+            img_in_width: [int]
+            img_in_height: [int]
+        '''
+        if not(is_raw_img==''):
+            self.config['raw_img']['is_raw_img'] = is_raw_img
+        if not(raw_img_type==''):
+            self.config['raw_img']['raw_img_type'] = raw_img_type
+        if not(raw_img_fmt==''):
+            self.config['raw_img']['raw_img_fmt'] = raw_img_fmt
+        if not(img_in_width==''):
+            self.config['raw_img']['img_in_width'] = img_in_width
+        if not(img_in_height==''):
+            self.config['raw_img']['img_in_height'] = img_in_height
+        return
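+
+    # Raw-input sketch (illustrative values): declare the incoming buffer as a
+    # 640x480 rgb565 binary dump so load_image / run_whole_process can unpack it.
+    #
+    #   flow.set_raw_img(is_raw_img='yes', raw_img_type='bin',
+    #                    raw_img_fmt='rgb565', img_in_width=640, img_in_height=480)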
+    def set_output_setting(self, is_dump='', dump_format='',image_format='', output_file=''):
+        '''
+        set_output_setting, choose whether to dump the output; the dump format can be bin, hex or txt
+
+        Args:
+            is_dump: [str], "yes" / "no", enable the dump function or not
+            dump_format: [str], "bin" / "txt" / "hex", the dump file format.
+            image_format: [str], RGB888 / RGBA8888 / RGB565 / NIR / YUV444 / YCbCr444 / YUV422 / YCbCr422
+            output_file: [str], dump file path
+        '''
+        if not(is_dump==''):
+            self.config['output_setting']['is_dump'] = is_dump
+        if not(dump_format==''):
+            self.config['output_setting']['dump_format'] = dump_format
+        if not(image_format==''):
+            self.config['output_setting']['image_format'] = image_format
+        if not(output_file==''):
+            self.config['output_setting']['output_file'] = output_file
+        return
+
+    def set_520_setting(self, radix='', bit_width='', rotate='',crop_fisrt='', round_w_to_16 ='',NUM_BANK_LINE='',BANK_ENTRY_CNT='',MAX_IMG_PREPROC_ROW_NUM='',MAX_IMG_PREPROC_COL_NUM=''):
+        '''
+        Settings for the 520 inproc.
+
+        Args:
+            radix: [int], default 8
+            bit_width: [int], default 8
+            rotate: [int], 0 / 1 / 2, rotate type
+            crop_fisrt: [str], "yes" / "no", crop before inproc or not
+            round_w_to_16: [str], "yes" / "no", round the width up to a multiple of 16 or not
+            NUM_BANK_LINE: [int], default 32
+            BANK_ENTRY_CNT: [int], default 512
+            MAX_IMG_PREPROC_ROW_NUM: [int], default 511
+            MAX_IMG_PREPROC_COL_NUM: [int], default 256
+        '''
+        if not(radix==''):
+            self.config['520_setting']['radix'] = radix
+        if not(bit_width==''):
+            self.config['520_setting']['bit_width'] = bit_width
+        if not(rotate==''):
+            self.config['520_setting']['rotate'] = rotate
+        if not(crop_fisrt==''):
+            self.config['520_setting']['crop_fisrt'] = crop_fisrt
+        if not(round_w_to_16==''):
+            self.config['520_setting']['round_w_to_16'] = round_w_to_16
+        if not(NUM_BANK_LINE==''):
+            self.config['520_setting']['NUM_BANK_LINE'] = NUM_BANK_LINE
+        if not(BANK_ENTRY_CNT==''):
+            self.config['520_setting']['BANK_ENTRY_CNT'] = BANK_ENTRY_CNT
+        if not(MAX_IMG_PREPROC_ROW_NUM==''):
+            self.config['520_setting']['MAX_IMG_PREPROC_ROW_NUM'] = MAX_IMG_PREPROC_ROW_NUM
+        if not(MAX_IMG_PREPROC_COL_NUM==''):
+            self.config['520_setting']['MAX_IMG_PREPROC_COL_NUM'] = MAX_IMG_PREPROC_COL_NUM
+        return
+
+    def set_720_setting(self, radix='', bit_width='', rotate='',crop_fisrt='', matrix='',vector=''):
+        '''
+        Settings for the 720 inproc.
+
+        Args:
+            radix: [int], default 8
+            bit_width: [int], default 8
+            rotate: [int], 0 / 1 / 2, rotate type
+            crop_fisrt: [str], "yes" / "no", crop before inproc or not
+            matrix: [list], 9 coefficients in row-major order (c00..c22)
+            vector: [list], 3 offsets (b00..b02)
+        '''
+        if not(radix==''):
+            self.config['720_setting']['radix'] = radix
+        if not(bit_width==''):
+            self.config['720_setting']['bit_width'] = bit_width
+        if not(rotate==''):
+            self.config['720_setting']['rotate'] = rotate
+        if not(crop_fisrt==''):
+            self.config['720_setting']['crop_fisrt'] = crop_fisrt
+        # store the color matrix and vector (they were documented but silently
+        # dropped before this fix)
+        if not(matrix==''):
+            keys = ['matrix_c00','matrix_c01','matrix_c02','matrix_c10','matrix_c11','matrix_c12','matrix_c20','matrix_c21','matrix_c22']
+            for k, v in zip(keys, matrix):
+                self.config['720_setting'][k] = v
+        if not(vector==''):
+            for k, v in zip(['vector_b00','vector_b01','vector_b02'], vector):
+                self.config['720_setting'][k] = v
+        return
+
+    def set_floating_setting(self, job_list = []):
+        '''
+        set_floating_setting, set the floating pre-processing job list and its order; any combination of color, crop, resize, padding and normalize
+
+        Args:
+            job_list: [list], combination of "color" / "crop" / "resize" / "padding" / "normalize"
+        '''
+        if not(job_list==[]):
+            self.config['floating_setting']['job_list'] = job_list
+        return
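+
+    # Usage sketch for the hardware setters above (the values are illustrative,
+    # not defaults taken from any particular model):
+    #
+    #   flow = Flow()
+    #   flow.set_numerical_type('520')
+    #   flow.set_model_size(224, 224)
+    #   flow.set_520_setting(radix=8, rotate=0, crop_fisrt='no')
+    #   flow.set_floating_setting(job_list=['color', 'crop', 'resize', 'padding', 'normalize'])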
+    def set_color_conversion(self, source_format = '', out_format='', simulation='', simulation_format=''):
+        '''
+        set_color_conversion
+
+        Settings for color conversion and the inproc format unit.
+        Turning simulation on converts an rgb image into the given simulation format first, to mimic other sensor inputs.
+
+        Args:
+            source_format: [str], "rgb888" / "rgb565" / "yuv" / "ycbcr"
+            out_format: [str], "rgb888" / "l"
+            simulation: [str], "yes" / "no"
+            simulation_format: [str], "rgb565" / "yuv" / "ycbcr"
+        '''
+        if not(source_format==''):
+            self.config['function_setting']['color']['source_format'] = source_format
+        if not(out_format==''):
+            self.config['function_setting']['color']['out_format'] = out_format
+        if not(simulation==''):
+            self.config['function_setting']['color']['options']['simulation'] = simulation
+        if not(simulation_format==''):
+            self.config['function_setting']['color']['options']['simulation_format'] = simulation_format
+
+        return
+
+    def set_resize(self, type='', keep_ratio='', calculate_ratio_using_CSim='',zoom='', resize_w='', resize_h = ''):
+        '''
+        set_resize, settings for resize and the inproc resize unit.
+
+        The resize type can be bilinear or bilicubic for the floating flow, and fixed or fixed_520 for the fixed flow.
+        The fixed_520 type adds extra logic to reproduce a known 520 hardware bug.
+
+        Args:
+            type[str]: "bilinear" / "bilicubic" / "cv2" / "fixed" / "fixed_520"
+            keep_ratio[str]: "yes" / "no"
+            calculate_ratio_using_CSim[str]: "yes" / "no", calculate the ratio and scale with the CSim routine, using C float precision
+            zoom[str]: "yes" / "no", allow resize to enlarge the image or not
+            resize_w[int]: if empty, defaults to model_size[0]
+            resize_h[int]: if empty, defaults to model_size[1]
+        '''
+        if not(type==''):
+            self.config['function_setting']['resize']['type'] = type
+        if not(keep_ratio==''):
+            self.config['function_setting']['resize']['keep_ratio'] = keep_ratio
+        if not(calculate_ratio_using_CSim==''):
+            self.config['function_setting']['resize']['calculate_ratio_using_CSim'] = calculate_ratio_using_CSim
+        if not(zoom==''):
+            self.config['function_setting']['resize']['zoom'] = zoom
+        if not(resize_w==''):
+            self.config['function_setting']['resize']['resize_w'] = resize_w
+        if not(resize_h==''):
+            self.config['function_setting']['resize']['resize_h'] = resize_h
+
+        return
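+
+    # A resize sketch (illustrative sizes): letterboxing a 640x480 frame into a
+    # 224x224 model input with keep_ratio='yes' scales it to 224x168, leaving a
+    # 224x56 band for the padding step that follows.
+    #
+    #   flow.set_model_size(224, 224)
+    #   flow.set_resize(type='bilinear', keep_ratio='yes', zoom='no')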
+    def set_crop(self, type='', crop_w='', crop_h='', start_x='', start_y='', end_x='', end_y='',align_w_to_4="",pad_square_to_4="",rounding_type=""):
+        '''
+        set_crop, settings for crop and the rdma crop unit.
+
+        The crop type can be corner, center or specific.
+
+        If type is corner or center, set crop_w and crop_h (or leave them empty to default to model_size).
+
+        If type is specific, set start_x, start_y, end_x and end_y.
+
+        If start_x, start_y, end_x and end_y are all non-empty, the type switches to specific automatically.
+
+        Args:
+            type: [str], "corner" / "center" / "specific"
+            crop_w: [int], if empty, defaults to model_size[0]
+            crop_h: [int], if empty, defaults to model_size[1]
+            start_x: [int]
+            start_y: [int]
+            end_x: [int]
+            end_y: [int]
+            align_w_to_4: [str], align the crop width to a multiple of 4 or not
+            pad_square_to_4: [str], pad the crop to a square (aligned to 4) or not
+            rounding_type: [int], 0: floor x1,y1 and ceil x2,y2; 1: round everything
+        '''
+        if not(type==''):
+            self.config['function_setting']['crop']['type'] = type
+        if not(align_w_to_4==''):
+            self.config['function_setting']['crop']['align_w_to_4'] = align_w_to_4
+        if not(pad_square_to_4==''):
+            self.config['function_setting']['crop']['pad_square_to_4'] = pad_square_to_4
+        if not(rounding_type==''):
+            self.config['function_setting']['crop']['rounding_type'] = rounding_type
+        if not(crop_w==''):
+            self.config['function_setting']['crop']['crop_w'] = crop_w
+        if not(crop_h==''):
+            self.config['function_setting']['crop']['crop_h'] = crop_h
+        if not(start_x==''):
+            self.config['function_setting']['crop']['start_x'] = start_x
+        if not(start_y==''):
+            self.config['function_setting']['crop']['start_y'] = start_y
+        if not(end_x==''):
+            self.config['function_setting']['crop']['end_x'] = end_x
+        if not(end_y==''):
+            self.config['function_setting']['crop']['end_y'] = end_y
+        return
+
+    def set_padding(self, type='', pad_val='', padded_w='', padded_h='', pad_l='', pad_r='', pad_t='', pad_b=''):
+        '''
+        set_padding, settings for padding and the inproc padding unit.
+
+        The padding type can be corner, center or specific.
+
+        If type is corner or center, set padded_w and padded_h (or leave them empty to default to model_size).
+
+        If type is specific, set pad_l, pad_r, pad_t and pad_b.
+
+        If pad_l, pad_r, pad_t and pad_b are all non-empty, the type switches to specific automatically.
+
+        If the numerical type is 520 or 720, pad_val is adjusted automatically according to the radix.
+
+        Args:
+            type: [str], "corner" / "center" / "specific"
+            pad_val: [float]
+            padded_w: [int]
+            padded_h: [int]
+            pad_l: [int]
+            pad_r: [int]
+            pad_t: [int]
+            pad_b: [int]
+        '''
+        if not(type==''):
+            self.config['function_setting']['padding']['type'] = type
+        if not(pad_val==''):
+            self.config['function_setting']['padding']['pad_val'] = pad_val
+        if not(padded_w==''):
+            self.config['function_setting']['padding']['padded_w'] = padded_w
+        if not(padded_h==''):
+            self.config['function_setting']['padding']['padded_h'] = padded_h
+        if not(pad_l==''):
+            self.config['function_setting']['padding']['pad_l'] = pad_l
+        if not(pad_r==''):
+            self.config['function_setting']['padding']['pad_r'] = pad_r
+        if not(pad_t==''):
+            self.config['function_setting']['padding']['pad_t'] = pad_t
+        if not(pad_b==''):
+            self.config['function_setting']['padding']['pad_b'] = pad_b
+        return
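+
+    # Crop/pad sketch (illustrative coordinates): take a 100x100 region whose
+    # top-left corner is (10, 20), then pad the result back to the model size.
+    # Setting all four corner values switches the crop type to 'specific'
+    # automatically, as documented above; see set_normalize below for the
+    # normalization step that usually follows.
+    #
+    #   flow.set_crop(start_x=10, start_y=20, end_x=110, end_y=120)
+    #   flow.set_padding(type='center', pad_val=0)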
+    def set_normalize(self, type='', scale='', bias='', mean='', std =''):
+        '''
+        set_normalize, settings for normalize and the inproc chen unit.
+
+        If the numerical type is floating:
+            the normalize type can be customized, torch, tf, caffe, yolo or kneron;
+            if type is customized, set scale, bias, mean and std
+
+        If the numerical type is 520 or 720:
+            the normalize type can be tf, yolo or kneron
+
+        Args:
+            type: [str], "customized" / "torch" / "tf" / "caffe" / "yolo" / "kneron"
+            scale: [float]
+            bias: [float]
+            mean: [list,3]
+            std: [list,3]
+        '''
+        if not(type==''):
+            self.config['function_setting']['normalize']['type'] = type
+        if not(scale==''):
+            self.config['function_setting']['normalize']['scale'] = scale
+        if not(bias==''):
+            self.config['function_setting']['normalize']['bias'] = bias
+        if not(mean==''):
+            self.config['function_setting']['normalize']['mean'] = mean
+        if not(std==''):
+            self.config['function_setting']['normalize']['std'] = std
+        return
+
+    def load_image(self, image, is_raw = False , raw_img_type = '', raw_img_fmt = '', img_in_height = 0, img_in_width = 0):
+        '''
+        load_image function
+
+        Args:
+            image: [np.array/str], can be an np.array or a file path (bin/hex/jpg)
+            is_raw: [bool], is a raw image (bin or hex) or not
+            raw_img_type: [str], "bin" / "hex"
+            raw_img_fmt: [str], "yuv444" / "ycbcr444" / "yuv422" / "ycbcr422" / "rgb565" / "nir"
+            img_in_width: [int]
+            img_in_height: [int]
+
+        Returns:
+            out: [np.array], color conversion not included
+        '''
+        if isinstance(image, np.ndarray):
+            return image
+        if str2bool(is_raw):
+            dic ={}
+            dic['raw_img_fmt'] = raw_img_fmt
+            dic['img_in_height'] = img_in_height
+            dic['img_in_width'] = img_in_width
+            if raw_img_type.lower() == 'bin':
+                image_data = bin_loader(image,**dic)
+            elif raw_img_type.lower() == 'hex':
+                image_data = hex_loader(image,**dic)
+            else:
+                # fail loudly instead of hitting an UnboundLocalError below
+                raise ValueError('unsupported raw_img_type: ' + str(raw_img_type))
+        elif isinstance(image, str):
+            image = Image.open(image).convert("RGB")
+            image_data = np.array(image).astype('uint8')
+
+        assert isinstance(image_data, np.ndarray)
+        return image_data
+
+    def dump_image(self,image_data):
+        '''
+        dump_image function: dump the image according to the config settings, as txt, bin or hex
+
+        Args:
+            image: [np.array]
+        '''
+        assert isinstance(image_data, np.ndarray)
+        assert (len(image_data.shape) >= 2)
+
+        # elif chain: a 2-D (grayscale) array has no channel axis, so the
+        # original unconditional shape[2] check would raise an IndexError
+        if (len(image_data.shape) == 2):
+            source_format = 'L'
+        elif (image_data.shape[2] == 4):
+            source_format = 'RGBA8888'
+        else:
+            source_format = 'RGB888'
+
+        convert = ColorConversion.runner()
+        if (source_format == 'L') and (self.config['output_setting']['image_format'].lower() not in ['l', 'nir']):
+            convert.update(**{"source_format": "L","out_format": "RGB888"})
+            image_data, _ = convert.run(image_data)
+            source_format = 'RGB888'
+
+        if (source_format == 'RGBA8888') and (self.config['output_setting']['image_format'].lower() not in ['rgba8888', 'rgba']):
+            convert.update(**{"source_format": "RGBA8888","out_format": "RGB888"})
+            image_data, _ = convert.run(image_data)
+            source_format = 'RGB888'
+
+        if (self.config['output_setting']['image_format'].lower() in ['rgb565']):
+            convert.update(**{"source_format": source_format,"out_format": "RGB565"})
+            image_data_565, _ = convert.run(image_data)
+            image_data = np.zeros((image_data_565.shape[0],image_data_565.shape[1],2), dtype=np.uint8)
+            image_data[:,:,1] = ( image_data_565[:,:,0] << 3 ) + ( image_data_565[:,:,1] >> 3 )
+            image_data[:,:,0] = ( (image_data_565[:,:,1] & 0x07) << 5 ) + image_data_565[:,:,2]
+        elif (self.config['output_setting']['image_format'].lower() in ['rgba8888','rgba']) and (source_format != 'RGBA8888'):
+            convert.update(**{"source_format": source_format,"out_format": "rgba"})
+
image_data, _ = convert.run(image_data) + elif (self.config['output_setting']['image_format'].lower() in ['L', 'l', 'NIR', 'nir']): + convert.update(**{"source_format": source_format,"out_format": "L"}) + image_data, _ = convert.run(image_data) + elif (self.config['output_setting']['image_format'].lower() in['YUV', 'YUV444','yuv','yuv444']): + convert.update(**{"source_format": source_format,"out_format": "YUV444"}) + image_data_YUV, _ = convert.run(image_data) + image_data = np.zeros((image_data_YUV.shape[0],image_data_YUV.shape[1],4), dtype=np.uint8) + image_data[:,:,3] = image_data_YUV[:,:,0] + image_data[:,:,2] = image_data_YUV[:,:,1] + image_data[:,:,1] = image_data_YUV[:,:,2] + elif (self.config['output_setting']['image_format'].lower() in['YUV422','yuv422']): + convert.update(**{"source_format": source_format,"out_format": "YUV444"}) + image_data_YUV, _ = convert.run(image_data) + pixels = image_data_YUV.shape[0] * image_data_YUV.shape[1] + image_data = np.zeros((pixels*2,1), dtype=np.uint8) + image_data_YUV = image_data_YUV.reshape((-1,1)) + for i in range(0,image_data.shape[0],4): + j = i//2 #source index + image_data[i+3,0] = image_data_YUV[j*3,0] + image_data[i+2,0] = image_data_YUV[j*3+1,0] + image_data[i+1,0] = image_data_YUV[j*3+3,0] + image_data[i,0] = image_data_YUV[j*3+5,0] + elif (self.config['output_setting']['image_format'].lower() in['YCBCR', 'YCBCR444','YCbCr','YCbCr444','ycbcr','ycbcr444']): + convert.update(**{"source_format": source_format,"out_format": "YCBCR444"}) + image_data_YCBCR, _ = convert.run(image_data) + image_data = np.zeros((image_data_YCBCR.shape[0],image_data_YCBCR.shape[1],4), dtype=np.uint8) + image_data[:,:,3] = image_data_YCBCR[:,:,0] + image_data[:,:,2] = image_data_YCBCR[:,:,1] + image_data[:,:,1] = image_data_YCBCR[:,:,2] + elif (self.config['output_setting']['image_format'].lower() in['YCBCR422','YCbCr422','ycbcr422']): + convert.update(**{"source_format": source_format,"out_format": "YCBCR422"}) + image_data_YCBCR, _ = convert.run(image_data) + image_data = np.zeros((image_data_YCBCR.shape[0],image_data_YCBCR.shape[1],2), dtype=np.uint8) + pixels = image_data_YCBCR.shape[0] * image_data_YCBCR.shape[1] + image_data = np.zeros((pixels*2,1), dtype=np.uint8) + image_data_YCBCR = image_data_YCBCR.reshape((-1,1)) + for i in range(0,image_data.shape[0],4): + j = i//2 #source index + image_data[i+3,0] = image_data_YCBCR[j*3,0] + image_data[i+2,0] = image_data_YCBCR[j*3+1,0] + image_data[i+1,0] = image_data_YCBCR[j*3+3,0] + image_data[i,0] = image_data_YCBCR[j*3+5,0] + + if self.config['output_setting']['dump_format'].lower() in ['txt', 'TXT']: + np.savetxt(self.config['output_setting']['output_file'],image_data.reshape((-1,1)),fmt="%.8f") + elif self.config['output_setting']['dump_format'].lower() in ['bin', 'BIN']: + image_data.reshape((-1,1)).astype("uint8").tofile(self.config['output_setting']['output_file']) + elif self.config['output_setting']['dump_format'].lower() in ['hex', 'HEX']: + height, width, c = image_data.shape + output_line = math.floor((height * width) / 4) + image_f = image_data.reshape((height * width, c)) + f = open(self.config['output_setting']['output_file'], "w") + for i in range(output_line): + pixels = "" + for j in range(min((i+1)*4-1, image_f.shape[0]-1), i*4-1, -1): + pixels = pixels + str_fill(hex(image_f[j, 3]).lstrip("0x")) + pixels = pixels + str_fill(hex(image_f[j, 2]).lstrip("0x")) + pixels = pixels + str_fill(hex(image_f[j, 1]).lstrip("0x")) + pixels = pixels + str_fill(hex(image_f[j, 0]).lstrip("0x")) + 
f.write(pixels + "\n") + return + + def run_whole_process(self, image): + ''' + run_whole_process, according config setting to run all pre-processing + + Args: + image: [np.array/str], can be np.array or file path(bin/jpg) + + Returns: + out: [np.array] + ''' + assert (self.error_state == 0) + + image_data = self.load_image( + image, + is_raw = self.config['raw_img']["is_raw_img"], + raw_img_type = self.config['raw_img']["raw_img_type"], + raw_img_fmt = self.config['raw_img']["raw_img_fmt"], + img_in_height= self.config['raw_img']["img_in_height"], + img_in_width=self.config['raw_img']["img_in_width"]) + + if str2bool(self.config['raw_img']["is_raw_img"]): + self.set_color_conversion(source_format=self.config['raw_img']["raw_img_fmt"]) + elif isinstance(image, str): + self.set_color_conversion(source_format='RGB888') + + h_ori = image_data.shape[0] + w_ori = image_data.shape[1] + + if self.config['numerical_type'] == 'floating': + image_data = self.__run_whole_process_floating(image_data) + elif self.config['numerical_type'] == '520': + image_data = self.__run_whole_process_520(image_data) + elif self.config['numerical_type'] == '720': + image_data = self.__run_whole_process_720(image_data) + + if str2bool(self.config['output_setting']['is_dump']): + self.dump_image(image_data) + + scale = max(1.0*w_ori / image_data.shape[1], 1.0*h_ori / image_data.shape[0]) + out = {'h_ori': h_ori, 'w_ori': w_ori, "scale": scale} + return image_data, out + + def __run_whole_process_floating(self,image_data): + ''' + private function + ''' + for job in self.config['floating_setting']['job_list']: + if job.lower() in ['crop','color','resize','normalize','padding']: + image_data, _ = self.funcs[job](image_data) + + return image_data + + def __run_whole_process_520(self,image_data): + ''' + private function + ''' + # init from config + originH, originW, _ = image_data.shape + npu_img_w = self.config['model_size'][0] + npu_img_h = self.config['model_size'][1] + + if self.config['function_setting']['padding']['type'].lower() in ['center','CENTER','Center','0',0]: + pad_mode = 0 + elif self.config['function_setting']['padding']['type'].lower() in ['corner','CORNER','Corner','1',1]: + pad_mode = 1 + else: + pad_mode = 2 + + if not str2bool(self.config['function_setting']['resize']['keep_ratio']): + pad_mode = 2 + + NUM_BANK_LINE = self.config['520_setting']['NUM_BANK_LINE'] + BANK_ENTRY_CNT = self.config['520_setting']['BANK_ENTRY_CNT'] + MAX_IMG_PREPROC_ROW_NUM = self.config['520_setting']['MAX_IMG_PREPROC_ROW_NUM'] + MAX_IMG_PREPROC_COL_NUM = self.config['520_setting']['MAX_IMG_PREPROC_COL_NUM'] + + raw_fmt = self.config['function_setting']['color']['source_format'] + crop_fisrt = str2bool(self.config['520_setting']['crop_fisrt']) + keep_ratio = str2bool(self.config['function_setting']['resize']['keep_ratio']) + + # init crop + if crop_fisrt: + startW = self.config['function_setting']['crop']['start_x'] + startH = self.config['function_setting']['crop']['start_y'] + cropW = self.config['function_setting']['crop']['end_x'] - self.config['function_setting']['crop']['start_x'] + cropH = self.config['function_setting']['crop']['end_y'] - self.config['function_setting']['crop']['start_y'] + else: + startW = 0 + startH = 0 + cropW = originW + cropH = originH + + crop_num = [0] * 4 + crop_num[0] = startW #left + crop_num[1] = startH #top + crop_num[2] = originW - (startW + cropW) #right + crop_num[3] = originH - (startH + cropH) #bottom + + # calculate scaleW scaleH padW padH + if keep_ratio: + out_w = npu_img_w + 
out_h = npu_img_h + orig_w = cropW + orig_h = cropH + + w_ratio = c_float(out_w * 1.0 / (orig_w * 1.0)).value + h_ratio = c_float(out_h * 1.0 / (orig_h * 1.0)).value + scale_ratio = 0.0 + scale_target_w = 0 + scale_target_h = 0 + padH = 0 + padW = 0 + + bScaleW = True if w_ratio < h_ratio else False + if bScaleW: + scale_ratio = w_ratio + scale_target_w = int(c_float(scale_ratio * orig_w + 0.5).value) + scale_target_h = int(c_float(scale_ratio * orig_h + 0.5).value) + assert (abs(scale_target_w - out_w) <= 1), "Error: scale down width cannot meet expectation\n" + padH = out_h - scale_target_h + padW = 0 + assert (padH >= 0), "Error: padH shouldn't be less than zero\n" + else: + scale_ratio = h_ratio + scale_target_w = int(c_float(scale_ratio * orig_w + 0.5).value) + scale_target_h = int(c_float(scale_ratio * orig_h + 0.5).value) + assert (abs(scale_target_h - out_h) <= 1), "Error: scale down height cannot meet expectation\n" + padW = out_w - scale_target_w + padH = 0 + assert (padW >= 0), "Error: padW shouldn't be less than zero\n" + + scaleW = out_w - padW + scaleH = out_h - padH + else: + scaleW = npu_img_w + scaleH = npu_img_h + padW = 0 + padH = 0 + + # calculate pad_top pad_bottom pad_left pad_right + if (pad_mode == 0): + # pad on both side + pad_top = padH // 2 + pad_bottom = (padH // 2) + (padH % 2) + pad_left = padW // 2 + pad_right = (padW // 2) + (padW % 2) + elif (pad_mode == 1): + # only pad right and bottom + pad_top = 0 + pad_bottom = padH + pad_left = 0 + pad_right = padW + else: + pad_top = 0 + pad_bottom = 0 + pad_left = 0 + pad_right = 0 + + if (pad_right > 127 or pad_bottom > 127): + print("Pad value larger than 127 is not supported\n") + + orig_pad_num = [0] * 4 + orig_pad_num[0] = pad_left + orig_pad_num[1] = pad_top + orig_pad_num[2] = pad_right + orig_pad_num[3] = pad_bottom + + valid_in_row = cropH + valid_in_col = cropW + out_row = scaleH + padH + out_col = scaleW + padW + + # calculate cut_total + max_row = int(math.floor(BANK_ENTRY_CNT * NUM_BANK_LINE / (out_col / 4))) + max_row = min(max_row, MAX_IMG_PREPROC_ROW_NUM) + + if (pad_mode == 0): + big_pad_row = (out_row % max_row) < (pad_bottom + 4) + if (big_pad_row): + last_row = int(pad_bottom + 4) + cut_total = int(math.ceil( float(out_row - last_row) / max_row) + 1) + else: + cut_total = int(math.ceil( float(out_row) / max_row)) + elif (pad_mode == 1): + big_pad_row = (out_row % max_row) < (pad_bottom + 4) + last_row = max_row + if (big_pad_row): + cut_total = int(math.ceil( float(out_row - last_row) / max_row) + 1) + else: + cut_total = int(math.ceil( float(out_row) / max_row)) + else: + big_pad_row = False + cut_total = int(math.ceil( float(out_row) / max_row)) + + # calculate seg_cnt + max_col = MAX_IMG_PREPROC_COL_NUM + last_col = 0 + if (out_col % max_col): + if (pad_mode == 0): + big_pad_col = (out_col % max_col) < (pad_right + 4) + if (big_pad_col): + last_col = round_up_n(pad_right + 4, 4) + seg_cnt = math.ceil( float(out_col - last_col) / max_col) + 1 + else: + seg_cnt = math.ceil( float(out_col) / max_col) + elif (pad_mode == 1): + big_pad_col = (out_col % max_col) < (pad_right + 4) + last_col = max_col + if (big_pad_col): + seg_cnt = math.ceil( float(out_col - last_col) / max_col) + 1 + else: + seg_cnt = math.ceil( float(out_col) / max_col) + else: + big_pad_col = False + seg_cnt = math.ceil( float(out_col) / max_col) + else: + big_pad_col = False + seg_cnt = math.ceil( float(out_col) / max_col) + + # start loop + if (big_pad_row): + remain_row = out_row - last_row + else: + remain_row = out_row + 
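+        # The nested loops below emulate the 520 line-buffer limits: the output
+        # is processed in tiles of at most max_row rows and
+        # MAX_IMG_PREPROC_COL_NUM columns, and every tile runs its own
+        # crop -> color -> resize -> normalize -> pad pass before being
+        # stitched back with np.concatenate. Illustrative numbers, ignoring the
+        # big-pad special cases: a 224-row output with max_row = 128 gives
+        # cut_total = ceil(224 / 128) = 2 row bands.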
start_row = 0 + row_num = 0 + for r in range(0, cut_total): + start_row += row_num + block_start_row = cal_img_row_offset(crop_num, orig_pad_num, start_row, out_row, originH) + if (big_pad_row) and (r == (cut_total - 1)): + row_num = last_row + else: + row_num = min(max_row, remain_row) + + # due to HW only support max col = 256, we may need to process data in segments */ + if(big_pad_col): + remain_col = (out_col - last_col) + else: + remain_col = out_col + start_col = 0 + col_num = 0 + block_start_col = crop_num[0] + block_col = 0 + for c in range(0,seg_cnt): + start_col += col_num + block_start_col += block_col + if (big_pad_col) and (c == (seg_cnt - 1)): + col_num = last_col + else: + col_num = min(remain_col, MAX_IMG_PREPROC_COL_NUM) + + pad_num = get_pad_num(orig_pad_num, (c == 0), (r == 0), (c == seg_cnt - 1), (r == cut_total - 1)) + block_row = int(valid_in_row * (row_num - pad_num[1] - pad_num[3]) / (out_row - orig_pad_num[1] - orig_pad_num[3])) + block_col = int(valid_in_col * (col_num - pad_num[0] - pad_num[2]) / (out_col - orig_pad_num[0] - orig_pad_num[2])) + #/* (src_w * byte_per_pixel) should align to multiple of 4-byte and 2 cols */ + byte_per_pixel = get_byte_per_pixel(raw_fmt) + new_block_col = round_up_n(round_up_n(block_col, (4 / byte_per_pixel)), 2) + + if (new_block_col > block_col): + if byte_per_pixel == 1: + block_col = new_block_col - 4 + elif byte_per_pixel == 4: + block_col = new_block_col - 2 + else: + block_col = new_block_col - 2 + + ## + # crop + self.set_crop(start_x=block_start_col, start_y=block_start_row, end_x=block_start_col+block_col,end_y=block_start_row+block_row,align_w_to_4=False) + image_temp, _ = self.funcs['crop'](image_data) + + # color + image_temp, _ = self.funcs['color'](image_temp) + + # resize + self.set_resize(type='fixed_520',keep_ratio='no',calculate_ratio_using_CSim = 'yes', resize_w=(col_num - pad_num[0] - pad_num[2]),resize_h=(row_num - pad_num[1] - pad_num[3])) + image_temp, _ = self.funcs['resize'](image_temp) + + # normalize + image_temp, _ = self.funcs['normalize'](image_temp) + + # padding + self.set_padding(type='specific',pad_l=pad_num[0],pad_t=pad_num[1],pad_r=pad_num[2],pad_b=pad_num[3]) + image_temp, _ = self.funcs['padding'](image_temp) + + ## + remain_col -= col_num + if c == 0: + image_temp_H = image_temp + else: + image_temp_H = np.concatenate((image_temp_H, image_temp), axis=1) + + ## + remain_row -= row_num + if r == 0: + image_temp_V = image_temp_H + else: + image_temp_V = np.concatenate((image_temp_V, image_temp_H), axis=0) + + ## + image_data = image_temp_V + + # # round_w_to_16 + if str2bool(self.config['520_setting']['round_w_to_16']): + out_w_16 = round_up_n(out_col,16) + image = np.ones((out_row,out_w_16 - out_col,4)) *128 + image_data = np.concatenate((image_data, image), axis=1) + + # rotate + rotate = self.config['520_setting']['rotate'] + if not (rotate == 0): + dic = {} + dic['rotate_direction'] = rotate + rotate = Rotate.runner(**dic, b_print = str2bool(self.config['print_info'])) + image_data = rotate.run(image_data) + + return image_data + + def __run_whole_process_720(self,image_data): + ''' + private function + ''' + # init from config + crop_fisrt = str2bool(self.config['720_setting']['crop_fisrt']) + matrix_c00 = self.config['720_setting']['matrix_c00'] + matrix_c01 = self.config['720_setting']['matrix_c01'] + matrix_c02 = self.config['720_setting']['matrix_c02'] + matrix_c10 = self.config['720_setting']['matrix_c10'] + matrix_c11 = self.config['720_setting']['matrix_c11'] + matrix_c12 = 
self.config['720_setting']['matrix_c12'] + matrix_c20 = self.config['720_setting']['matrix_c20'] + matrix_c21 = self.config['720_setting']['matrix_c21'] + matrix_c22 = self.config['720_setting']['matrix_c22'] + vector_b00 = self.config['720_setting']['vector_b00'] + vector_b01 = self.config['720_setting']['vector_b01'] + vector_b02 = self.config['720_setting']['vector_b02'] + shiftvalue = self.config['720_setting']['shift'] + subvalue = self.config['720_setting']['sub'] + + #crop + if crop_fisrt: + image_data, _ = self.funcs['crop'](image_data) + + #color + image_data, _ = self.funcs['color'](image_data) + + #resize + self.set_resize(type='fixed_720',calculate_ratio_using_CSim = 'yes') + image_data, _ = self.funcs['resize'](image_data) + + #matrix + h, w, c = image_data.shape + image_f = image_data.reshape((h * w, c)) + matrix_c = np.array([[matrix_c00, matrix_c01, matrix_c02], + [matrix_c10, matrix_c11, matrix_c12], + [matrix_c20, matrix_c21, matrix_c22]]) + b = np.array([[vector_b00], [vector_b01], [vector_b02]]) + calculated_image_f = np.zeros(image_f.shape, dtype=np.uint8) + for i in range(h*w): + pt = np.swapaxes(image_f[np.newaxis, i, :], 0, 1) + matrix_pt = np.floor(np.multiply((matrix_c @ pt), 1/np.power(2, 1))) + matrix_pt.astype(int) + result = np.floor(np.multiply(np.add(matrix_pt, b), 1/np.power(2, 7))) + result.astype(int) + + result = twos_complement_pix(result) + + if shiftvalue == 1: + result = clip_pix(np.add(result, -128 * np.ones(result.shape)), -128, 127) + else: + result = clip_pix(result, 0, 255) + + result = result + np.array([[subvalue], [subvalue], [subvalue]]) + calculated_image_f[i, :] = clip_ary(np.squeeze(result)) + + image_data = calculated_image_f.reshape(image_data[:, :, 0:3].shape) + + #padding + image_data, _ = self.funcs['padding'](image_data) + + return image_data + + def run_crop(self, image_data): + ''' + @brief + run_crop, according config setting to run crop + + @param + image[np.array] : only can be np.array + + @return + np.array + ''' + self.__update_crop() + image_data, info = self.subclass['crop'].run(image_data) + return image_data, info + + def run_color_conversion(self, image_data): + ''' + @brief + run_color_conversion, according config setting to run color conversion + + @param + image[np.array] : only can be np.array + + @return + np.array + ''' + self.__update_color() + image_data, info = self.subclass['color'].run(image_data) + return image_data,info + + def run_resize(self, image_data): + ''' + @brief + run_resize, according config setting to run resize + + @param + image[np.array] : only can be np.array + + @return + np.array + ''' + self.__update_resize() + image_data,info = self.subclass['resize'].run(image_data) + return image_data,info + + def run_normalize(self, image_data): + ''' + @brief + run_normalize, according config setting to run normalize + + @param + image[np.array] : only can be np.array + + @return + np.array + ''' + self.__update_normalize() + image_data,info = self.subclass['normalize'].run(image_data) + return image_data,info + + def run_padding(self, image_data): + ''' + @brief + run_padding, according config setting to run padding + + @param + image[np.array] : only can be np.array + + @return + np.array + ''' + self.__update_padding() + image_data,info = self.subclass['padding'].run(image_data) + return image_data,info + + diff --git a/kneron/preprocessing/__init__.py b/kneron/preprocessing/__init__.py new file mode 100644 index 0000000..0a40017 --- /dev/null +++ b/kneron/preprocessing/__init__.py @@ -0,0 +1,2 @@ 
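+# package init: re-export the Flow class and the functional API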
+from .Flow import * +from .API import * diff --git a/kneron/preprocessing/funcs/ColorConversion.py b/kneron/preprocessing/funcs/ColorConversion.py new file mode 100644 index 0000000..8bfea7b --- /dev/null +++ b/kneron/preprocessing/funcs/ColorConversion.py @@ -0,0 +1,285 @@ +import numpy as np +from PIL import Image +from .utils import signed_rounding, clip, str2bool + +format_bit = 10 +c00_yuv = 1 +c02_yuv = 1436 +c10_yuv = 1 +c11_yuv = -354 +c12_yuv = -732 +c20_yuv = 1 +c21_yuv = 1814 +c00_ycbcr = 1192 +c02_ycbcr = 1634 +c10_ycbcr = 1192 +c11_ycbcr = -401 +c12_ycbcr = -833 +c20_ycbcr = 1192 +c21_ycbcr = 2065 + +Matrix_ycbcr_to_rgb888 = np.array( + [[1.16438356e+00, 1.16438356e+00, 1.16438356e+00], + [2.99747219e-07, - 3.91762529e-01, 2.01723263e+00], + [1.59602686e+00, - 8.12968294e-01, 3.04059479e-06]]) + +Matrix_rgb888_to_ycbcr = np.array( + [[0.25678824, - 0.14822353, 0.43921569], + [0.50412941, - 0.29099216, - 0.36778824], + [0.09790588, 0.43921569, - 0.07142745]]) + +Matrix_rgb888_to_yuv = np.array( + [[ 0.29899106, -0.16877996, 0.49988381], + [ 0.5865453, -0.33110385, -0.41826072], + [ 0.11446364, 0.49988381, -0.08162309]]) + +# Matrix_rgb888_to_yuv = np.array( +# [[0.299, - 0.147, 0.615], +# [0.587, - 0.289, - 0.515], +# [0.114, 0.436, - 0.100]]) + +# Matrix_yuv_to_rgb888 = np.array( +# [[1.000, 1.000, 1.000], +# [0.000, - 0.394, 2.032], +# [1.140, - 0.581, 0.000]]) + +class runner(object): + def __init__(self): + self.set = { + 'print_info':'no', + 'model_size':[0,0], + 'numerical_type':'floating', + "source_format": "rgb888", + "out_format": "rgb888", + "options": { + "simulation": "no", + "simulation_format": "rgb888" + } + } + + def update(self, **kwargs): + # + self.set.update(kwargs) + + ## simulation + self.funs = [] + if str2bool(self.set['options']['simulation']) and self.set['source_format'].lower() in ['RGB888', 'rgb888', 'RGB', 'rgb']: + if self.set['options']['simulation_format'].lower() in ['YUV422', 'yuv422', 'YUV', 'yuv']: + self.funs.append(self._ColorConversion_RGB888_to_YUV422) + self.set['source_format'] = 'YUV422' + elif self.set['options']['simulation_format'].lower() in ['YCBCR422', 'YCbCr422', 'ycbcr422', 'YCBCR', 'YCbCr', 'ycbcr']: + self.funs.append(self._ColorConversion_RGB888_to_YCbCr422) + self.set['source_format'] = 'YCbCr422' + elif self.set['options']['simulation_format'].lower() in['RGB565', 'rgb565']: + self.funs.append(self._ColorConversion_RGB888_to_RGB565) + self.set['source_format'] = 'RGB565' + + ## to rgb888 + if self.set['source_format'].lower() in ['YUV444', 'yuv444','YUV422', 'yuv422', 'YUV', 'yuv']: + self.funs.append(self._ColorConversion_YUV_to_RGB888) + elif self.set['source_format'].lower() in ['YCBCR444', 'YCbCr444', 'ycbcr444','YCBCR422', 'YCbCr422', 'ycbcr422', 'YCBCR', 'YCbCr', 'ycbcr']: + self.funs.append(self._ColorConversion_YCbCr_to_RGB888) + elif self.set['source_format'].lower() in ['RGB565', 'rgb565']: + self.funs.append(self._ColorConversion_RGB565_to_RGB888) + elif self.set['source_format'].lower() in ['l', 'L' , 'nir', 'NIR']: + self.funs.append(self._ColorConversion_L_to_RGB888) + elif self.set['source_format'].lower() in ['RGBA8888', 'rgba8888' , 'RGBA', 'rgba']: + self.funs.append(self._ColorConversion_RGBA8888_to_RGB888) + + ## output format + if self.set['out_format'].lower() in ['L', 'l']: + self.funs.append(self._ColorConversion_RGB888_to_L) + elif self.set['out_format'].lower() in['RGB565', 'rgb565']: + self.funs.append(self._ColorConversion_RGB888_to_RGB565) + elif self.set['out_format'].lower() in['RGBA', 
'RGBA8888','rgba','rgba8888']: + self.funs.append(self._ColorConversion_RGB888_to_RGBA8888) + elif self.set['out_format'].lower() in['YUV', 'YUV444','yuv','yuv444']: + self.funs.append(self._ColorConversion_RGB888_to_YUV444) + elif self.set['out_format'].lower() in['YUV422','yuv422']: + self.funs.append(self._ColorConversion_RGB888_to_YUV422) + elif self.set['out_format'].lower() in['YCBCR', 'YCBCR444','YCbCr','YCbCr444','ycbcr','ycbcr444']: + self.funs.append(self._ColorConversion_RGB888_to_YCbCr444) + elif self.set['out_format'].lower() in['YCBCR422','YCbCr422','ycbcr422']: + self.funs.append(self._ColorConversion_RGB888_to_YCbCr422) + + def print_info(self): + print("", + "source_format:", self.set['source_format'], + ', out_format:', self.set['out_format'], + ', simulation:', self.set['options']['simulation'], + ', simulation_format:', self.set['options']['simulation_format']) + + def run(self, image_data): + assert isinstance(image_data, np.ndarray) + # print info + if str2bool(self.set['print_info']): + self.print_info() + + # color + for _, f in enumerate(self.funs): + image_data = f(image_data) + + # output + info = {} + return image_data, info + + def _ColorConversion_RGB888_to_YUV444(self, image): + ## floating + image = image.astype('float') + image = (image @ Matrix_rgb888_to_yuv + 0.5).astype('uint8') + return image + + def _ColorConversion_RGB888_to_YUV422(self, image): + # rgb888 to yuv444 + image = self._ColorConversion_RGB888_to_YUV444(image) + + # yuv444 to yuv422 + u2 = image[:, 0::2, 1] + u4 = np.repeat(u2, 2, axis=1) + v2 = image[:, 1::2, 2] + v4 = np.repeat(v2, 2, axis=1) + image[..., 1] = u4 + image[..., 2] = v4 + return image + + def _ColorConversion_YUV_to_RGB888(self, image): + ## fixed + h, w, c = image.shape + image_f = image.reshape((h * w, c)) + image_rgb_f = np.zeros(image_f.shape, dtype=np.uint8) + + for i in range(h * w): + image_y = image_f[i, 0] *1024 + if image_f[i, 1] > 127: + image_u = -((~(image_f[i, 1] - 1)) & 0xFF) + else: + image_u = image_f[i, 1] + if image_f[i, 2] > 127: + image_v = -((~(image_f[i, 2] - 1)) & 0xFF) + else: + image_v = image_f[i, 2] + + image_r = c00_yuv * image_y + c02_yuv * image_v + image_g = c10_yuv * image_y + c11_yuv * image_u + c12_yuv * image_v + image_b = c20_yuv * image_y + c21_yuv * image_u + + image_r = signed_rounding(image_r, format_bit) + image_g = signed_rounding(image_g, format_bit) + image_b = signed_rounding(image_b, format_bit) + + image_r = image_r >> format_bit + image_g = image_g >> format_bit + image_b = image_b >> format_bit + + image_rgb_f[i, 0] = clip(image_r, 0, 255) + image_rgb_f[i, 1] = clip(image_g, 0, 255) + image_rgb_f[i, 2] = clip(image_b, 0, 255) + + image_rgb = image_rgb_f.reshape((h, w, c)) + return image_rgb + + def _ColorConversion_RGB888_to_YCbCr444(self, image): + ## floating + image = image.astype('float') + image = (image @ Matrix_rgb888_to_ycbcr + 0.5).astype('uint8') + image[:, :, 0] += 16 + image[:, :, 1] += 128 + image[:, :, 2] += 128 + + return image + + def _ColorConversion_RGB888_to_YCbCr422(self, image): + # rgb888 to ycbcr444 + image = self._ColorConversion_RGB888_to_YCbCr444(image) + + # ycbcr444 to ycbcr422 + cb2 = image[:, 0::2, 1] + cb4 = np.repeat(cb2, 2, axis=1) + cr2 = image[:, 1::2, 2] + cr4 = np.repeat(cr2, 2, axis=1) + image[..., 1] = cb4 + image[..., 2] = cr4 + return image + + def _ColorConversion_YCbCr_to_RGB888(self, image): + ## floating + if (self.set['numerical_type'] == 'floating'): + image = image.astype('float') + image[:, :, 0] -= 16 + image[:, :, 1] -= 128 + 
image[:, :, 2] -= 128 + image = ((image @ Matrix_ycbcr_to_rgb888) + 0.5).astype('uint8') + return image + + ## fixed + h, w, c = image.shape + image_f = image.reshape((h * w, c)) + image_rgb_f = np.zeros(image_f.shape, dtype=np.uint8) + + for i in range(h * w): + image_y = (image_f[i, 0] - 16) * c00_ycbcr + image_cb = image_f[i, 1] - 128 + image_cr = image_f[i, 2] - 128 + + image_r = image_y + c02_ycbcr * image_cr + image_g = image_y + c11_ycbcr * image_cb + c12_ycbcr * image_cr + image_b = image_y + c21_ycbcr * image_cb + + image_r = signed_rounding(image_r, format_bit) + image_g = signed_rounding(image_g, format_bit) + image_b = signed_rounding(image_b, format_bit) + + image_r = image_r >> format_bit + image_g = image_g >> format_bit + image_b = image_b >> format_bit + + image_rgb_f[i, 0] = clip(image_r, 0, 255) + image_rgb_f[i, 1] = clip(image_g, 0, 255) + image_rgb_f[i, 2] = clip(image_b, 0, 255) + + image_rgb = image_rgb_f.reshape((h, w, c)) + return image_rgb + + def _ColorConversion_RGB888_to_RGB565(self, image): + assert (len(image.shape)==3) + assert (image.shape[2]>=3) + + image_rgb565 = np.zeros(image.shape, dtype=np.uint8) + image_rgb = image.astype('uint8') + image_rgb565[:, :, 0] = image_rgb[:, :, 0] >> 3 + image_rgb565[:, :, 1] = image_rgb[:, :, 1] >> 2 + image_rgb565[:, :, 2] = image_rgb[:, :, 2] >> 3 + return image_rgb565 + + def _ColorConversion_RGB565_to_RGB888(self, image): + assert (len(image.shape)==3) + assert (image.shape[2]==3) + + image_rgb = np.zeros(image.shape, dtype=np.uint8) + image_rgb[:, :, 0] = image[:, :, 0] << 3 + image_rgb[:, :, 1] = image[:, :, 1] << 2 + image_rgb[:, :, 2] = image[:, :, 2] << 3 + return image_rgb + + def _ColorConversion_L_to_RGB888(self, image): + image_L = image.astype('uint8') + img = Image.fromarray(image_L).convert('RGB') + image_data = np.array(img).astype('uint8') + return image_data + + def _ColorConversion_RGB888_to_L(self, image): + image_rgb = image.astype('uint8') + img = Image.fromarray(image_rgb).convert('L') + image_data = np.array(img).astype('uint8') + return image_data + + def _ColorConversion_RGBA8888_to_RGB888(self, image): + assert (len(image.shape)==3) + assert (image.shape[2]==4) + return image[:,:,:3] + + def _ColorConversion_RGB888_to_RGBA8888(self, image): + assert (len(image.shape)==3) + assert (image.shape[2]==3) + imageA = np.concatenate((image, np.zeros((image.shape[0], image.shape[1], 1), dtype=np.uint8) ), axis=2) + return imageA diff --git a/kneron/preprocessing/funcs/Crop.py b/kneron/preprocessing/funcs/Crop.py new file mode 100644 index 0000000..3dcdb71 --- /dev/null +++ b/kneron/preprocessing/funcs/Crop.py @@ -0,0 +1,145 @@ +import numpy as np +from PIL import Image +from .utils import str2int, str2float, str2bool, pad_square_to_4 +from .utils_520 import round_up_n +from .Runner_base import Runner_base, Param_base + +class General(Param_base): + type = 'center' + align_w_to_4 = False + pad_square_to_4 = False + rounding_type = 0 + crop_w = 0 + crop_h = 0 + start_x = 0. + start_y = 0. + end_x = 0. + end_y = 0. 
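+
+    # Worked example of the box these defaults drive (hypothetical 640x480
+    # source, crop_w = crop_h = 224): _calculate_xy_center below returns
+    # x1 = 320 - 112 = 208, y1 = 240 - 112 = 128, x2 = 432, y2 = 352.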
+    def update(self, **dic):
+        self.type = dic['type']
+        self.align_w_to_4 = str2bool(dic['align_w_to_4'])
+        # previously dropped from dic: keep the raw yes/no value here, since
+        # str2bool is applied at the point of use in run()
+        self.pad_square_to_4 = dic['pad_square_to_4']
+        self.rounding_type = str2int(dic['rounding_type'])
+        self.crop_w = str2int(dic['crop_w'])
+        self.crop_h = str2int(dic['crop_h'])
+        self.start_x = str2float(dic['start_x'])
+        self.start_y = str2float(dic['start_y'])
+        self.end_x = str2float(dic['end_x'])
+        self.end_y = str2float(dic['end_y'])
+
+    def __str__(self):
+        str_out = [
+            ', type:',str(self.type),
+            ', align_w_to_4:',str(self.align_w_to_4),
+            ', pad_square_to_4:',str(self.pad_square_to_4),
+            ', crop_w:',str(self.crop_w),
+            ', crop_h:',str(self.crop_h),
+            ', start_x:',str(self.start_x),
+            ', start_y:',str(self.start_y),
+            ', end_x:',str(self.end_x),
+            ', end_y:',str(self.end_y)]
+        return(' '.join(str_out))
+
+class runner(Runner_base):
+    ## overwrite the class in Runner_base
+    general = General()
+
+    def __str__(self):
+        return('')
+
+    def update(self, **kwargs):
+        ##
+        super().update(**kwargs)
+
+        ##
+        if (self.general.start_x != self.general.end_x) and (self.general.start_y != self.general.end_y):
+            self.general.type = 'specific'
+        elif(self.general.type != 'specific'):
+            if self.general.crop_w == 0 or self.general.crop_h == 0:
+                self.general.crop_w = self.common.model_size[0]
+                self.general.crop_h = self.common.model_size[1]
+            assert(self.general.crop_w > 0)
+            assert(self.general.crop_h > 0)
+            assert(self.general.type.lower() in ['center', 'corner'])
+        else:
+            assert(self.general.type == 'specific')
+
+    def run(self, image_data):
+        ## init
+        img = Image.fromarray(image_data)
+        w, h = img.size
+
+        ## get range
+        if self.general.type.lower() == 'center':
+            x1, y1, x2, y2 = self._calculate_xy_center(w, h)
+        elif self.general.type.lower() == 'corner':
+            x1, y1, x2, y2 = self._calculate_xy_corner(w, h)
+        else:
+            x1 = self.general.start_x
+            y1 = self.general.start_y
+            x2 = self.general.end_x
+            y2 = self.general.end_y
+            assert( ((x1 != x2) and (y1 != y2)) )
+
+        ## rounding
+        if self.general.rounding_type == 0:
+            x1 = int(np.floor(x1))
+            y1 = int(np.floor(y1))
+            x2 = int(np.ceil(x2))
+            y2 = int(np.ceil(y2))
+        else:
+            x1 = int(round(x1))
+            y1 = int(round(y1))
+            x2 = int(round(x2))
+            y2 = int(round(y2))
+
+        if self.general.align_w_to_4:
+            # x1 = (x1+1) &(~3)  #//+2
+            # x2 = (x2+2) &(~3)  #//+1
+            x1 = (x1+3) &(~3)  #//+2
+            left = w - x2
+            left = (left+3) &(~3)
+            x2 = w - left
+
+        ## pad_square_to_4
+        if str2bool(self.general.pad_square_to_4):
+            x1,x2,y1,y2 = pad_square_to_4(x1,x2,y1,y2)
+
+        # do crop
+        box = (x1,y1,x2,y2)
+        img = img.crop(box)
+
+        # print info
+        if str2bool(self.common.print_info):
+            self.general.start_x = x1
+            self.general.start_y = y1
+            self.general.end_x = x2
+            self.general.end_y = y2
+            self.general.crop_w = x2 - x1
+            self.general.crop_h = y2 - y1
+            self.print_info()
+
+        # output
+        image_data = np.array(img)
+        info = {}
+        info['box'] = box
+
+        return image_data, info
+
+    ## protected funcs
+    def _calculate_xy_center(self, w, h):
+        x1 = w/2 - self.general.crop_w / 2
+        y1 = h/2 - self.general.crop_h / 2
+        x2 = w/2 + self.general.crop_w / 2
+        y2 = h/2 + self.general.crop_h / 2
+        return x1, y1, x2, y2
+
+    def _calculate_xy_corner(self, _1, _2):
+        x1 = 0
+        y1 = 0
+        x2 = self.general.crop_w
+        y2 = self.general.crop_h
+        return x1, y1, x2, y2
+
+    def do_crop(self, image_data, startW, startH, endW, endH):
+        return image_data[startH:endH, startW:endW, :]
diff --git a/kneron/preprocessing/funcs/Normalize.py b/kneron/preprocessing/funcs/Normalize.py
new
file mode 100644 index 0000000..0760fba --- /dev/null +++ b/kneron/preprocessing/funcs/Normalize.py @@ -0,0 +1,186 @@ +import numpy as np +from .utils import str2bool, str2int, str2float, clip_ary + +class runner(object): + def __init__(self): + self.set = { + 'general': { + 'print_info':'no', + 'model_size':[0,0], + 'numerical_type':'floating', + 'type': 'kneron' + }, + 'floating':{ + "scale": 1, + "bias": 0, + "mean": "", + "std": "", + }, + 'hw':{ + "radix":8, + "shift":"", + "sub":"" + } + } + return + + def update(self, **kwargs): + # + self.set.update(kwargs) + + # + if self.set['general']['numerical_type'] == '520': + if self.set['general']['type'].lower() in ['TF', 'Tf', 'tf']: + self.fun_normalize = self._chen_520 + self.shift = 7 - self.set['hw']['radix'] + self.sub = 128 + elif self.set['general']['type'].lower() in ['YOLO', 'Yolo', 'yolo']: + self.fun_normalize = self._chen_520 + self.shift = 8 - self.set['hw']['radix'] + self.sub = 0 + elif self.set['general']['type'].lower() in ['KNERON', 'Kneron', 'kneron']: + self.fun_normalize = self._chen_520 + self.shift = 8 - self.set['hw']['radix'] + self.sub = 128 + else: + self.fun_normalize = self._chen_520 + self.shift = 0 + self.sub = 0 + elif self.set['general']['numerical_type'] == '720': + self.fun_normalize = self._chen_720 + self.shift = 0 + self.sub = 0 + else: + if self.set['general']['type'].lower() in ['TORCH', 'Torch', 'torch']: + self.fun_normalize = self._normalize_torch + self.set['floating']['scale'] = 255. + self.set['floating']['mean'] = [0.485, 0.456, 0.406] + self.set['floating']['std'] = [0.229, 0.224, 0.225] + elif self.set['general']['type'].lower() in ['TF', 'Tf', 'tf']: + self.fun_normalize = self._normalize_tf + self.set['floating']['scale'] = 127.5 + self.set['floating']['bias'] = -1. + elif self.set['general']['type'].lower() in ['CAFFE', 'Caffe', 'caffe']: + self.fun_normalize = self._normalize_caffe + self.set['floating']['mean'] = [103.939, 116.779, 123.68] + elif self.set['general']['type'].lower() in ['YOLO', 'Yolo', 'yolo']: + self.fun_normalize = self._normalize_yolo + self.set['floating']['scale'] = 255. + elif self.set['general']['type'].lower() in ['KNERON', 'Kneron', 'kneron']: + self.fun_normalize = self._normalize_kneron + self.set['floating']['scale'] = 256. 
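+                # worked example: with scale = 256 and bias = -0.5 (set just
+                # below), a uint8 pixel of 200 maps to 200/256 - 0.5 = 0.28125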
+                self.set['floating']['bias'] = -0.5
+            else:
+                self.fun_normalize = self._normalize_customized
+                self.set['floating']['scale'] = str2float(self.set['floating']['scale'])
+                self.set['floating']['bias'] = str2float(self.set['floating']['bias'])
+                if self.set['floating']['mean'] is not None:
+                    if len(self.set['floating']['mean']) != 3:
+                        self.set['floating']['mean'] = None
+                if self.set['floating']['std'] is not None:
+                    if len(self.set['floating']['std']) != 3:
+                        self.set['floating']['std'] = None
+
+    def print_info(self):
+        if self.set['general']['numerical_type'] == '520':
+            print("",
+                  'numerical_type', self.set['general']['numerical_type'],
+                  ", type:", self.set['general']['type'],
+                  ', shift:',self.shift,
+                  ', sub:', self.sub)
+        else:
+            print("",
+                  'numerical_type', self.set['general']['numerical_type'],
+                  ", type:", self.set['general']['type'],
+                  ', scale:',self.set['floating']['scale'],
+                  ', bias:', self.set['floating']['bias'],
+                  ', mean:', self.set['floating']['mean'],
+                  ', std:',self.set['floating']['std'])
+
+    def run(self, image_data):
+        # print info
+        if str2bool(self.set['general']['print_info']):
+            self.print_info()
+
+        # norm
+        image_data = self.fun_normalize(image_data)
+
+        # output
+        info = {}
+        return image_data, info
+
+    def _normalize_torch(self, x):
+        if len(x.shape) != 3:
+            return x
+        x = x.astype('float')
+        x = x / self.set['floating']['scale']
+        x[..., 0] -= self.set['floating']['mean'][0]
+        x[..., 1] -= self.set['floating']['mean'][1]
+        x[..., 2] -= self.set['floating']['mean'][2]
+        x[..., 0] /= self.set['floating']['std'][0]
+        x[..., 1] /= self.set['floating']['std'][1]
+        x[..., 2] /= self.set['floating']['std'][2]
+        return x
+
+    def _normalize_tf(self, x):
+        x = x.astype('float')
+        x = x / self.set['floating']['scale']
+        x = x + self.set['floating']['bias']
+        return x
+
+    def _normalize_caffe(self, x):
+        if len(x.shape) != 3:
+            return x
+        x = x.astype('float')
+        x = x[..., ::-1]
+        x[..., 0] -= self.set['floating']['mean'][0]
+        x[..., 1] -= self.set['floating']['mean'][1]
+        x[..., 2] -= self.set['floating']['mean'][2]
+        return x
+
+    def _normalize_yolo(self, x):
+        x = x.astype('float')
+        x = x / self.set['floating']['scale']
+        return x
+
+    def _normalize_kneron(self, x):
+        x = x.astype('float')
+        x = x/self.set['floating']['scale']
+        x = x + self.set['floating']['bias']
+        return x
+
+    def _normalize_customized(self, x):
+        x = x.astype('float')
+        if self.set['floating']['scale'] != 0:
+            x = x/ self.set['floating']['scale']
+        x = x + self.set['floating']['bias']
+        if self.set['floating']['mean'] is not None:
+            x[..., 0] -= self.set['floating']['mean'][0]
+            x[..., 1] -= self.set['floating']['mean'][1]
+            x[..., 2] -= self.set['floating']['mean'][2]
+        if self.set['floating']['std'] is not None:
+            x[..., 0] /= self.set['floating']['std'][0]
+            x[..., 1] /= self.set['floating']['std'][1]
+            x[..., 2] /= self.set['floating']['std'][2]
+
+        return x
+
+    def _chen_520(self, x):
+        x = (x - self.sub).astype('uint8')
+        x = (np.right_shift(x,self.shift))
+        x = x.astype('uint8')
+        return x
+
+    def _chen_720(self, x):
+        # both branches of the original shift test performed the same add, so
+        # the dead conditional is collapsed here
+        x = x + np.array([[self.sub], [self.sub], [self.sub]])
+        return x
\ No newline at end of file
diff --git a/kneron/preprocessing/funcs/Padding.py b/kneron/preprocessing/funcs/Padding.py
new file mode 100644
index
diff --git a/kneron/preprocessing/funcs/Padding.py b/kneron/preprocessing/funcs/Padding.py
new file mode 100644
index 0000000..e1af1c5
--- /dev/null
+++ b/kneron/preprocessing/funcs/Padding.py
@@ -0,0 +1,187 @@
+import numpy as np
+from PIL import Image
+from .utils import str2bool, str2int, str2float
+from .Runner_base import Runner_base, Param_base
+
+class General(Param_base):
+    type = ''
+    pad_val = ''
+    padded_w = ''
+    padded_h = ''
+    pad_l = ''
+    pad_r = ''
+    pad_t = ''
+    pad_b = ''
+    padding_ch = 3
+    padding_ch_type = 'RGB'
+    def update(self, **dic):
+        self.type = dic['type']
+        self.pad_val = dic['pad_val']
+        self.padded_w = str2int(dic['padded_w'])
+        self.padded_h = str2int(dic['padded_h'])
+        self.pad_l = str2int(dic['pad_l'])
+        self.pad_r = str2int(dic['pad_r'])
+        self.pad_t = str2int(dic['pad_t'])
+        self.pad_b = str2int(dic['pad_b'])
+
+    def __str__(self):
+        str_out = [
+            ', type:', str(self.type),
+            ', pad_val:', str(self.pad_val),
+            ', pad_l:', str(self.pad_l),
+            ', pad_r:', str(self.pad_r),
+            ', pad_t:', str(self.pad_t),
+            ', pad_b:', str(self.pad_b),
+            ', padding_ch:', str(self.padding_ch)]
+        return(' '.join(str_out))
+
+class Hw(Param_base):
+    radix = 8
+    normalize_type = 'floating'
+    def update(self, **dic):
+        self.radix = dic['radix']
+        self.normalize_type = dic['normalize_type']
+
+    def __str__(self):
+        str_out = [
+            ', radix:', str(self.radix),
+            ', normalize_type:', str(self.normalize_type)]
+        return(' '.join(str_out))
+
+
+class runner(Runner_base):
+    ## override the class attributes from Runner_base
+    general = General()
+    hw = Hw()
+
+    def __str__(self):
+        return('')
+
+    def update(self, **kwargs):
+        super().update(**kwargs)
+
+        ## update pad type & pad length
+        if (self.general.pad_l != 0) or (self.general.pad_r != 0) or (self.general.pad_t != 0) or (self.general.pad_b != 0):
+            self.general.type = 'specific'
+            assert(self.general.pad_l >= 0)
+            assert(self.general.pad_r >= 0)
+            assert(self.general.pad_t >= 0)
+            assert(self.general.pad_b >= 0)
+        elif(self.general.type != 'specific'):
+            if self.general.padded_w == 0 or self.general.padded_h == 0:
+                self.general.padded_w = self.common.model_size[0]
+                self.general.padded_h = self.common.model_size[1]
+            assert(self.general.padded_w > 0)
+            assert(self.general.padded_h > 0)
+            assert(self.general.type.lower() in ['center', 'corner'])
+        else:
+            assert(self.general.type == 'specific')
+
+        ## decide pad_val & padding ch
+        # if numerical_type is floating
+        if (self.common.numerical_type == 'floating'):
+            if self.general.pad_val != 'edge':
+                self.general.pad_val = str2float(self.general.pad_val)
+            self.general.padding_ch = 3
+            self.general.padding_ch_type = 'RGB'
+        # if numerical_type is 520 or 720
+        else:
+            if self.general.pad_val == '':
+                if self.hw.normalize_type.lower() == 'tf':
+                    self.general.pad_val = np.uint8(-128 >> (7 - self.hw.radix))
+                elif self.hw.normalize_type.lower() == 'yolo':
+                    self.general.pad_val = np.uint8(0 >> (8 - self.hw.radix))
+                elif self.hw.normalize_type.lower() == 'kneron':
+                    self.general.pad_val = np.uint8(-128 >> (8 - self.hw.radix))
+                else:
+                    self.general.pad_val = np.uint8(0 >> (8 - self.hw.radix))
+            else:
+                self.general.pad_val = str2int(self.general.pad_val)
+            self.general.padding_ch = 4
+            self.general.padding_ch_type = 'RGBA'
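The 520/720 branch above picks a default pad value that matches what the chosen normalize type does to a zero (or -128-shifted) pixel; the arithmetic relies on Python's arithmetic right shift plus uint8 wraparound. A standalone illustration (not part of the commit):

# kneron-style default, radix 8: -128 >> 0 == -128, wrapped into uint8 -> 128
print((-128 >> (8 - 8)) % 256)   # 128
# radix 7: -128 >> 1 == -64 -> 192 after wraparound
print((-128 >> (8 - 7)) % 256)   # 192
# the tf branch shifts by (7 - radix), so it assumes radix <= 7 there:
# a radix of 8 would make the shift count negative, which Python rejects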
+    def run(self, image_data):
+        # init
+        shape = image_data.shape
+        w = shape[1]
+        h = shape[0]
+        if len(shape) < 3:
+            self.general.padding_ch = 1
+            self.general.padding_ch_type = 'L'
+        else:
+            if shape[2] == 3 and self.general.padding_ch == 4:
+                image_data = np.concatenate((image_data, np.zeros((h, w, 1), dtype=np.uint8)), axis=2)
+
+        ## padding
+        if self.general.type.lower() == 'center':
+            img_pad = self._padding_center(image_data, w, h)
+        elif self.general.type.lower() == 'corner':
+            img_pad = self._padding_corner(image_data, w, h)
+        else:
+            img_pad = self._padding_sp(image_data, w, h)
+
+        # print info
+        if str2bool(self.common.print_info):
+            self.print_info()
+
+        # output
+        info = {}
+        return img_pad, info
+
+    ## protected functions
+    def _padding_center(self, img, ori_w, ori_h):
+        # img_pad = Image.new(self.general.padding_ch_type, (self.general.padded_w, self.general.padded_h), int(self.general.pad_val[0]))
+        # img = Image.fromarray(img)
+        # img_pad.paste(img, ((self.general.padded_w-ori_w)//2, (self.general.padded_h-ori_h)//2))
+        # return img_pad
+        padH = self.general.padded_h - ori_h
+        padW = self.general.padded_w - ori_w
+        self.general.pad_t = padH // 2
+        self.general.pad_b = (padH // 2) + (padH % 2)
+        self.general.pad_l = padW // 2
+        self.general.pad_r = (padW // 2) + (padW % 2)
+        if self.general.pad_l < 0 or self.general.pad_r < 0 or self.general.pad_t < 0 or self.general.pad_b < 0:
+            return img
+        img_pad = self._padding_sp(img, ori_w, ori_h)
+        return img_pad
+
+    def _padding_corner(self, img, ori_w, ori_h):
+        # img_pad = Image.new(self.general.padding_ch_type, (self.general.padded_w, self.general.padded_h), self.general.pad_val)
+        # img_pad.paste(img, (0, 0))
+        self.general.pad_l = 0
+        self.general.pad_r = self.general.padded_w - ori_w
+        self.general.pad_t = 0
+        self.general.pad_b = self.general.padded_h - ori_h
+        if self.general.pad_l < 0 or self.general.pad_r < 0 or self.general.pad_t < 0 or self.general.pad_b < 0:
+            return img
+        img_pad = self._padding_sp(img, ori_w, ori_h)
+        return img_pad
+
+    def _padding_sp(self, img, ori_w, ori_h):
+        # block_t = np.zeros((self.general.pad_t, self.general.pad_l + self.general.pad_r + ori_w, self.general.padding_ch), dtype=np.float)
+        # block_l = np.zeros((ori_h, self.general.pad_l, self.general.padding_ch), dtype=np.float)
+        # block_r = np.zeros((ori_h, self.general.pad_r, self.general.padding_ch), dtype=np.float)
+        # block_b = np.zeros((self.general.pad_b, self.general.pad_l + self.general.pad_r + ori_w, self.general.padding_ch), dtype=np.float)
+        # for i in range(self.general.padding_ch):
+        #     block_t[:, :, i] = np.ones(block_t[:, :, i].shape, dtype=np.float) * self.general.pad_val
+        #     block_l[:, :, i] = np.ones(block_l[:, :, i].shape, dtype=np.float) * self.general.pad_val
+        #     block_r[:, :, i] = np.ones(block_r[:, :, i].shape, dtype=np.float) * self.general.pad_val
+        #     block_b[:, :, i] = np.ones(block_b[:, :, i].shape, dtype=np.float) * self.general.pad_val
+        # padded_image_hor = np.concatenate((block_l, img, block_r), axis=1)
+        # padded_image = np.concatenate((block_t, padded_image_hor, block_b), axis=0)
+        # return padded_image
+        if self.general.padding_ch == 1:
+            pad_range = ((self.general.pad_t, self.general.pad_b), (self.general.pad_l, self.general.pad_r))
+        else:
+            pad_range = ((self.general.pad_t, self.general.pad_b), (self.general.pad_l, self.general.pad_r), (0, 0))
+
+        if isinstance(self.general.pad_val, str):
+            if self.general.pad_val == 'edge':
+                padded_image = np.pad(img, pad_range, mode="edge")
+            else:
+                padded_image = np.pad(img, pad_range, mode="constant", constant_values=0)
+        else:
+            padded_image = np.pad(img, pad_range, mode="constant", constant_values=self.general.pad_val)
+
+        return padded_image
+
diff --git a/kneron/preprocessing/funcs/Resize.py b/kneron/preprocessing/funcs/Resize.py new file mode
100644 index 0000000..8e948b9 --- /dev/null +++ b/kneron/preprocessing/funcs/Resize.py @@ -0,0 +1,237 @@ +import numpy as np +import cv2 +from PIL import Image +from .utils import str2bool, str2int +from ctypes import c_float +from .Runner_base import Runner_base, Param_base + +class General(Param_base): + type = 'bilinear' + keep_ratio = True + zoom = True + calculate_ratio_using_CSim = True + resize_w = 0 + resize_h = 0 + resized_w = 0 + resized_h = 0 + def update(self, **dic): + self.type = dic['type'] + self.keep_ratio = str2bool(dic['keep_ratio']) + self.zoom = str2bool(dic['zoom']) + self.calculate_ratio_using_CSim = str2bool(dic['calculate_ratio_using_CSim']) + self.resize_w = str2int(dic['resize_w']) + self.resize_h = str2int(dic['resize_h']) + + def __str__(self): + str_out = [ + ', type:',str(self.type), + ', keep_ratio:',str(self.keep_ratio), + ', zoom:',str(self.zoom), + ', calculate_ratio_using_CSim:',str(self.calculate_ratio_using_CSim), + ', resize_w:',str(self.resize_w), + ', resize_h:',str(self.resize_h), + ', resized_w:',str(self.resized_w), + ', resized_h:',str(self.resized_h)] + return(' '.join(str_out)) + +class Hw(Param_base): + resize_bit = 12 + def update(self, **dic): + pass + + def __str__(self): + str_out = [ + ', resize_bit:',str(self.resize_bit)] + return(' '.join(str_out)) + +class runner(Runner_base): + ## overwrite the class in Runner_base + general = General() + hw = Hw() + + def __str__(self): + return('') + + def update(self, **kwargs): + super().update(**kwargs) + + ## if resize size has not been assigned, then it will take model size as resize size + if self.general.resize_w == 0 or self.general.resize_h == 0: + self.general.resize_w = self.common.model_size[0] + self.general.resize_h = self.common.model_size[1] + assert(self.general.resize_w > 0) + assert(self.general.resize_h > 0) + + ## + if self.common.numerical_type == '520': + self.general.type = 'fixed_520' + elif self.common.numerical_type == '720': + self.general.type = 'fixed_720' + assert(self.general.type.lower() in ['BILINEAR', 'Bilinear', 'bilinear', 'BICUBIC', 'Bicubic', 'bicubic', 'FIXED', 'Fixed', 'fixed', 'FIXED_520', 'Fixed_520', 'fixed_520', 'FIXED_720', 'Fixed_720', 'fixed_720','CV', 'cv', 'opencv', 'OpenCV', 'CV2', 'cv2']) + + + def run(self, image_data): + ## init + ori_w = image_data.shape[1] + ori_h = image_data.shape[0] + info = {} + + ## + if self.general.keep_ratio: + self.general.resized_w, self.general.resized_h = self.calcuate_scale_keep_ratio(self.general.resize_w,self.general.resize_h, ori_w, ori_h, self.general.calculate_ratio_using_CSim) + else: + self.general.resized_w = int(self.general.resize_w) + self.general.resized_h = int(self.general.resize_h) + assert(self.general.resized_w > 0) + assert(self.general.resized_h > 0) + + ## + if (self.general.resized_w > ori_w) or (self.general.resized_h > ori_h): + if not self.general.zoom: + info['size'] = (ori_w,ori_h) + if str2bool(self.common.print_info): + print('no resize') + self.print_info() + return image_data, info + + ## resize + if self.general.type.lower() in ['BILINEAR', 'Bilinear', 'bilinear']: + image_data = self.do_resize_bilinear(image_data, self.general.resized_w, self.general.resized_h) + elif self.general.type.lower() in ['BICUBIC', 'Bicubic', 'bicubic']: + image_data = self.do_resize_bicubic(image_data, self.general.resized_w, self.general.resized_h) + elif self.general.type.lower() in ['CV', 'cv', 'opencv', 'OpenCV', 'CV2', 'cv2']: + image_data = self.do_resize_cv2(image_data, self.general.resized_w, 
self.general.resized_h) + elif self.general.type.lower() in ['FIXED', 'Fixed', 'fixed', 'FIXED_520', 'Fixed_520', 'fixed_520', 'FIXED_720', 'Fixed_720', 'fixed_720']: + image_data = self.do_resize_fixed(image_data, self.general.resized_w, self.general.resized_h, self.hw.resize_bit, self.general.type) + + + # output + info['size'] = (self.general.resized_w, self.general.resized_h) + + # print info + if str2bool(self.common.print_info): + self.print_info() + + return image_data, info + + def calcuate_scale_keep_ratio(self, tar_w, tar_h, ori_w, ori_h, calculate_ratio_using_CSim): + if not calculate_ratio_using_CSim: + scale_w = tar_w * 1.0 / ori_w*1.0 + scale_h = tar_h * 1.0 / ori_h*1.0 + scale = scale_w if scale_w < scale_h else scale_h + new_w = int(round(ori_w * scale)) + new_h = int(round(ori_h * scale)) + return new_w, new_h + + ## calculate_ratio_using_CSim + scale_w = c_float(tar_w * 1.0 / (ori_w * 1.0)).value + scale_h = c_float(tar_h * 1.0 / (ori_h * 1.0)).value + scale_ratio = 0.0 + scale_target_w = 0 + scale_target_h = 0 + padH = 0 + padW = 0 + + bScaleW = True if scale_w < scale_h else False + if bScaleW: + scale_ratio = scale_w + scale_target_w = int(c_float(scale_ratio * ori_w + 0.5).value) + scale_target_h = int(c_float(scale_ratio * ori_h + 0.5).value) + assert (abs(scale_target_w - tar_w) <= 1), "Error: scale down width cannot meet expectation\n" + padH = tar_h - scale_target_h + padW = 0 + assert (padH >= 0), "Error: padH shouldn't be less than zero\n" + else: + scale_ratio = scale_h + scale_target_w = int(c_float(scale_ratio * ori_w + 0.5).value) + scale_target_h = int(c_float(scale_ratio * ori_h + 0.5).value) + assert (abs(scale_target_h - tar_h) <= 1), "Error: scale down height cannot meet expectation\n" + padW = tar_w - scale_target_w + padH = 0 + assert (padW >= 0), "Error: padW shouldn't be less than zero\n" + new_w = tar_w - padW + new_h = tar_h - padH + return new_w, new_h + + def do_resize_bilinear(self, image_data, resized_w, resized_h): + img = Image.fromarray(image_data) + img = img.resize((resized_w, resized_h), Image.BILINEAR) + image_data = np.array(img).astype('uint8') + return image_data + + def do_resize_bicubic(self, image_data, resized_w, resized_h): + img = Image.fromarray(image_data) + img = img.resize((resized_w, resized_h), Image.BICUBIC) + image_data = np.array(img).astype('uint8') + return image_data + + def do_resize_cv2(self, image_data, resized_w, resized_h): + image_data = cv2.resize(image_data, (resized_w, resized_h)) + image_data = np.array(image_data) + # image_data = np.array(image_data).astype('uint8') + return image_data + + def do_resize_fixed(self, image_data, resized_w, resized_h, resize_bit, type): + if len(image_data.shape) < 3: + m, n = image_data.shape + tmp = np.zeros((m,n,3), dtype=np.uint8) + tmp[:,:,0] = image_data + image_data = tmp + c = 3 + gray = True + else: + m, n, c = image_data.shape + gray = False + + resolution = 1 << resize_bit + + # Width + ratio = int(((n - 1) << resize_bit) / (resized_w - 1)) + ratio_cnt = 0 + src_x = 0 + resized_image_w = np.zeros((m, resized_w, c), dtype=np.uint8) + + for dst_x in range(resized_w): + while ratio_cnt > resolution: + ratio_cnt = ratio_cnt - resolution + src_x = src_x + 1 + mul1 = np.ones((m, c)) * (resolution - ratio_cnt) + mul2 = np.ones((m, c)) * ratio_cnt + resized_image_w[:, dst_x, :] = np.multiply(np.multiply( + image_data[:, src_x, :], mul1) + np.multiply(image_data[:, src_x + 1, :], mul2), 1/resolution) + ratio_cnt = ratio_cnt + ratio + + # Height + ratio = int(((m - 1) << 
resize_bit) / (resized_h - 1))
+        ## NPU HW special case 2, only on 520
+        if type.lower() == 'fixed_520':
+            if (((ratio * (resized_h - 1)) % 4096 == 0) and ratio != 4096):
+                ratio -= 1
+
+        ratio_cnt = 0
+        src_x = 0
+        resized_image = np.zeros((resized_h, resized_w, c), dtype=np.uint8)
+        for dst_x in range(resized_h):
+            while ratio_cnt > resolution:
+                ratio_cnt = ratio_cnt - resolution
+                src_x = src_x + 1
+
+            mul1 = np.ones((resized_w, c)) * (resolution - ratio_cnt)
+            mul2 = np.ones((resized_w, c)) * ratio_cnt
+
+            ## NPU HW special case 1, on both 520 / 720
+            if (((dst_x > 0) and ratio_cnt == resolution) and (ratio != resolution)):
+                if type.lower() in ['fixed_520', 'fixed_720']:
+                    resized_image[dst_x, :, :] = np.multiply(np.multiply(
+                        resized_image_w[src_x + 1, :, :], mul1) + np.multiply(resized_image_w[src_x + 2, :, :], mul2), 1/resolution)
+            else:
+                resized_image[dst_x, :, :] = np.multiply(np.multiply(
+                    resized_image_w[src_x, :, :], mul1) + np.multiply(resized_image_w[src_x + 1, :, :], mul2), 1/resolution)
+
+            ratio_cnt = ratio_cnt + ratio
+
+        if gray:
+            resized_image = resized_image[:, :, 0]
+
+        return resized_image
diff --git a/kneron/preprocessing/funcs/Rotate.py b/kneron/preprocessing/funcs/Rotate.py
new file mode 100644
index 0000000..63f882f
--- /dev/null
+++ b/kneron/preprocessing/funcs/Rotate.py
@@ -0,0 +1,45 @@
+import numpy as np
+from .utils import str2bool, str2int
+
+class runner(object):
+    def __init__(self, *args, **kwargs):
+        self.set = {
+            'operator': '',
+            "rotate_direction": 0,
+            'b_print': 'no',  # update() reads this key; without a default it raises KeyError
+        }
+        self.update(*args, **kwargs)
+
+    def update(self, *args, **kwargs):
+        self.set.update(kwargs)
+        self.rotate_direction = str2int(self.set['rotate_direction'])
+
+        # print info
+        if str2bool(self.set['b_print']):
+            self.print_info()
+
+    def print_info(self):
+        print("",
+            'rotate_direction', self.rotate_direction,)
+
+    def run(self, image_data):
+        image_data = self._rotate(image_data)
+        return image_data
+
+    def _rotate(self, img):
+        if self.rotate_direction == 1 or self.rotate_direction == 2:
+            col, row, unit = img.shape
+            pInBuf = img.reshape((-1, 1))
+            # note: np.zeros defaults to float64, so the rotated image comes back as float
+            pOutBufTemp = np.zeros((col * row * unit))
+            for r in range(row):
+                for c in range(col):
+                    for u in range(unit):
+                        if self.rotate_direction == 1:
+                            pOutBufTemp[unit * (c * row + (row - r - 1)) + u] = pInBuf[unit * (r * col + c) + u]
+                        elif self.rotate_direction == 2:
+                            pOutBufTemp[unit * (row * (col - c - 1) + r) + u] = pInBuf[unit * (r * col + c) + u]
+
+            img = pOutBufTemp.reshape((col, row, unit))
+
+        return img
diff --git a/kneron/preprocessing/funcs/Runner_base.py b/kneron/preprocessing/funcs/Runner_base.py
new file mode 100644
index 0000000..7bedbcf
--- /dev/null
+++ b/kneron/preprocessing/funcs/Runner_base.py
@@ -0,0 +1,59 @@
+from abc import ABCMeta, abstractmethod
+
+class Param_base(object):
+    @abstractmethod
+    def update(self, **dic):
+        raise NotImplementedError("Must override")
+
+    def load_dic(self, key, **dic):
+        if key in dic:
+            # the original rebound a local variable here, which had no effect;
+            # setattr actually stores the value on the instance
+            setattr(self, key, dic[key])
+
+    def __str__(self):
+        str_out = []
+        return(' '.join(str_out))
+
+
+class Common(Param_base):
+    print_info = False
+    model_size = [0, 0]
+    numerical_type = 'floating'
+
+    def update(self, **dic):
+        self.print_info = dic['print_info']
+        self.model_size = dic['model_size']
+        self.numerical_type = dic['numerical_type']
+
+    def __str__(self):
+        str_out = ['numerical_type:', str(self.numerical_type)]
+        return(' '.join(str_out))
+
+class Runner_base(metaclass=ABCMeta):
+    common = Common()
+    general = Param_base()
+    floating = Param_base()
+    hw = Param_base()
+
+    def update(self, **kwargs):
+        ## update param
+        self.common.update(**kwargs['common'])
+        self.general.update(**kwargs['general'])
+        assert(self.common.numerical_type.lower() in ['floating', '520', '720'])
+        if (self.common.numerical_type == 'floating'):
+            if (self.floating.__class__.__name__ != 'Param_base'):
+                self.floating.update(**kwargs['floating'])
+        else:
+            if (self.hw.__class__.__name__ != 'Param_base'):
+                self.hw.update(**kwargs['hw'])
+
+    def print_info(self):
+        if (self.common.numerical_type == 'floating'):
+            print(self, self.common, self.general, self.floating)
+        else:
+            print(self, self.common, self.general, self.hw)
+
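For orientation, every preprocessing step in this package follows the Runner_base contract above: it overrides the class-level param objects and implements run(image) returning (image, info). A minimal sketch of a hypothetical extra step written against that contract (the Flip step and all of its names are invented for illustration):

import numpy as np
from kneron.preprocessing.funcs.Runner_base import Runner_base, Param_base

class FlipGeneral(Param_base):          # hypothetical param block
    axis = 1
    def update(self, **dic):
        self.axis = int(dic.get('axis', 1))

class runner(Runner_base):
    general = FlipGeneral()
    def run(self, image_data):
        # mirror the image along the configured axis; the empty info dict
        # mirrors the (image, info) convention used by the other runners
        return np.flip(image_data, axis=self.general.axis), {}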
diff --git a/kneron/preprocessing/funcs/__init__.py b/kneron/preprocessing/funcs/__init__.py
new file mode 100644
index 0000000..0b46298
--- /dev/null
+++ b/kneron/preprocessing/funcs/__init__.py
@@ -0,0 +1,2 @@
+from . import ColorConversion, Padding, Resize, Crop, Normalize, Rotate
+
diff --git a/kneron/preprocessing/funcs/utils.py b/kneron/preprocessing/funcs/utils.py
new file mode 100644
index 0000000..a1e509a
--- /dev/null
+++ b/kneron/preprocessing/funcs/utils.py
@@ -0,0 +1,372 @@
+import numpy as np
+from PIL import Image
+import struct
+
+def pad_square_to_4(x_start, x_end, y_start, y_end):
+    w_int = x_end - x_start
+    h_int = y_end - y_start
+    pad = w_int - h_int
+    if pad > 0:
+        pad_s = (pad >> 1) & (~3)
+        pad_e = pad - pad_s
+        y_start -= pad_s
+        y_end += pad_e
+    else:  # pad <= 0
+        pad_s = -(((pad) >> 1) & (~3))
+        pad_e = (-pad) - pad_s
+        x_start -= pad_s
+        x_end += pad_e
+    return x_start, x_end, y_start, y_end
+
+def str_fill(value):
+    if len(value) == 1:
+        value = "0" + value
+    elif len(value) == 0:
+        value = "00"
+
+    return value
+
+def clip_ary(value):
+    list_v = []
+    for i in range(len(value)):
+        v = value[i] % 256
+        list_v.append(v)
+
+    return list_v
+
+def str2bool(v):
+    if isinstance(v, bool):
+        return v
+    # v is lower-cased first, so lower-case variants are sufficient
+    return v.lower() in ('true', '1', 't', 'y', 'yes')
+
+def str2int(s):
+    if s == "":
+        s = 0
+    s = int(s)
+    return s
+
+def str2float(s):
+    if s == "":
+        s = 0
+    s = float(s)
+    return s
+
+def clip(value, mini, maxi):
+    if value < mini:
+        result = mini
+    elif value > maxi:
+        result = maxi
+    else:
+        result = value
+
+    return result
+
+def signed_rounding(value, bit):
+    if value < 0:
+        value = value - (1 << (bit - 1))
+    else:
+        value = value + (1 << (bit - 1))
+
+    return value
+
+def hex_loader(data_folder, **kwargs):
+    format_mode = kwargs['raw_img_fmt']
+    src_h = kwargs['img_in_height']
+    src_w = kwargs['img_in_width']
+
+    if format_mode in ['YUV444', 'yuv444', 'YCBCR444', 'YCbCr444', 'ycbcr444']:
+        output = hex_yuv444(data_folder, src_h, src_w)
+    elif format_mode in ['RGB565', 'rgb565']:
+        output = hex_rgb565(data_folder, src_h, src_w)
+    elif format_mode in ['YUV422', 'yuv422', 'YCBCR422', 'YCbCr422', 'ycbcr422']:
+        output = hex_yuv422(data_folder, src_h, src_w)
+
+    return output
+
+def hex_rgb565(hex_folder, src_h, src_w):
+    pix_per_line = 8
+    byte_per_line = 16
+
+    f = open(hex_folder)
+    pixel_r = []
+    pixel_g = []
+    pixel_b = []
+
+    # Ignore the first line
+    f.readline()
+    input_line = int((src_h * src_w) / pix_per_line)
+    for i in range(input_line):
+        readline = f.readline()
+        for j in range(int(byte_per_line / 2) - 1, -1, -1):
+            data1 = int(readline[(j * 4 + 0):(j * 4 + 2)], 16)
+            data0 = int(readline[(j *
4 + 4)], 16) + r = ((data1 & 0xf8) >> 3) + g = (((data0 & 0xe0) >> 5) + ((data1 & 0x7) << 3)) + b = (data0 & 0x1f) + pixel_r.append(r) + pixel_g.append(g) + pixel_b.append(b) + + ary_r = np.array(pixel_r, dtype=np.uint8) + ary_g = np.array(pixel_g, dtype=np.uint8) + ary_b = np.array(pixel_b, dtype=np.uint8) + output = np.concatenate((ary_r[:, None], ary_g[:, None], ary_b[:, None]), axis=1) + output = output.reshape((src_h, src_w, 3)) + + return output + +def hex_yuv444(hex_folder,src_h,src_w): + pix_per_line = 4 + byte_per_line = 16 + + f = open(hex_folder) + byte0 = [] + byte1 = [] + byte2 = [] + byte3 = [] + + # Ignore the first line + f.readline() + input_line = int((src_h * src_w)/pix_per_line) + for i in range(input_line): + readline = f.readline() + for j in range(byte_per_line-1, -1, -1): + data = int(readline[(j*2):(j*2+2)], 16) + if (j+1) % 4 == 0: + byte0.append(data) + elif (j+2) % 4 == 0: + byte1.append(data) + elif (j+3) % 4 == 0: + byte2.append(data) + elif (j+4) % 4 == 0: + byte3.append(data) + # ary_a = np.array(byte0, dtype=np.uint8) + ary_v = np.array(byte1, dtype=np.uint8) + ary_u = np.array(byte2, dtype=np.uint8) + ary_y = np.array(byte3, dtype=np.uint8) + output = np.concatenate((ary_y[:, None], ary_u[:, None], ary_v[:, None]), axis=1) + output = output.reshape((src_h, src_w, 3)) + + return output + +def hex_yuv422(hex_folder,src_h,src_w): + pix_per_line = 8 + byte_per_line = 16 + f = open(hex_folder) + pixel_y = [] + pixel_u = [] + pixel_v = [] + + # Ignore the first line + f.readline() + input_line = int((src_h * src_w)/pix_per_line) + for i in range(input_line): + readline = f.readline() + for j in range(int(byte_per_line/4)-1, -1, -1): + data3 = int(readline[(j * 8 + 0):(j * 8 + 2)], 16) + data2 = int(readline[(j * 8 + 2):(j * 8 + 4)], 16) + data1 = int(readline[(j * 8 + 4):(j * 8 + 6)], 16) + data0 = int(readline[(j * 8 + 6):(j * 8 + 8)], 16) + pixel_y.append(data3) + pixel_y.append(data1) + pixel_u.append(data2) + pixel_u.append(data2) + pixel_v.append(data0) + pixel_v.append(data0) + + ary_y = np.array(pixel_y, dtype=np.uint8) + ary_u = np.array(pixel_u, dtype=np.uint8) + ary_v = np.array(pixel_v, dtype=np.uint8) + output = np.concatenate((ary_y[:, None], ary_u[:, None], ary_v[:, None]), axis=1) + output = output.reshape((src_h, src_w, 3)) + + return output + +def bin_loader(data_folder,**kwargs): + format_mode = kwargs['raw_img_fmt'] + src_h = kwargs['img_in_height'] + src_w = kwargs['img_in_width'] + if format_mode in ['YUV','yuv','YUV444', 'yuv444', 'YCBCR','YCbCr','ycbcr','YCBCR444', 'YCbCr444', 'ycbcr444']: + output = bin_yuv444(data_folder,src_h,src_w) + elif format_mode in ['RGB565', 'rgb565']: + output = bin_rgb565(data_folder,src_h,src_w) + elif format_mode in ['NIR', 'nir','NIR888', 'nir888']: + output = bin_nir(data_folder,src_h,src_w) + elif format_mode in ['YUV422', 'yuv422', 'YCBCR422', 'YCbCr422', 'ycbcr422']: + output = bin_yuv422(data_folder,src_h,src_w) + elif format_mode in ['RGB888','rgb888']: + output = np.fromfile(data_folder, dtype='uint8') + output = output.reshape(src_h,src_w,3) + elif format_mode in ['RGBA8888','rgba8888', 'RGBA' , 'rgba']: + output_temp = np.fromfile(data_folder, dtype='uint8') + output_temp = output_temp.reshape(src_h,src_w,4) + output = output_temp[:,:,0:3] + + return output + +def bin_yuv444(in_img_path,src_h,src_w): + # load bin + struct_fmt = '1B' + struct_len = struct.calcsize(struct_fmt) + struct_unpack = struct.Struct(struct_fmt).unpack_from + + row = src_h + col = src_w + pixels = row*col + + raw = [] + with 
open(in_img_path, "rb") as f:
+        while True:
+            data = f.read(struct_len)
+            if not data: break
+            s = struct_unpack(data)
+            raw.append(s[0])
+
+    raw = raw[:pixels*4]
+
+    # byte 0 of each 4-byte group is padding; repack bytes 3/2/1 as Y, U, V
+    output = np.zeros((pixels * 3), dtype=np.uint8)
+    cnt = 0
+    for i in range(0, pixels*4, 4):
+        # Y
+        output[cnt] = raw[i+3]
+        # U
+        cnt += 1
+        output[cnt] = raw[i+2]
+        # V
+        cnt += 1
+        output[cnt] = raw[i+1]
+
+        cnt += 1
+
+    output = output.reshape((src_h, src_w, 3))
+    return output
+
+def bin_yuv422(in_img_path, src_h, src_w):
+    # load bin
+    struct_fmt = '1B'
+    struct_len = struct.calcsize(struct_fmt)
+    struct_unpack = struct.Struct(struct_fmt).unpack_from
+
+    row = src_h
+    col = src_w
+    pixels = row * col
+
+    raw = []
+    with open(in_img_path, "rb") as f:
+        while True:
+            data = f.read(struct_len)
+            if not data: break
+            s = struct_unpack(data)
+            raw.append(s[0])
+
+    raw = raw[:pixels*2]
+
+    # expand 4:2:2 to packed YUV444; each 4-byte group holds V, Y1, U, Y0
+    # (low to high), with U and V shared by the pixel pair
+    output = np.zeros((pixels * 3), dtype=np.uint8)
+    cnt = 0
+    for i in range(0, pixels*2, 4):
+        # Y0
+        output[cnt] = raw[i+3]
+        # U0
+        cnt += 1
+        output[cnt] = raw[i+2]
+        # V0
+        cnt += 1
+        output[cnt] = raw[i]
+        # Y1
+        cnt += 1
+        output[cnt] = raw[i+1]
+        # U1
+        cnt += 1
+        output[cnt] = raw[i+2]
+        # V1
+        cnt += 1
+        output[cnt] = raw[i]
+
+        cnt += 1
+
+    output = output.reshape((src_h, src_w, 3))
+    return output
+
+def bin_rgb565(in_img_path, src_h, src_w):
+    # load bin
+    struct_fmt = '1B'
+    struct_len = struct.calcsize(struct_fmt)
+    struct_unpack = struct.Struct(struct_fmt).unpack_from
+
+    row = src_h
+    col = src_w
+    pixels = row * col
+
+    rgba565 = []
+    with open(in_img_path, "rb") as f:
+        while True:
+            data = f.read(struct_len)
+            if not data: break
+            s = struct_unpack(data)
+            rgba565.append(s[0])
+
+    rgba565 = rgba565[:pixels*2]
+
+    # rgb565_bin to numpy_array
+    output = np.zeros((pixels * 3), dtype=np.uint8)
+    cnt = 0
+    for i in range(0, pixels*2, 2):
+        temp = rgba565[i]
+        temp2 = rgba565[i+1]
+        # R-5
+        output[cnt] = (temp2 >> 3)
+
+        # G-6
+        cnt += 1
+        output[cnt] = ((temp & 0xe0) >> 5) + ((temp2 & 0x07) << 3)
+
+        # B-5
+        cnt += 1
+        output[cnt] = (temp & 0x1f)
+
+        cnt += 1
+
+    output = output.reshape((src_h, src_w, 3))
+    return output
+
+def bin_nir(in_img_path, src_h, src_w):
+    # load bin
+    struct_fmt = '1B'
+    struct_len = struct.calcsize(struct_fmt)
+    struct_unpack = struct.Struct(struct_fmt).unpack_from
+
+    nir = []
+    with open(in_img_path, "rb") as f:
+        while True:
+            data = f.read(struct_len)
+            if not data: break
+            s = struct_unpack(data)
+            nir.append(s[0])
+
+    nir = nir[:src_h*src_w]
+    pixels = len(nir)
+    # nir_bin to numpy_array
+    output = np.zeros((len(nir) * 3), dtype=np.uint8)
+    for i in range(0, pixels):
+        output[i*3] = nir[i]
+        output[i*3+1] = nir[i]
+        output[i*3+2] = nir[i]
+
+    output = output.reshape((src_h, src_w, 3))
+    return output
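As a standalone illustration of the RGB565 bit-unpacking used in bin_rgb565 above (not part of the commit; note the function keeps the raw 5/6/5-bit ranges rather than expanding them to 8 bits):

# two bytes encoding RGB565 with R=16, G=32, B=16:
# high byte is RRRRRGGG, low byte is GGGBBBBB
temp2, temp = 0b10000100, 0b00010000
r = temp2 >> 3                                     # 16
g = ((temp & 0xe0) >> 5) + ((temp2 & 0x07) << 3)   # 32
b = temp & 0x1f                                    # 16
print(r, g, b)                                     # 16 32 16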
diff --git a/kneron/preprocessing/funcs/utils_520.py b/kneron/preprocessing/funcs/utils_520.py
new file mode 100644
index 0000000..27bd860
--- /dev/null
+++ b/kneron/preprocessing/funcs/utils_520.py
@@ -0,0 +1,50 @@
+import math
+
+def round_up_16(num):
+    return ((num + (16 - 1)) & ~(16 - 1))
+
+def round_up_n(num, n):
+    if (num > 0):
+        temp = float(num) / n
+        return math.ceil(temp) * n
+    else:
+        return -math.ceil(float(-num) / n) * n
+
+def cal_img_row_offset(crop_num, pad_num, start_row, out_row, orig_row):
+
+    scaled_img_row = int(out_row - (pad_num[1] + pad_num[3]))
+    if ((start_row - pad_num[1]) > 0):
+        img_str_row = int((start_row - pad_num[1]))
+    else:
+        img_str_row = 0
+    valid_row = int(orig_row - (crop_num[1] + crop_num[3]))
+    img_str_row = int(valid_row * img_str_row / scaled_img_row)
+    return int(img_str_row + crop_num[1])
+
+def get_pad_num(pad_num_orig, left, up, right, bottom):
+    pad_num = [0] * 4
+    for i in range(0, 4):
+        pad_num[i] = pad_num_orig[i]
+
+    if not (left):
+        pad_num[0] = 0
+    if not (up):
+        pad_num[1] = 0
+    if not (right):
+        pad_num[2] = 0
+    if not (bottom):
+        pad_num[3] = 0
+
+    return pad_num
+
+def get_byte_per_pixel(raw_fmt):
+    # raw_fmt is lower-cased first, so lower-case variants are sufficient
+    if raw_fmt.lower() in ['rgb888', 'rgb']:
+        return 4
+    elif raw_fmt.lower() in ['yuv', 'yuv422']:
+        return 2
+    elif raw_fmt.lower() in ['rgb565']:
+        return 2
+    elif raw_fmt.lower() in ['nir888', 'nir']:
+        return 1
+    else:
+        return -1
\ No newline at end of file
diff --git a/kneron/preprocessing/funcs/utils_720.py b/kneron/preprocessing/funcs/utils_720.py
new file mode 100644
index 0000000..8d1a046
--- /dev/null
+++ b/kneron/preprocessing/funcs/utils_720.py
@@ -0,0 +1,42 @@
+import numpy as np
+from PIL import Image
+
+def twos_complement(value):
+    value = int(value)
+    # msb = (value & 0x8000) * (1/np.power(2, 15))
+    msb = (value & 0x8000) >> 15
+    if msb == 1:
+        if (((~value) & 0xFFFF) + 1) >= 0xFFFF:
+            result = ((~value) & 0xFFFF)
+        else:
+            result = (((~value) & 0xFFFF) + 1)
+        result = result * (-1)
+    else:
+        result = value
+
+    return result
+
+
+def twos_complement_pix(value):
+    h, _ = value.shape
+    for i in range(h):
+        value[i, 0] = twos_complement(value[i, 0])
+
+    return value
+
+def clip(value, mini, maxi):
+    if value < mini:
+        result = mini
+    elif value > maxi:
+        result = maxi
+    else:
+        result = value
+
+    return result
+
+def clip_pix(value, mini, maxi):
+    h, _ = value.shape
+    for i in range(h):
+        value[i, 0] = clip(value[i, 0], mini, maxi)
+
+    return value
\ No newline at end of file
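A quick sanity check of the 16-bit two's-complement decode above (illustration only; assumes the module is importable under this path):

from kneron.preprocessing.funcs.utils_720 import twos_complement

print(twos_complement(0xFFFF))   # -1
print(twos_complement(0x8000))   # -32768
print(twos_complement(0x7FFF))   # 32767 (msb clear, value returned unchanged)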
diff --git a/kneron/quantize_yolov5.py b/kneron/quantize_yolov5.py
new file mode 100644
index 0000000..9435082
--- /dev/null
+++ b/kneron/quantize_yolov5.py
@@ -0,0 +1,66 @@
+import os
+import numpy as np
+import torch
+import ktc  # Kneron Toolchain
+import onnx
+from yolov5_preprocess import Yolov5_preprocess
+import kneron_preprocessing
+
+# Select device (GPU if available)
+device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+
+# Image size
+imgsz_h, imgsz_w = 640, 640
+
+# Point at the calibration data directory
+data_path = "/workspace/yolov5/data50"
+
+# Make sure data50 actually contains images
+files_found = [f for _, _, files in os.walk(data_path) for f in files if f.lower().endswith((".jpg", ".jpeg", ".png", ".bmp"))]
+
+if not files_found:
+    raise FileNotFoundError(f"❌ Error: No images found in {data_path}! Please check your dataset.")
+
+print(f"✅ Found {len(files_found)} images in {data_path}")
+
+# Get the ONNX model's input name
+onnx_model_path = "/workspace/yolov5/runs/train/exp24/weights/latest.opt.onnx"
+m = onnx.load(onnx_model_path)
+input_name = m.graph.input[0].name  # the dict key must match the ONNX input name
+
+km = ktc.ModelConfig(20008, "0001", "720", onnx_model=onnx_model_path)
+# Holds the preprocessed image data
+img_list = []
+
+# Walk data50 and preprocess each image
+for root, _, files in os.walk(data_path):
+    for f in files:
+        fullpath = os.path.join(root, f)
+
+        # Only process image files
+        if not f.lower().endswith((".jpg", ".jpeg", ".png", ".bmp")):
+            print(f"⚠️ Skipping non-image file: {fullpath}")
+            continue
+
+        # Try to preprocess the image
+        try:
+            img_data, _ = Yolov5_preprocess(fullpath, device, imgsz_h, imgsz_w)
+            img_data = img_data.cpu().numpy()
+            print(f"✅ Processed: {fullpath}")
+            img_list.append(img_data)
+        except Exception as e:
+            print(f"❌ Failed to process {fullpath}: {e}")
+
+# Make sure img_list is not empty
+if not img_list:
+    raise ValueError("❌ Error: No valid images were processed! Please check the image paths and formats.")
+
+# Run BIE quantization (fixed-point analysis)
+bie_model_path = km.analysis({input_name: img_list})
+
+# Confirm the BIE model was generated
+if not os.path.exists(bie_model_path):
+    raise RuntimeError("❌ Error: BIE model was not generated! Please check your quantization process.")
+
+# Report success
+print("\n✅ Fixed-point analysis done! BIE model saved to:", bie_model_path)
diff --git a/kneron/removenode.py b/kneron/removenode.py
new file mode 100644
index 0000000..5d7e1e3
--- /dev/null
+++ b/kneron/removenode.py
@@ -0,0 +1,34 @@
+import onnx
+import ktc.onnx_optimizer as kneron_opt
+from onnx import helper
+
+def replace_sigmoid_with_identity(model):
+    """
+    Replaces all Sigmoid nodes with Identity nodes to maintain model integrity.
+    """
+    # iterate over a copy: removing from the repeated field while iterating it skips nodes
+    for node in list(model.graph.node):
+        if node.op_type == "Sigmoid":
+            print(f"Replacing {node.name} with Identity")
+            identity_node = helper.make_node(
+                "Identity",
+                inputs=node.input,
+                outputs=node.output,
+                name=node.name + "_identity"
+            )
+            model.graph.node.extend([identity_node])
+            model.graph.node.remove(node)
+
+    return model
+
+def process_onnx(input_onnx_path, output_onnx_path):
+    """ Replaces Sigmoid with Identity and saves the new model """
+    model = onnx.load(input_onnx_path)
+    model = replace_sigmoid_with_identity(model)
+    onnx.save(model, output_onnx_path)
+    print(f"Modified ONNX model saved to: {output_onnx_path}")
+
+# Paths as mounted inside the Docker container
+input_onnx = "/workspace/yolov5/runs/train/exp24/weights/best_simplified.onnx"
+output_onnx = "/workspace/yolov5/runs/train/exp24/weights/best_no_sigmoid.onnx"
+
+process_onnx(input_onnx, output_onnx)
diff --git a/kneron/yolov5_export.py b/kneron/yolov5_export.py
new file mode 100644
index 0000000..c865fbf
--- /dev/null
+++ b/kneron/yolov5_export.py
@@ -0,0 +1,80 @@
+import os
+import torch
+import sys
+import yaml
+import argparse
+
+from yolov5_runner import Yolov5Runner
+
+def save_weight(num_classes):
+    current_path = os.getcwd()
+    par_path = os.path.dirname(current_path)
+    sys.path.append(os.path.join(par_path, 'yolov5'))
+    from models.yolo import Model
+    device = torch.device('cpu')
+    ckpt = torch.load(path, map_location=device)
+    model = Model(yaml_path, nc=num_classes)
+    ckpt['model'] = {k: v for k, v in ckpt['model'].float().state_dict().items() if k in model.state_dict() and model.state_dict()[k].shape == v.shape}
+    model.load_state_dict(ckpt['model'])
+    torch.save(model.state_dict(), pt_path, _use_new_zipfile_serialization=False)
+
+def export_onnx(input_h, input_w, num_classes):
+
+    onnx_batch_size, onnx_img_h, onnx_img_w = 1, input_h, input_w
+    yolov5_model = Yolov5Runner(model_path=pt_path, yaml_path=yaml_path, grid20_path=grid20_path, grid40_path=grid40_path, grid80_path=grid80_path, num_classes=num_classes, imgsz_h=onnx_img_h, imgsz_w=onnx_img_w, conf_thres=0.001, iou_thres=0.65, top_k_num=3000, vanish_point=0.0)
+
+    # Input
+    img = torch.zeros((onnx_batch_size, 3, onnx_img_h, onnx_img_w))
+    # img = img.type(torch.cuda.FloatTensor)
+
+    # Load PyTorch model
+    model = yolov5_model.yolov5_model
+    model.eval()
+    model.model[-1].export = True  # set Detect() layer export=True
+    y = model(img)  # dry run
+
+    # ONNX export
+    try:
+        import onnx
+        print('\nStarting ONNX export with onnx %s...'
% onnx.__version__) + print('****onnx file****',onnx_export_file) + torch.onnx.export(model, img, onnx_export_file, verbose=False, opset_version=11, keep_initializers_as_inputs=True, input_names=['images'], output_names=['classes', 'boxes'] if y is None else ['output']) + # Checks + onnx_model = onnx.load(onnx_export_file) # load onnx model + onnx.checker.check_model(onnx_model) # check onnx model + print(onnx.helper.printable_graph(onnx_model.graph)) # print a human readable model + print('ONNX export success, saved as %s' % onnx_export_file) + except Exception as e: + print('ONNX export failure: %s' % e) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--data', type=str, default='../yolov5/data/pretrained_paths_520.yaml', help='the path to pretrained model paths yaml file') + + args = parser.parse_args() + + with open(args.data) as f: + data_dict = yaml.load(f, Loader=yaml.FullLoader) # data dict + + os.environ["CUDA_VISIBLE_DEVICES"] = '0' + num_classes = data_dict['nc'] + input_w = data_dict['input_w'] + input_h = data_dict['input_h'] + grid_dir = data_dict['grid_dir'] + grid20_path = data_dict['grid20_path'] + grid40_path = data_dict['grid40_path'] + grid80_path = data_dict['grid80_path'] + path = data_dict['path'] + pt_path=data_dict['pt_path'] + yaml_path=data_dict['yaml_path'] + onnx_export_file = data_dict['onnx_export_file'] + save_weight(num_classes) + export_onnx(input_h, input_w, num_classes) + + + + + + diff --git a/kneron/yolov5_preprocess.py b/kneron/yolov5_preprocess.py new file mode 100644 index 0000000..ca6800e --- /dev/null +++ b/kneron/yolov5_preprocess.py @@ -0,0 +1,161 @@ +# coding: utf-8 +import torch +import cv2 +import numpy as np +import math +import time +import kneron_preprocessing + +kneron_preprocessing.API.set_default_as_520() +torch.backends.cudnn.deterministic = True +img_formats = ['.bmp', '.jpg', '.jpeg', '.png', '.tif', '.tiff', '.dng'] +def make_divisible(x, divisor): + # Returns x evenly divisble by divisor + return math.ceil(x / divisor) * divisor + +def check_img_size(img_size, s=32): + # Verify img_size is a multiple of stride s + new_size = make_divisible(img_size, int(s)) # ceil gs-multiple + if new_size != img_size: + print('WARNING: --img-size %g must be multiple of max stride %g, updating to %g' % (img_size, s, new_size)) + return new_size + +def letterbox_ori(img, new_shape=(640, 640), color=(0, 0, 0), auto=True, scaleFill=False, scaleup=True): + # Resize image to a 32-pixel-multiple rectangle https://github.com/ultralytics/yolov3/issues/232 + shape = img.shape[:2] # current shape [height, width] + if isinstance(new_shape, int): + new_shape = (new_shape, new_shape) + + # Scale ratio (new / old) + r = min(new_shape[0] / shape[0], new_shape[1] / shape[1]) + if not scaleup: # only scale down, do not scale up (for better test mAP) + r = min(r, 1.0) + + # Compute padding + ratio = r, r # width, height ratios + new_unpad = int(round(shape[1] * r)), int(round(shape[0] * r)) # width, height + dw, dh = new_shape[1] - new_unpad[0], new_shape[0] - new_unpad[1] # wh padding + + dw /= 2 # divide padding into 2 sides + dh /= 2 + + if shape[::-1] != new_unpad: # resize + img = cv2.resize(img, new_unpad, interpolation=cv2.INTER_LINEAR) + #img = kneron_preprocessing.API.resize(img,size=new_unpad, keep_ratio = False) + + top, bottom = int(round(dh - 0.1)), int(round(dh + 0.1)) + left, right = int(round(dw - 0.1)), int(round(dw + 0.1)) + # top, bottom = int(0), int(round(dh + 0.1)) + # left, right = int(0), 
int(round(dw + 0.1)) + img = cv2.copyMakeBorder(img, top, bottom, left, right, cv2.BORDER_CONSTANT, value=color) # add border + #img = kneron_preprocessing.API.pad(img, left, right, top, bottom, 0) + + return img, ratio, (dw, dh) + +def letterbox(img, new_shape=(640, 640), color=(0, 0, 0), auto=True, scaleFill=False, scaleup=True): + # Resize image to a 32-pixel-multiple rectangle https://github.com/ultralytics/yolov3/issues/232 + shape = img.shape[:2] # current shape [height, width] + if isinstance(new_shape, int): + new_shape = (new_shape, new_shape) + + # Scale ratio (new / old) + r = min(new_shape[0] / shape[0], new_shape[1] / shape[1]) + if not scaleup: # only scale down, do not scale up (for better test mAP) + r = min(r, 1.0) + + # Compute padding + ratio = r, r # width, height ratios + new_unpad = int(round(shape[1] * r)), int(round(shape[0] * r)) # width, height + dw, dh = new_shape[1] - new_unpad[0], new_shape[0] - new_unpad[1] # wh padding + + # dw /= 2 # divide padding into 2 sides + # dh /= 2 + + if shape[::-1] != new_unpad: # resize + #img = cv2.resize(img, new_unpad, interpolation=cv2.INTER_LINEAR) + img = kneron_preprocessing.API.resize(img,size=new_unpad, keep_ratio = False) + + # top, bottom = int(round(dh - 0.1)), int(round(dh + 0.1)) + # left, right = int(round(dw - 0.1)), int(round(dw + 0.1)) + top, bottom = int(0), int(round(dh + 0.1)) + left, right = int(0), int(round(dw + 0.1)) + #img = cv2.copyMakeBorder(img, top, bottom, left, right, cv2.BORDER_CONSTANT, value=color) # add border + img = kneron_preprocessing.API.pad(img, left, right, top, bottom, 0) + + return img, ratio, (dw, dh) + +def letterbox_test(img, new_shape=(640, 640), color=(0, 0, 0), auto=True, scaleFill=False, scaleup=True): + + ratio = 1.0, 1.0 + dw, dh = 0, 0 + img = kneron_preprocessing.API.resize(img, size=(480, 256), keep_ratio=False, type='bilinear') + return img, ratio, (dw, dh) + +def LoadImages(path,img_size): #_rgb # for inference + if isinstance(path, str): + img0 = cv2.imread(path) # BGR + else: + img0 = path # BGR + + # Padded resize + img = letterbox(img0, new_shape=img_size)[0] + # Convert + img = img[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416 + img = np.ascontiguousarray(img) + return img, img0 + +def LoadImages_yyy(path,img_size): #_yyy # for inference + if isinstance(path, str): + img0 = cv2.imread(path) # BGR + else: + img0 = path # BGR + + yvu = cv2.cvtColor(img0, cv2.COLOR_BGR2YCrCb) + y, v, u = cv2.split(yvu) + img0 = np.stack((y,)*3, axis=-1) + + # Padded resize + img = letterbox(img0, new_shape=img_size)[0] + + # Convert + img = img[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416 + img = np.ascontiguousarray(img) + return img, img0 + +def LoadImages_yuv420(path,img_size): #_yuv420 # for inference + if isinstance(path, str): + img0 = cv2.imread(path) # BGR + else: + img0 = path # BGR + img_h, img_w = img0.shape[:2] + img_h = (img_h // 2) * 2 + img_w = (img_w // 2) * 2 + img = img0[:img_h,:img_w,:] + yuv = cv2.cvtColor(img, cv2.COLOR_BGR2YUV_I420) + img0= cv2.cvtColor(yuv, cv2.COLOR_YUV2BGR_I420) #yuv420 + + + # Padded resize + img = letterbox(img0, new_shape=img_size)[0] + + # Convert + img = img[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416 + img = np.ascontiguousarray(img) + return img, img0 + +def Yolov5_preprocess(image_path, device, imgsz_h, imgsz_w) : + model_stride_max = 32 + imgsz_h = check_img_size(imgsz_h, s=model_stride_max) # check img_size + imgsz_w = check_img_size(imgsz_w, s=model_stride_max) # check img_size + img, im0 = 
LoadImages(image_path, img_size=(imgsz_h,imgsz_w)) + img = kneron_preprocessing.API.norm(img) #path1 + #print('img',img.shape) + img = torch.from_numpy(img).to(device) #path1,path2 + # img = img.float() # uint8 to fp16/32 #path2 + # img /= 255.0#256.0 - 0.5 # 0 - 255 to -0.5 - 0.5 #path2 + + if img.ndimension() == 3: + img = img.unsqueeze(0) + + return img, im0 + diff --git a/kneron/yolov5_savingWeight.py b/kneron/yolov5_savingWeight.py new file mode 100644 index 0000000..1563cda --- /dev/null +++ b/kneron/yolov5_savingWeight.py @@ -0,0 +1,42 @@ +import os +import torch +import sys +import argparse +import yaml + +def save_weight(num_classes): + from models.yolo import Model + num_classes = num_classes + device=torch.device('cpu') + ckpt = torch.load(path, map_location=device) + model = Model(yaml_path, nc=num_classes) + ckpt['model'] = {k: v for k, v in ckpt['model'].float().state_dict().items() if k in model.state_dict() and model.state_dict()[k].shape == v.shape} + model.load_state_dict(ckpt['model']) + torch.save(model.state_dict(),pt_path,_use_new_zipfile_serialization=False) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--data', type=str, default='data/pretrained_paths_520.yaml', help='the path to pretrained model paths yaml file') + args = parser.parse_args() + + with open(args.data) as f: + data_dict = yaml.load(f, Loader=yaml.FullLoader) # data dict + + os.environ["CUDA_VISIBLE_DEVICES"] = '0' + input_w = data_dict['input_w'] + input_h = data_dict['input_h'] + grid_dir = data_dict['grid_dir'] + grid20_path = data_dict['grid20_path'] + grid40_path = data_dict['grid40_path'] + grid80_path = data_dict['grid80_path'] + path = data_dict['path'] + pt_path=data_dict['pt_path'] + yaml_path=data_dict['yaml_path'] + + save_weight(data_dict['nc']) + + + + + diff --git a/models/__init__.py b/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/models/common.py b/models/common.py new file mode 100644 index 0000000..14a03f4 --- /dev/null +++ b/models/common.py @@ -0,0 +1,246 @@ +# This file contains modules common to various models + +import math + +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from PIL import Image, ImageDraw + +from utils.datasets import letterbox +from utils.general import non_max_suppression, make_divisible, scale_coords, xyxy2xywh +from utils.plots import color_list + + +def autopad(k, p=None): # kernel, padding + # Pad to 'same' + if p is None: + p = k // 2 if isinstance(k, int) else [x // 2 for x in k] # auto-pad + return p + + +def DWConv(c1, c2, k=1, s=1, act=True): + # Depthwise convolution + return Conv(c1, c2, k, s, g=math.gcd(c1, c2), act=act) + + +class Conv(nn.Module): + # Standard convolution + def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True): # ch_in, ch_out, kernel, stride, padding, groups + super(Conv, self).__init__() + self.conv = nn.Conv2d(c1, c2, k, s, autopad(k, p), groups=g, bias=False) + self.bn = nn.BatchNorm2d(c2) + #self.act = nn.Hardswish() if act else nn.Identity() + self.act = nn.LeakyReLU(0.1, inplace=True) if act else nn.Identity() + def forward(self, x): + # print('x',x.size()) + y = self.act(self.bn(self.conv(x))) + # print('y',y.size()) + return y + + def fuseforward(self, x): + return self.act(self.conv(x)) + + +class Bottleneck(nn.Module): + # Standard bottleneck + def __init__(self, c1, c2, shortcut=True, g=1, e=0.5): # ch_in, ch_out, shortcut, groups, expansion + super(Bottleneck, self).__init__() + c_ = int(c2 * e) 
# hidden channels + self.cv1 = Conv(c1, c_, 1, 1) + self.cv2 = Conv(c_, c2, 3, 1, g=g) + self.add = shortcut and c1 == c2 + + def forward(self, x): + return x + self.cv2(self.cv1(x)) if self.add else self.cv2(self.cv1(x)) + + +class BottleneckCSP(nn.Module): + # CSP Bottleneck https://github.com/WongKinYiu/CrossStagePartialNetworks + def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): # ch_in, ch_out, number, shortcut, groups, expansion + super(BottleneckCSP, self).__init__() + c_ = int(c2 * e) # hidden channels + self.cv1 = Conv(c1, c_, 1, 1) + self.cv2 = nn.Conv2d(c1, c_, 1, 1, bias=False) + self.cv3 = nn.Conv2d(c_, c_, 1, 1, bias=False) + self.cv4 = Conv(2 * c_, c2, 1, 1) + self.bn = nn.BatchNorm2d(2 * c_) # applied to cat(cv2, cv3) + self.act = nn.LeakyReLU(0.1, inplace=True) + self.m = nn.Sequential(*[Bottleneck(c_, c_, shortcut, g, e=1.0) for _ in range(n)]) + + def forward(self, x): + y1 = self.cv3(self.m(self.cv1(x))) + y2 = self.cv2(x) + return self.cv4(self.act(self.bn(torch.cat((y1, y2), dim=1)))) + + + + +class Focus(nn.Module): + # Focus wh information into c-space + def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True): # ch_in, ch_out, kernel, stride, padding, groups + super(Focus, self).__init__() + self.conv = Conv(c1 * 4, c2, k, s, p, g, act) + + def forward(self, x): # x(b,c,w,h) -> y(b,4c,w/2,h/2) + return self.conv(torch.cat([x[..., ::2, ::2], x[..., 1::2, ::2], x[..., ::2, 1::2], x[..., 1::2, 1::2]], 1)) + + +class Concat(nn.Module): + # Concatenate a list of tensors along dimension + def __init__(self, dimension=1): + super(Concat, self).__init__() + self.d = dimension + + def forward(self, x): + # print('Concat x.size()',x.size()) + y=torch.cat(x, self.d) + # print('Concat y.size()',y.size()) + return y + + +class NMS(nn.Module): + # Non-Maximum Suppression (NMS) module + conf = 0.25 # confidence threshold + iou = 0.45 # IoU threshold + classes = None # (optional list) filter by class + + def __init__(self): + super(NMS, self).__init__() + + def forward(self, x): + return non_max_suppression(x[0], conf_thres=self.conf, iou_thres=self.iou, classes=self.classes) + + +class autoShape(nn.Module): + # input-robust model wrapper for passing cv2/np/PIL/torch inputs. Includes preprocessing, inference and NMS + img_size = 640 # inference size (pixels) + conf = 0.25 # NMS confidence threshold + iou = 0.45 # NMS IoU threshold + classes = None # (optional list) filter by class + + def __init__(self, model): + super(autoShape, self).__init__() + self.model = model.eval() + + def forward(self, imgs, size=640, augment=False, profile=False): + # supports inference from various sources. For height=720, width=1280, RGB images example inputs are: + # opencv: imgs = cv2.imread('image.jpg')[:,:,::-1] # HWC BGR to RGB x(720,1280,3) + # PIL: imgs = Image.open('image.jpg') # HWC x(720,1280,3) + # numpy: imgs = np.zeros((720,1280,3)) # HWC + # torch: imgs = torch.zeros(16,3,720,1280) # BCHW + # multiple: imgs = [Image.open('image1.jpg'), Image.open('image2.jpg'), ...] 
# list of images + + p = next(self.model.parameters()) # for device and type + if isinstance(imgs, torch.Tensor): # torch + return self.model(imgs.to(p.device).type_as(p), augment, profile) # inference + + # Pre-process + if not isinstance(imgs, list): + imgs = [imgs] + shape0, shape1 = [], [] # image and inference shapes + batch = range(len(imgs)) # batch size + for i in batch: + imgs[i] = np.array(imgs[i]) # to numpy + imgs[i] = imgs[i][:, :, :3] if imgs[i].ndim == 3 else np.tile(imgs[i][:, :, None], 3) # enforce 3ch input + s = imgs[i].shape[:2] # HWC + shape0.append(s) # image shape + g = (size / max(s)) # gain + shape1.append([y * g for y in s]) + shape1 = [make_divisible(x, int(self.stride.max())) for x in np.stack(shape1, 0).max(0)] # inference shape + x = [letterbox(imgs[i], new_shape=shape1, auto=False)[0] for i in batch] # pad + x = np.stack(x, 0) if batch[-1] else x[0][None] # stack + x = np.ascontiguousarray(x.transpose((0, 3, 1, 2))) # BHWC to BCHW + x = torch.from_numpy(x).to(p.device).type_as(p) / 255. # uint8 to fp16/32 + + # Inference + with torch.no_grad(): + y = self.model(x, augment, profile)[0] # forward + y = non_max_suppression(y, conf_thres=self.conf, iou_thres=self.iou, classes=self.classes) # NMS + + # Post-process + for i in batch: + if y[i] is not None: + y[i][:, :4] = scale_coords(shape1, y[i][:, :4], shape0[i]) + + return Detections(imgs, y, self.names) + + +class Detections: + # detections class for YOLOv5 inference results + def __init__(self, imgs, pred, names=None): + super(Detections, self).__init__() + self.imgs = imgs # list of images as numpy arrays + self.pred = pred # list of tensors pred[0] = (xyxy, conf, cls) + self.names = names # class names + self.xyxy = pred # xyxy pixels + self.xywh = [xyxy2xywh(x) for x in pred] # xywh pixels + gn = [torch.Tensor([*[im.shape[i] for i in [1, 0, 1, 0]], 1., 1.]) for im in imgs] # normalization gains + self.xyxyn = [x / g for x, g in zip(self.xyxy, gn)] # xyxy normalized + self.xywhn = [x / g for x, g in zip(self.xywh, gn)] # xywh normalized + + def display(self, pprint=False, show=False, save=False): + colors = color_list() + for i, (img, pred) in enumerate(zip(self.imgs, self.pred)): + str = f'Image {i + 1}/{len(self.pred)}: {img.shape[0]}x{img.shape[1]} ' + if pred is not None: + for c in pred[:, -1].unique(): + n = (pred[:, -1] == c).sum() # detections per class + str += f'{n} {self.names[int(c)]}s, ' # add to string + if show or save: + img = Image.fromarray(img.astype(np.uint8)) if isinstance(img, np.ndarray) else img # from np + for *box, conf, cls in pred: # xyxy, confidence, class + # str += '%s %.2f, ' % (names[int(cls)], conf) # label + ImageDraw.Draw(img).rectangle(box, width=4, outline=colors[int(cls) % 10]) # plot + if save: + f = f'results{i}.jpg' + str += f"saved to '{f}'" + img.save(f) # save + if show: + img.show(f'Image {i}') # show + if pprint: + print(str) + + def print(self): + self.display(pprint=True) # print results + + def show(self): + self.display(show=True) # show results + + def save(self): + self.display(save=True) # save results + + +class Flatten(nn.Module): + # Use after nn.AdaptiveAvgPool2d(1) to remove last 2 dimensions + @staticmethod + def forward(x): + return x.view(x.size(0), -1) + + +class Classify(nn.Module): + # Classification head, i.e. 
x(b,c1,20,20) to x(b,c2) + def __init__(self, c1, c2, k=1, s=1, p=None, g=1): # ch_in, ch_out, kernel, stride, padding, groups + super(Classify, self).__init__() + self.aap = nn.AdaptiveAvgPool2d(1) # to x(b,c1,1,1) + self.conv = nn.Conv2d(c1, c2, k, s, autopad(k, p), groups=g, bias=False) # to x(b,c2,1,1) + self.flat = Flatten() + + def forward(self, x): + z = torch.cat([self.aap(y) for y in (x if isinstance(x, list) else [x])], 1) # cat if list + return self.flat(self.conv(z)) # flatten to x(b,c2) + + +class SPP(nn.Module): + # Spatial pyramid pooling layer used in YOLOv3-SPP + def __init__(self, c1, c2, k=(5, 9, 13)): + super(SPP, self).__init__() + c_ = c1 // 2 # hidden channels + self.cv1 = Conv(c1, c_, 1, 1) + self.cv2 = Conv(c_ * (len(k) + 1), c2, 1, 1) + self.m = nn.ModuleList([nn.MaxPool2d(kernel_size=x, stride=1, padding=x // 2) for x in k]) + + def forward(self, x): + x = self.cv1(x) + return self.cv2(torch.cat([x] + [m(x) for m in self.m], 1)) \ No newline at end of file diff --git a/models/experimental.py b/models/experimental.py new file mode 100644 index 0000000..1a66f1a --- /dev/null +++ b/models/experimental.py @@ -0,0 +1,152 @@ +# This file contains experimental modules + +import numpy as np +import torch +import torch.nn as nn + +from models.common import Conv, DWConv +from utils.google_utils import attempt_download + + +class CrossConv(nn.Module): + # Cross Convolution Downsample + def __init__(self, c1, c2, k=3, s=1, g=1, e=1.0, shortcut=False): + # ch_in, ch_out, kernel, stride, groups, expansion, shortcut + super(CrossConv, self).__init__() + c_ = int(c2 * e) # hidden channels + self.cv1 = Conv(c1, c_, (1, k), (1, s)) + self.cv2 = Conv(c_, c2, (k, 1), (s, 1), g=g) + self.add = shortcut and c1 == c2 + + def forward(self, x): + return x + self.cv2(self.cv1(x)) if self.add else self.cv2(self.cv1(x)) + + +class C3(nn.Module): + # Cross Convolution CSP + def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): # ch_in, ch_out, number, shortcut, groups, expansion + super(C3, self).__init__() + c_ = int(c2 * e) # hidden channels + self.cv1 = Conv(c1, c_, 1, 1) + self.cv2 = nn.Conv2d(c1, c_, 1, 1, bias=False) + self.cv3 = nn.Conv2d(c_, c_, 1, 1, bias=False) + self.cv4 = Conv(2 * c_, c2, 1, 1) + self.bn = nn.BatchNorm2d(2 * c_) # applied to cat(cv2, cv3) + self.act = nn.LeakyReLU(0.1, inplace=True) + self.m = nn.Sequential(*[CrossConv(c_, c_, 3, 1, g, 1.0, shortcut) for _ in range(n)]) + + def forward(self, x): + y1 = self.cv3(self.m(self.cv1(x))) + y2 = self.cv2(x) + return self.cv4(self.act(self.bn(torch.cat((y1, y2), dim=1)))) + + +class Sum(nn.Module): + # Weighted sum of 2 or more layers https://arxiv.org/abs/1911.09070 + def __init__(self, n, weight=False): # n: number of inputs + super(Sum, self).__init__() + self.weight = weight # apply weights boolean + self.iter = range(n - 1) # iter object + if weight: + self.w = nn.Parameter(-torch.arange(1., n) / 2, requires_grad=True) # layer weights + + def forward(self, x): + y = x[0] # no weight + if self.weight: + w = torch.sigmoid(self.w) * 2 + for i in self.iter: + y = y + x[i + 1] * w[i] + else: + for i in self.iter: + y = y + x[i + 1] + return y + + +class GhostConv(nn.Module): + # Ghost Convolution https://github.com/huawei-noah/ghostnet + def __init__(self, c1, c2, k=1, s=1, g=1, act=True): # ch_in, ch_out, kernel, stride, groups + super(GhostConv, self).__init__() + c_ = c2 // 2 # hidden channels + self.cv1 = Conv(c1, c_, k, s, None, g, act) + self.cv2 = Conv(c_, c_, 5, 1, None, c_, act) + + def forward(self, x): + 
y = self.cv1(x) + return torch.cat([y, self.cv2(y)], 1) + + +class GhostBottleneck(nn.Module): + # Ghost Bottleneck https://github.com/huawei-noah/ghostnet + def __init__(self, c1, c2, k, s): + super(GhostBottleneck, self).__init__() + c_ = c2 // 2 + self.conv = nn.Sequential(GhostConv(c1, c_, 1, 1), # pw + DWConv(c_, c_, k, s, act=False) if s == 2 else nn.Identity(), # dw + GhostConv(c_, c2, 1, 1, act=False)) # pw-linear + self.shortcut = nn.Sequential(DWConv(c1, c1, k, s, act=False), + Conv(c1, c2, 1, 1, act=False)) if s == 2 else nn.Identity() + + def forward(self, x): + return self.conv(x) + self.shortcut(x) + + +class MixConv2d(nn.Module): + # Mixed Depthwise Conv https://arxiv.org/abs/1907.09595 + def __init__(self, c1, c2, k=(1, 3), s=1, equal_ch=True): + super(MixConv2d, self).__init__() + groups = len(k) + if equal_ch: # equal c_ per group + i = torch.linspace(0, groups - 1E-6, c2).floor() # c2 indices + c_ = [(i == g).sum() for g in range(groups)] # intermediate channels + else: # equal weight.numel() per group + b = [c2] + [0] * groups + a = np.eye(groups + 1, groups, k=-1) + a -= np.roll(a, 1, axis=1) + a *= np.array(k) ** 2 + a[0] = 1 + c_ = np.linalg.lstsq(a, b, rcond=None)[0].round() # solve for equal weight indices, ax = b + + self.m = nn.ModuleList([nn.Conv2d(c1, int(c_[g]), k[g], s, k[g] // 2, bias=False) for g in range(groups)]) + self.bn = nn.BatchNorm2d(c2) + self.act = nn.LeakyReLU(0.1, inplace=True) + + def forward(self, x): + return x + self.act(self.bn(torch.cat([m(x) for m in self.m], 1))) + + +class Ensemble(nn.ModuleList): + # Ensemble of models + def __init__(self): + super(Ensemble, self).__init__() + + def forward(self, x, augment=False): + y = [] + for module in self: + y.append(module(x, augment)[0]) + # y = torch.stack(y).max(0)[0] # max ensemble + # y = torch.cat(y, 1) # nms ensemble + y = torch.stack(y).mean(0) # mean ensemble + return y, None # inference, train output + + +def attempt_load(weights, map_location=None): + # Loads an ensemble of models weights=[a,b,c] or a single model weights=[a] or weights=a + model = Ensemble() + for w in weights if isinstance(weights, list) else [weights]: + ckpt = torch.load(w, map_location=map_location) + model.append( ckpt['model'].float().fuse().eval() ) # load FP32 model + + # Compatibility updates + for m in model.modules(): + if type(m) in [nn.Hardswish, nn.LeakyReLU, nn.ReLU, nn.ReLU6]: + m.inplace = True # pytorch 1.7.0 compatibility + elif type(m) is Conv: + m._non_persistent_buffers_set = set() # pytorch 1.6.0 compatibility + + if len(model) == 1: + return model[-1] # return model + else: + print('Ensemble created with %s\n' % weights) + for k in ['names', 'stride']: + setattr(model, k, getattr(model[-1], k)) + return model # return ensemble diff --git a/models/export.py b/models/export.py new file mode 100644 index 0000000..c5e96f1 --- /dev/null +++ b/models/export.py @@ -0,0 +1,94 @@ +"""Exports a YOLOv5 *.pt model to ONNX and TorchScript formats + +Usage: + $ export PYTHONPATH="$PWD" && python models/export.py --weights ./weights/yolov5s.pt --img 640 --batch 1 +""" + +import argparse +import sys +import time + +sys.path.append('./') # to run '$ python *.py' files in subdirectories + +import torch +import torch.nn as nn + +import models +from models.experimental import attempt_load +from utils.activations import Hardswish +from utils.general import set_logging, check_img_size + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--weights', type=str, 
default='./yolov5s.pt', help='weights path') # from yolov5/models/ + parser.add_argument('--img-size', nargs='+', type=int, default=[640, 640], help='image size') # height, width + parser.add_argument('--batch-size', type=int, default=1, help='batch size') + opt = parser.parse_args() + opt.img_size *= 2 if len(opt.img_size) == 1 else 1 # expand + print(opt) + set_logging() + t = time.time() + + # Load PyTorch model + model = attempt_load(opt.weights, map_location=torch.device('cpu')) # load FP32 model + labels = model.names + + # Checks + gs = int(max(model.stride)) # grid size (max stride) + opt.img_size = [check_img_size(x, gs) for x in opt.img_size] # verify img_size are gs-multiples + + # Input + img = torch.zeros(opt.batch_size, 3, *opt.img_size) # image size(1,3,320,192) iDetection + + # Update model + for k, m in model.named_modules(): + m._non_persistent_buffers_set = set() # pytorch 1.6.0 compatibility + if isinstance(m, models.common.Conv) and isinstance(m.act, nn.Hardswish): + m.act = Hardswish() # assign activation + # if isinstance(m, models.yolo.Detect): + # m.forward = m.forward_export # assign forward (optional) + model.model[-1].export = True # set Detect() layer export=True + y = model(img) # dry run + + # TorchScript export + try: + print('\nStarting TorchScript export with torch %s...' % torch.__version__) + f = opt.weights.replace('.pt', '.torchscript.pt') # filename + ts = torch.jit.trace(model, img) + ts.save(f) + print('TorchScript export success, saved as %s' % f) + except Exception as e: + print('TorchScript export failure: %s' % e) + + # ONNX export + try: + import onnx + + print('\nStarting ONNX export with onnx %s...' % onnx.__version__) + f = opt.weights.replace('.pt', '.onnx') # filename + torch.onnx.export(model, img, f, verbose=False, opset_version=12, input_names=['images'], + output_names=['classes', 'boxes'] if y is None else ['output']) + + # Checks + onnx_model = onnx.load(f) # load onnx model + onnx.checker.check_model(onnx_model) # check onnx model + # print(onnx.helper.printable_graph(onnx_model.graph)) # print a human readable model + print('ONNX export success, saved as %s' % f) + except Exception as e: + print('ONNX export failure: %s' % e) + + # CoreML export + try: + import coremltools as ct + + print('\nStarting CoreML export with coremltools %s...' % ct.__version__) + # convert model from torchscript and apply pixel scaling as per detect.py + model = ct.convert(ts, inputs=[ct.ImageType(name='image', shape=img.shape, scale=1 / 255.0, bias=[0, 0, 0])]) + f = opt.weights.replace('.pt', '.mlmodel') # filename + model.save(f) + print('CoreML export success, saved as %s' % f) + except Exception as e: + print('CoreML export failure: %s' % e) + + # Finish + print('\nExport complete (%.2fs). Visualize with https://github.com/lutzroeder/netron.' 
% (time.time() - t)) diff --git a/models/hub/yolov3-spp.yaml b/models/hub/yolov3-spp.yaml new file mode 100644 index 0000000..b6cadd9 --- /dev/null +++ b/models/hub/yolov3-spp.yaml @@ -0,0 +1,51 @@ +# parameters +nc: 80 # number of classes +depth_multiple: 1.0 # model depth multiple +width_multiple: 1.0 # layer channel multiple + +# anchors +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# darknet53 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Conv, [32, 3, 1]], # 0 + [-1, 1, Conv, [64, 3, 2]], # 1-P1/2 + [-1, 1, Bottleneck, [64]], + [-1, 1, Conv, [128, 3, 2]], # 3-P2/4 + [-1, 2, Bottleneck, [128]], + [-1, 1, Conv, [256, 3, 2]], # 5-P3/8 + [-1, 8, Bottleneck, [256]], + [-1, 1, Conv, [512, 3, 2]], # 7-P4/16 + [-1, 8, Bottleneck, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 9-P5/32 + [-1, 4, Bottleneck, [1024]], # 10 + ] + +# YOLOv3-SPP head +head: + [[-1, 1, Bottleneck, [1024, False]], + [-1, 1, SPP, [512, [5, 9, 13]]], + [-1, 1, Conv, [1024, 3, 1]], + [-1, 1, Conv, [512, 1, 1]], + [-1, 1, Conv, [1024, 3, 1]], # 15 (P5/32-large) + + [-2, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 8], 1, Concat, [1]], # cat backbone P4 + [-1, 1, Bottleneck, [512, False]], + [-1, 1, Bottleneck, [512, False]], + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, Conv, [512, 3, 1]], # 22 (P4/16-medium) + + [-2, 1, Conv, [128, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P3 + [-1, 1, Bottleneck, [256, False]], + [-1, 2, Bottleneck, [256, False]], # 27 (P3/8-small) + + [[27, 22, 15], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/models/hub/yolov5-fpn.yaml b/models/hub/yolov5-fpn.yaml new file mode 100644 index 0000000..4d2fae1 --- /dev/null +++ b/models/hub/yolov5-fpn.yaml @@ -0,0 +1,42 @@ +# parameters +nc: 80 # number of classes +depth_multiple: 1.0 # model depth multiple +width_multiple: 1.0 # layer channel multiple + +# anchors +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Focus, [64, 3]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, Bottleneck, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 9, BottleneckCSP, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, BottleneckCSP, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 1, SPP, [1024, [5, 9, 13]]], + [-1, 6, BottleneckCSP, [1024]], # 9 + ] + +# YOLOv5 FPN head +head: + [[-1, 3, BottleneckCSP, [1024, False]], # 10 (P5/32-large) + + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 1, Conv, [512, 1, 1]], + [-1, 3, BottleneckCSP, [512, False]], # 14 (P4/16-medium) + + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 1, Conv, [256, 1, 1]], + [-1, 3, BottleneckCSP, [256, False]], # 18 (P3/8-small) + + [[18, 14, 10], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/models/hub/yolov5-panet.yaml b/models/hub/yolov5-panet.yaml new file mode 100644 index 0000000..9ed05dd --- /dev/null +++ b/models/hub/yolov5-panet.yaml @@ -0,0 +1,48 @@ +# parameters +nc: 80 # number of classes +depth_multiple: 1.0 # model depth multiple +width_multiple: 1.0 # layer channel multiple + +# anchors +anchors: + - [116,90, 156,198, 373,326] # P5/32 + - [30,61, 62,45, 59,119] # P4/16 + - [10,13, 16,30, 33,23] # P3/8 + +# 
YOLOv5 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Focus, [64, 3]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, BottleneckCSP, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 9, BottleneckCSP, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, BottleneckCSP, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 1, SPP, [1024, [5, 9, 13]]], + [-1, 3, BottleneckCSP, [1024, False]], # 9 + ] + +# YOLOv5 PANet head +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, BottleneckCSP, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, BottleneckCSP, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, BottleneckCSP, [512, False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, BottleneckCSP, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P5, P4, P3) + ] diff --git a/models/yolo.py b/models/yolo.py new file mode 100644 index 0000000..fa36dcf --- /dev/null +++ b/models/yolo.py @@ -0,0 +1,292 @@ +import argparse +import logging +import math +import sys +from copy import deepcopy +from pathlib import Path + +sys.path.append('./') # to run '$ python *.py' files in subdirectories +logger = logging.getLogger(__name__) + +import torch +import torch.nn as nn + + +from models.common import Conv, Bottleneck, SPP, DWConv, Focus, BottleneckCSP, Concat, NMS, autoShape +from models.experimental import MixConv2d, CrossConv, C3 +from utils.autoanchor import check_anchor_order +from utils.general import make_divisible, check_file, set_logging +from utils.torch_utils import time_synchronized, fuse_conv_and_bn, model_info, scale_img, initialize_weights, \ + select_device, copy_attr + +try: + import thop # for FLOPS computation +except ImportError: + thop = None + + +class Detect(nn.Module): + stride = None # strides computed during build + export = False # onnx export + + def __init__(self, nc=80, anchors=(), ch=()): # detection layer + super(Detect, self).__init__() + self.nc = nc # number of classes + self.no = nc + 5 # number of outputs per anchor + self.nl = len(anchors) # number of detection layers + self.na = len(anchors[0]) // 2 # number of anchors + self.grid = [torch.zeros(1)] * self.nl # init grid + a = torch.tensor(anchors).float().view(self.nl, -1, 2) + self.register_buffer('anchors', a) # shape(nl,na,2) + self.register_buffer('anchor_grid', a.clone().view(self.nl, 1, -1, 1, 1, 2)) # shape(nl,1,na,1,1,2) + self.m = nn.ModuleList(nn.Conv2d(x, self.no * self.na, 1) for x in ch) # output conv + + def forward(self, x): + # x = x.copy() # for profiling + z = [] # inference output + self.training |= self.export + for i in range(self.nl): + x[i] = self.m[i](x[i]) # conv + bs, _, ny, nx = x[i].shape # x(bs,255,20,20) to x(bs,3,20,20,85) + x[i] = x[i].view(bs, self.na, self.no, ny, nx).permute(0, 1, 3, 4, 2).contiguous() + + if not self.training: # inference + if self.grid[i].shape[2:4] != x[i].shape[2:4]: + self.grid[i] = self._make_grid(nx, ny).to(x[i].device) + + y = x[i].sigmoid() + y[..., 0:2] = (y[..., 0:2] * 2. 
- 0.5 + self.grid[i].to(x[i].device)) * self.stride[i] # xy + y[..., 2:4] = (y[..., 2:4] * 2) ** 2 * self.anchor_grid[i] # wh + z.append(y.view(bs, -1, self.no)) + + return x if self.training else (torch.cat(z, 1), x) + + @staticmethod + def _make_grid(nx=20, ny=20): + yv, xv = torch.meshgrid([torch.arange(ny), torch.arange(nx)]) + return torch.stack((xv, yv), 2).view((1, 1, ny, nx, 2)).float() + + +class Model(nn.Module): + def __init__(self, cfg='yolov5s.yaml', ch=3, nc=None): # model, input channels, number of classes + super(Model, self).__init__() + if isinstance(cfg, dict): + self.yaml = cfg # model dict + else: # is *.yaml + import yaml # for torch hub + self.yaml_file = Path(cfg).name + with open(cfg) as f: + self.yaml = yaml.load(f, Loader=yaml.FullLoader) # model dict + + # Define model + if nc and nc != self.yaml['nc']: + logger.info('Overriding model.yaml nc=%g with nc=%g' % (self.yaml['nc'], nc)) + self.yaml['nc'] = nc # override yaml value + self.model, self.save = parse_model(deepcopy(self.yaml), ch=[ch]) # model, savelist, ch_out + # print([x.shape for x in self.forward(torch.zeros(1, ch, 64, 64))]) + + # Build strides, anchors + m = self.model[-1] # Detect() + if isinstance(m, Detect): + s = 128 # 2x min stride + m.stride = torch.tensor([s / x.shape[-2] for x in self.forward(torch.zeros(1, ch, s, s))]) # forward + m.anchors /= m.stride.view(-1, 1, 1) + check_anchor_order(m) + self.stride = m.stride + self._initialize_biases() # only run once + # print('Strides: %s' % m.stride.tolist()) + + # Init weights, biases + initialize_weights(self) + self.info() + logger.info('') + + def forward(self, x, augment=False, profile=False): + if augment: + img_size = x.shape[-2:] # height, width + s = [1, 0.83, 0.67] # scales + f = [None, 3, None] # flips (2-ud, 3-lr) + y = [] # outputs + for si, fi in zip(s, f): + xi = scale_img(x.flip(fi) if fi else x, si) + yi = self.forward_once(xi)[0] # forward + # cv2.imwrite('img%g.jpg' % s, 255 * xi[0].numpy().transpose((1, 2, 0))[:, :, ::-1]) # save + yi[..., :4] /= si # de-scale + if fi == 2: + yi[..., 1] = img_size[0] - yi[..., 1] # de-flip ud + elif fi == 3: + yi[..., 0] = img_size[1] - yi[..., 0] # de-flip lr + y.append(yi) + return torch.cat(y, 1), None # augmented inference, train + else: + return self.forward_once(x, profile) # single-scale inference, train + + def forward_once(self, x, profile=False): + y, dt = [], [] # outputs + for m in self.model: + if m.f != -1: # if not from previous layer + x = y[m.f] if isinstance(m.f, int) else [x if j == -1 else y[j] for j in m.f] # from earlier layers + + if profile: + o = thop.profile(m, inputs=(x,), verbose=False)[0] / 1E9 * 2 if thop else 0 # FLOPS + t = time_synchronized() + for _ in range(10): + _ = m(x) + dt.append((time_synchronized() - t) * 100) + print('%10.1f%10.0f%10.1fms %-40s' % (o, m.np, dt[-1], m.type)) + + x = m(x) # run + y.append(x if m.i in self.save else None) # save output + + if profile: + print('%.1fms total' % sum(dt)) + return x + + def _initialize_biases(self, cf=None): # initialize biases into Detect(), cf is class frequency + # https://arxiv.org/abs/1708.02002 section 3.3 + # cf = torch.bincount(torch.tensor(np.concatenate(dataset.labels, 0)[:, 0]).long(), minlength=nc) + 1. 
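+        # Worked example (a sketch; values follow from the formula below): at
+        # stride s=8 on a 640x640 image there are (640/8)**2 = 6400 grid cells,
+        # so the objectness bias starts at log(8 / 6400) ~= -6.7, i.e.
+        # sigmoid ~= 1.3e-3, a prior of roughly 8 objects per image at that scale.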
+        m = self.model[-1]  # Detect() module
+        for mi, s in zip(m.m, m.stride):  # from
+            b = mi.bias.view(m.na, -1)  # conv.bias(255) to (3,85)
+            b.data[:, 4] += math.log(8 / (640 / s) ** 2)  # obj (8 objects per 640 image)
+            b.data[:, 5:] += math.log(0.6 / (m.nc - 0.99)) if cf is None else torch.log(cf / cf.sum())  # cls
+            mi.bias = torch.nn.Parameter(b.view(-1), requires_grad=True)
+
+    def _print_biases(self):
+        m = self.model[-1]  # Detect() module
+        for mi in m.m:  # from
+            b = mi.bias.detach().view(m.na, -1).T  # conv.bias(255) to (3,85)
+            print(('%6g Conv2d.bias:' + '%10.3g' * 6) % (mi.weight.shape[1], *b[:5].mean(1).tolist(), b[5:].mean()))
+
+    # def _print_weights(self):
+    #     for m in self.model.modules():
+    #         if type(m) is Bottleneck:
+    #             print('%10.3g' % (m.w.detach().sigmoid() * 2))  # shortcut weights
+
+    def fuse(self):  # fuse model Conv2d() + BatchNorm2d() layers
+        print('Fusing layers... ')
+        for m in self.model.modules():
+            if type(m) is Conv and hasattr(m, 'bn'):
+                m.conv = fuse_conv_and_bn(m.conv, m.bn)  # update conv
+                delattr(m, 'bn')  # remove batchnorm
+                m.forward = m.fuseforward  # update forward
+        self.info()
+        return self
+
+    def nms(self, mode=True):  # add or remove NMS module
+        present = type(self.model[-1]) is NMS  # last layer is NMS
+        if mode and not present:
+            print('Adding NMS... ')
+            m = NMS()  # module
+            m.f = -1  # from
+            m.i = self.model[-1].i + 1  # index
+            self.model.add_module(name='%s' % m.i, module=m)  # add
+            self.eval()
+        elif not mode and present:
+            print('Removing NMS... ')
+            self.model = self.model[:-1]  # remove
+        return self
+
+    def autoshape(self):  # add autoShape module
+        print('Adding autoShape... ')
+        m = autoShape(self)  # wrap model
+        copy_attr(m, self, include=('yaml', 'nc', 'hyp', 'names', 'stride'), exclude=())  # copy attributes
+        return m
+
+    def info(self, verbose=False, img_size=640):  # print model information
+        model_info(self, verbose, img_size)
+
+def parse_model(d, ch):  # model_dict, input_channels(3)
+    logger.info('\n%3s%18s%3s%10s %-40s%-30s' % ('', 'from', 'n', 'params', 'module', 'arguments'))
+    anchors, nc, gd, gw = d['anchors'], d['nc'], d['depth_multiple'], d['width_multiple']
+    na = (len(anchors[0]) // 2) if isinstance(anchors, list) else anchors  # number of anchors
+    no = na * (nc + 5)  # number of outputs = anchors * (classes + 5)
+
+    layers, save, c2 = [], [], ch[-1]  # layers, savelist, ch out
+    for i, (f, n, m, args) in enumerate(d['backbone'] + d['head']):  # from, number, module, args
+        m = eval(m) if isinstance(m, str) else m  # eval strings
+        for j, a in enumerate(args):
+            try:
+                args[j] = eval(a) if isinstance(a, str) else a  # eval strings
+            except:
+                pass
+
+        n = max(round(n * gd), 1) if n > 1 else n  # depth gain
+        if m in [Conv, Bottleneck, SPP, DWConv, MixConv2d, Focus, CrossConv, BottleneckCSP, C3]:
+            c1, c2 = ch[f], args[0]
+
+            # Normal
+            # if i > 0 and args[0] != no:  # channel expansion factor
+            #     ex = 1.75  # exponential (default 2.0)
+            #     e = math.log(c2 / ch[1]) / math.log(2)
+            #     c2 = int(ch[1] * ex ** e)
+            # if m != Focus:
+
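+            # Width scaling (sketch of the arithmetic): channels are scaled by
+            # width_multiple gw and rounded up to a multiple of 8, e.g. gw=0.5
+            # with c2=1024 yields 512, while make_divisible(100, 8) yields 104;
+            # the Detect output width no is deliberately left unscaled.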
+ c2 = make_divisible(c2 * gw, 8) if c2 != no else c2 + + # Experimental + # if i > 0 and args[0] != no: # channel expansion factor + # ex = 1 + gw # exponential (default 2.0) + # ch1 = 32 # ch[1] + # e = math.log(c2 / ch1) / math.log(2) # level 1-n + # c2 = int(ch1 * ex ** e) + # if m != Focus: + # c2 = make_divisible(c2, 8) if c2 != no else c2 + + args = [c1, c2, *args[1:]] + if m in [BottleneckCSP, C3]: + args.insert(2, n) + n = 1 + elif m is nn.BatchNorm2d: + args = [ch[f]] + elif m is Concat: + c2 = sum([ch[-1 if x == -1 else x + 1] for x in f]) + elif m is Detect: + args.append([ch[x + 1] for x in f]) + if isinstance(args[1], int): # number of anchors + args[1] = [list(range(args[1] * 2))] * len(f) + else: + c2 = ch[f] + + m_ = nn.Sequential(*[m(*args) for _ in range(n)]) if n > 1 else m(*args) # module + t = str(m)[8:-2].replace('__main__.', '') # module type + np = sum([x.numel() for x in m_.parameters()]) # number params + m_.i, m_.f, m_.type, m_.np = i, f, t, np # attach index, 'from' index, type, number params + logger.info('%3s%18s%3s%10.0f %-40s%-30s' % (i, f, n, np, t, args)) # print + save.extend(x % i for x in ([f] if isinstance(f, int) else f) if x != -1) # append to savelist + layers.append(m_) + ch.append(c2) + return nn.Sequential(*layers), sorted(save) + + + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--cfg', type=str, default='yolov5s.yaml', help='model.yaml') + parser.add_argument('--device', default='', help='cuda device, i.e. 0 or 0,1,2,3 or cpu') + opt = parser.parse_args() + opt.cfg = check_file(opt.cfg) # check file + set_logging() + device = select_device(opt.device) + + # Create model + model = Model(opt.cfg).to(device) + model.train() + + # Profile + # img = torch.rand(8 if torch.cuda.is_available() else 1, 3, 640, 640).to(device) + # y = model(img, profile=True) + + # Tensorboard + # from torch.utils.tensorboard import SummaryWriter + # tb_writer = SummaryWriter() + # print("Run 'tensorboard --logdir=models/runs' to view tensorboard at http://localhost:6006/") + # tb_writer.add_graph(model.model, img) # add model to tensorboard + # tb_writer.add_image('test', img[0], dataformats='CWH') # add model to tensorboard diff --git a/models/yolov5s-noupsample.yaml b/models/yolov5s-noupsample.yaml new file mode 100644 index 0000000..2b6d055 --- /dev/null +++ b/models/yolov5s-noupsample.yaml @@ -0,0 +1,34 @@ +# parameters +nc: 80 # number of classes +depth_multiple: 0.33 # model depth multiple +width_multiple: 0.50 # layer channel multiple + +# anchors +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Focus, [64, 3]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, BottleneckCSP, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 9, BottleneckCSP, [256]], #4 + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, BottleneckCSP, [512]], #6 + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 1, SPP, [1024, [5, 9, 13]]], + [-1, 3, BottleneckCSP, [1024, False]], # 9 + ] + +# YOLOv5 head +head: + [[4, 1, Conv, [256, 3, 1]], #10 + [6, 1, Conv, [512, 3, 1]], #11 + [[7, 9], 1, Concat, [1]], # cat head P5 + [-1, 3, BottleneckCSP, [1024, False]], # 13 + [[10, 11, 13], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/models/yolov5s.yaml b/models/yolov5s.yaml new file mode 100644 index 0000000..2bec452 --- /dev/null +++ b/models/yolov5s.yaml @@ -0,0 +1,48 @@ +# parameters +nc: 
80 # number of classes +depth_multiple: 0.33 # model depth multiple +width_multiple: 0.50 # layer channel multiple + +# anchors +anchors: + - [10,13, 16,30, 33,23] # P3/8 + - [30,61, 62,45, 59,119] # P4/16 + - [116,90, 156,198, 373,326] # P5/32 + +# YOLOv5 backbone +backbone: + # [from, number, module, args] + [[-1, 1, Focus, [64, 3]], # 0-P1/2 + [-1, 1, Conv, [128, 3, 2]], # 1-P2/4 + [-1, 3, BottleneckCSP, [128]], + [-1, 1, Conv, [256, 3, 2]], # 3-P3/8 + [-1, 9, BottleneckCSP, [256]], + [-1, 1, Conv, [512, 3, 2]], # 5-P4/16 + [-1, 9, BottleneckCSP, [512]], + [-1, 1, Conv, [1024, 3, 2]], # 7-P5/32 + [-1, 1, SPP, [1024, [5, 9, 13]]], + [-1, 3, BottleneckCSP, [1024, False]], # 9 + ] + +# YOLOv5 head +head: + [[-1, 1, Conv, [512, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 6], 1, Concat, [1]], # cat backbone P4 + [-1, 3, BottleneckCSP, [512, False]], # 13 + + [-1, 1, Conv, [256, 1, 1]], + [-1, 1, nn.Upsample, [None, 2, 'nearest']], + [[-1, 4], 1, Concat, [1]], # cat backbone P3 + [-1, 3, BottleneckCSP, [256, False]], # 17 (P3/8-small) + + [-1, 1, Conv, [256, 3, 2]], + [[-1, 14], 1, Concat, [1]], # cat head P4 + [-1, 3, BottleneckCSP, [512, False]], # 20 (P4/16-medium) + + [-1, 1, Conv, [512, 3, 2]], + [[-1, 10], 1, Concat, [1]], # cat head P5 + [-1, 3, BottleneckCSP, [1024, False]], # 23 (P5/32-large) + + [[17, 20, 23], 1, Detect, [nc, anchors]], # Detect(P3, P4, P5) + ] diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..8812d87 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,32 @@ +# pip install -r requirements.txt + +# base ---------------------------------------- +matplotlib>=3.2.2 +numpy>=1.18.5 +opencv-python>=4.1.2 +Pillow +PyYAML>=5.3.1 +scipy>=1.4.1 +torch>=1.7.0 +torchvision>=0.8.1 +tqdm>=4.41.0 + +# logging ------------------------------------- +tensorboard>=2.4.1 +# wandb + +# plotting ------------------------------------ +seaborn>=0.11.0 +pandas + +# export -------------------------------------- +# coremltools>=4.1 +onnx==1.7.0 +onnxruntime +onnx-simplifier +# scikit-learn==0.19.2 # for coreml quantization + +# extras -------------------------------------- +thop # FLOPS computation +pycocotools>=2.0 # COCO mAP + diff --git a/test.py b/test.py new file mode 100644 index 0000000..828c705 --- /dev/null +++ b/test.py @@ -0,0 +1,327 @@ +import argparse +import glob +import json +import os +from pathlib import Path + +import numpy as np +import torch +import yaml +from tqdm import tqdm + +from models.experimental import attempt_load +from utils.datasets import create_dataloader +from utils.general import coco80_to_coco91_class, check_dataset, check_file, check_img_size, box_iou, \ + non_max_suppression, scale_coords, xyxy2xywh, xywh2xyxy, clip_coords, set_logging, increment_path +from utils.loss import compute_loss +from utils.metrics import ap_per_class +from utils.plots import plot_images, output_to_target +from utils.torch_utils import select_device, time_synchronized + + +def test(data, + weights=None, + batch_size=16, + imgsz=640, + conf_thres=0.001, + iou_thres=0.6, # for NMS + save_json=False, + single_cls=False, + augment=False, + verbose=False, + model=None, + dataloader=None, + save_dir=Path(''), # for saving images + save_txt=False, # for auto-labelling + save_conf=False, + plots=True, + log_imgs=0): # number of logged images + + # Initialize/load model and set device + training = model is not None + if training: # called by train.py + device = next(model.parameters()).device # get model device + + else: # called directly + 
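# Note: in this standalone path test() also reads opt.device, opt.save_txt,
+        # opt.project, opt.name and opt.task from the argparse namespace that is
+        # built under __main__ below.
+        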
set_logging() + device = select_device(opt.device, batch_size=batch_size) + save_txt = opt.save_txt # save *.txt labels + + # Directories + save_dir = Path(increment_path(Path(opt.project) / opt.name, exist_ok=opt.exist_ok)) # increment run + (save_dir / 'labels' if save_txt else save_dir).mkdir(parents=True, exist_ok=True) # make dir + + # Load model + model = attempt_load(weights, map_location=device) # load FP32 model + imgsz = check_img_size(imgsz, s=model.stride.max()) # check img_size + + # Multi-GPU disabled, incompatible with .half() https://github.com/ultralytics/yolov5/issues/99 + # if device.type != 'cpu' and torch.cuda.device_count() > 1: + # model = nn.DataParallel(model) + + # Half + half = device.type != 'cpu' # half precision only supported on CUDA + if half: + model.half() + + # Configure + model.eval() + is_coco = data.endswith('coco.yaml') # is COCO dataset + with open(data) as f: + data = yaml.load(f, Loader=yaml.FullLoader) # model dict + check_dataset(data) # check + nc = 1 if single_cls else int(data['nc']) # number of classes + iouv = torch.linspace(0.5, 0.95, 10).to(device) # iou vector for mAP@0.5:0.95 + niou = iouv.numel() + + # Logging + log_imgs, wandb = min(log_imgs, 100), None # ceil + try: + import wandb # Weights & Biases + except ImportError: + log_imgs = 0 + + # Dataloader + if not training: + img = torch.zeros((1, 3, imgsz, imgsz), device=device) # init img + _ = model(img.half() if half else img) if device.type != 'cpu' else None # run once + path = data['test'] if opt.task == 'test' else data['val'] # path to val/test images + dataloader = create_dataloader(path, imgsz, batch_size, model.stride.max(), opt, pad=0.5, rect=True)[0] + + seen = 0 + names = {k: v for k, v in enumerate(model.names if hasattr(model, 'names') else model.module.names)} + coco91class = coco80_to_coco91_class() + s = ('%20s' + '%12s' * 6) % ('Class', 'Images', 'Targets', 'P', 'R', 'mAP@.5', 'mAP@.5:.95') + p, r, f1, mp, mr, map50, map, t0, t1 = 0., 0., 0., 0., 0., 0., 0., 0., 0. 
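+    # Accumulators: p/r/f1 hold per-class precision/recall/F1, mp/mr their means,
+    # map50 is mAP@0.5, map is mAP@0.5:0.95, and t0/t1 collect cumulative
+    # inference and NMS time in seconds.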
+    loss = torch.zeros(3, device=device)
+    jdict, stats, ap, ap_class, wandb_images = [], [], [], [], []
+    for batch_i, (img, targets, paths, shapes) in enumerate(tqdm(dataloader, desc=s)):
+        img = img.to(device, non_blocking=True)
+        img = img.half() if half else img.float()  # uint8 to fp16/32
+        img /= 255.0  # 0 - 255 to 0.0 - 1.0
+        targets = targets.to(device)
+        nb, _, height, width = img.shape  # batch size, channels, height, width
+        whwh = torch.Tensor([width, height, width, height]).to(device)
+
+        # Disable gradients
+        with torch.no_grad():
+            # Run model
+            t = time_synchronized()
+            inf_out, train_out = model(img, augment=augment)  # inference and training outputs
+            t0 += time_synchronized() - t
+
+            # Compute loss
+            if training:  # if model has loss hyperparameters
+                loss += compute_loss([x.float() for x in train_out], targets, model)[1][:3]  # box, obj, cls
+
+            # Run NMS
+            t = time_synchronized()
+            output = non_max_suppression(inf_out, conf_thres=conf_thres, iou_thres=iou_thres)
+            t1 += time_synchronized() - t
+
+        # Statistics per image
+        for si, pred in enumerate(output):
+            labels = targets[targets[:, 0] == si, 1:]
+            nl = len(labels)
+            tcls = labels[:, 0].tolist() if nl else []  # target class
+            seen += 1
+
+            if len(pred) == 0:
+                if nl:
+                    stats.append((torch.zeros(0, niou, dtype=torch.bool), torch.Tensor(), torch.Tensor(), tcls))
+                continue
+
+            # Append to text file
+            path = Path(paths[si])
+            if save_txt:
+                gn = torch.tensor(shapes[si][0])[[1, 0, 1, 0]]  # normalization gain whwh
+                x = pred.clone()
+                x[:, :4] = scale_coords(img[si].shape[1:], x[:, :4], shapes[si][0], shapes[si][1])  # to original
+                for *xyxy, conf, cls in x:
+                    xywh = (xyxy2xywh(torch.tensor(xyxy).view(1, 4)) / gn).view(-1).tolist()  # normalized xywh
+                    line = (cls, *xywh, conf) if save_conf else (cls, *xywh)  # label format
+                    with open(save_dir / 'labels' / (path.stem + '.txt'), 'a') as f:
+                        f.write(('%g ' * len(line)).rstrip() % line + '\n')
+
+            # W&B logging
+            if plots and len(wandb_images) < log_imgs:
+                box_data = [{"position": {"minX": xyxy[0], "minY": xyxy[1], "maxX": xyxy[2], "maxY": xyxy[3]},
+                             "class_id": int(cls),
+                             "box_caption": "%s %.3f" % (names[cls], conf),
+                             "scores": {"class_score": conf},
+                             "domain": "pixel"} for *xyxy, conf, cls in pred.tolist()]
+                boxes = {"predictions": {"box_data": box_data, "class_labels": names}}
+                wandb_images.append(wandb.Image(img[si], boxes=boxes, caption=path.name))
+
+            # Clip boxes to image bounds
+            clip_coords(pred, (height, width))
+
+            # Append to pycocotools JSON dictionary
+            if save_json:
+                # [{"image_id": 42, "category_id": 18, "bbox": [258.15, 41.29, 348.26, 243.78], "score": 0.236}, ...
+                image_id = int(path.stem) if path.stem.isnumeric() else path.stem
+                box = pred[:, :4].clone()  # xyxy
+                scale_coords(img[si].shape[1:], box, shapes[si][0], shapes[si][1])  # to original shape
+                box = xyxy2xywh(box)  # xywh
+                box[:, :2] -= box[:, 2:] / 2  # xy center to top-left corner
+                for p, b in zip(pred.tolist(), box.tolist()):
+                    jdict.append({'image_id': image_id,
+                                  'category_id': coco91class[int(p[5])] if is_coco else int(p[5]),
+                                  'bbox': [round(x, 3) for x in b],
+                                  'score': round(p[4], 5)})
+
+            # Assign all predictions as incorrect
+            correct = torch.zeros(pred.shape[0], niou, dtype=torch.bool, device=device)
+            if nl:
+                detected = []  # target indices
+                tcls_tensor = labels[:, 0]
+
+                # target boxes
+                tbox = xywh2xyxy(labels[:, 1:5]) * whwh
+
+                # Per target class
+                for cls in torch.unique(tcls_tensor):
+                    ti = (cls == tcls_tensor).nonzero(as_tuple=False).view(-1)  # target indices
+                    pi = (cls == pred[:, 5]).nonzero(as_tuple=False).view(-1)  # prediction indices
+
+                    # Search for detections
+                    if pi.shape[0]:
+                        # Prediction to target ious
+                        ious, i = box_iou(pred[pi, :4], tbox[ti]).max(1)  # best ious, indices
+
+                        # Append detections
+                        detected_set = set()
+                        for j in (ious > iouv[0]).nonzero(as_tuple=False):
+                            d = ti[i[j]]  # detected target
+                            if d.item() not in detected_set:
+                                detected_set.add(d.item())
+                                detected.append(d)
+                                correct[pi[j]] = ious[j] > iouv  # iou_thres is 1xn
+                                if len(detected) == nl:  # all targets already located in image
+                                    break
+
+            # Append statistics (correct, conf, pcls, tcls)
+            stats.append((correct.cpu(), pred[:, 4].cpu(), pred[:, 5].cpu(), tcls))
+
+        # Plot images
+        if plots and batch_i < 3:
+            f = save_dir / f'test_batch{batch_i}_labels.jpg'  # filename
+            plot_images(img, targets, paths, f, names)  # labels
+            f = save_dir / f'test_batch{batch_i}_pred.jpg'
+            plot_images(img, output_to_target(output, width, height), paths, f, names)  # predictions
+
+    # Compute statistics
+    stats = [np.concatenate(x, 0) for x in zip(*stats)]  # to numpy
+    if len(stats) and stats[0].any():
+        p, r, ap, f1, ap_class = ap_per_class(*stats, plot=plots, fname=save_dir / 'precision-recall_curve.png')
+        p, r, ap50, ap = p[:, 0], r[:, 0], ap[:, 0], ap.mean(1)  # [P, R, AP@0.5, AP@0.5:0.95]
+        mp, mr, map50, map = p.mean(), r.mean(), ap50.mean(), ap.mean()
+        nt = np.bincount(stats[3].astype(np.int64), minlength=nc)  # number of targets per class
+    else:
+        nt = torch.zeros(1)
+
+    # W&B logging
+    if plots and wandb:
+        wandb.log({"Images": wandb_images})
+        wandb.log({"Validation": [wandb.Image(str(x), caption=x.name) for x in sorted(save_dir.glob('test*.jpg'))]})
+
+    # Print results
+    pf = '%20s' + '%12.3g' * 6  # print format
+    print(pf % ('all', seen, nt.sum(), mp, mr, map50, map))
+
+    # Print results per class
+    if verbose and nc > 1 and len(stats):
+        for i, c in enumerate(ap_class):
+            print(pf % (names[c], seen, nt[c], p[i], r[i], ap50[i], ap[i]))
+
+    # Print speeds
+    t = tuple(x / seen * 1E3 for x in (t0, t1, t0 + t1)) + (imgsz, imgsz, batch_size)  # tuple
+    if not training:
+        print('Speed: %.1f/%.1f/%.1f ms inference/NMS/total per %gx%g image at batch-size %g' % t)
+
+    # Save JSON
+    if save_json and len(jdict):
+        w = Path(weights[0] if isinstance(weights, list) else weights).stem if weights is not None else ''  # weights
+        anno_json = glob.glob('../coco/annotations/instances_val*.json')[0]  # annotations json
+        pred_json = str(save_dir / f"{w}_predictions.json")  # predictions json
+        print('\nEvaluating pycocotools mAP... saving %s...'
% pred_json) + with open(pred_json, 'w') as f: + json.dump(jdict, f) + + try: # https://github.com/cocodataset/cocoapi/blob/master/PythonAPI/pycocoEvalDemo.ipynb + from pycocotools.coco import COCO + from pycocotools.cocoeval import COCOeval + + anno = COCO(anno_json) # init annotations api + pred = anno.loadRes(pred_json) # init predictions api + eval = COCOeval(anno, pred, 'bbox') + if is_coco: + eval.params.imgIds = [int(Path(x).stem) for x in dataloader.dataset.img_files] # image IDs to evaluate + eval.evaluate() + eval.accumulate() + eval.summarize() + map, map50 = eval.stats[:2] # update results (mAP@0.5:0.95, mAP@0.5) + except Exception as e: + print('ERROR: pycocotools unable to run: %s' % e) + + # Return results + if not training: + print('Results saved to %s' % save_dir) + model.float() # for training + maps = np.zeros(nc) + map + for i, c in enumerate(ap_class): + maps[c] = ap[i] + return (mp, mr, map50, map, *(loss.cpu() / len(dataloader)).tolist()), maps, t + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(prog='test.py') + parser.add_argument('--weights', nargs='+', type=str, default='best.pt', help='model.pt path(s)') + parser.add_argument('--data', type=str, default='data/coco128.yaml', help='*.data path') + parser.add_argument('--batch-size', type=int, default=32, help='size of each image batch') + parser.add_argument('--img-size', type=int, default=640, help='inference size (pixels)') + parser.add_argument('--conf-thres', type=float, default=0.001, help='object confidence threshold') + parser.add_argument('--iou-thres', type=float, default=0.65, help='IOU threshold for NMS') + parser.add_argument('--task', default='val', help="'val', 'test', 'study'") + parser.add_argument('--device', default='cpu', help='cuda device, i.e. 0 or 0,1,2,3 or cpu') + parser.add_argument('--single-cls', action='store_true', help='treat as single-class dataset') + parser.add_argument('--augment', action='store_true', help='augmented inference') + parser.add_argument('--verbose', action='store_true', help='report mAP by class') + parser.add_argument('--save-txt', action='store_true', help='save results to *.txt') + parser.add_argument('--save-conf', action='store_true', help='save confidences in --save-txt labels') + parser.add_argument('--save-json', action='store_true', help='save a cocoapi-compatible JSON results file') + parser.add_argument('--project', default='runs/test', help='save to project/name') + parser.add_argument('--name', default='exp', help='save to project/name') + parser.add_argument('--exist-ok', action='store_true', help='existing project/name ok, do not increment') + opt = parser.parse_args() + opt.save_json |= opt.data.endswith('coco.yaml') + opt.data = check_file(opt.data) # check file + print(opt) + + if opt.task in ['val', 'test']: # run normally + test(opt.data, + opt.weights, + opt.batch_size, + opt.img_size, + opt.conf_thres, + opt.iou_thres, + opt.save_json, + opt.single_cls, + opt.augment, + opt.verbose, + save_txt=opt.save_txt, + save_conf=opt.save_conf, + ) + + elif opt.task == 'study': # run over a range of settings and save/plot + for weights in ['yolov5s.pt', 'yolov5m.pt', 'yolov5l.pt', 'yolov5x.pt']: + f = 'study_%s_%s.txt' % (Path(opt.data).stem, Path(weights).stem) # filename to save to + x = list(range(320, 800, 64)) # x axis + y = [] # y axis + for i in x: # img-size + print('\nRunning %s point %s...' 
% (f, i)) + r, _, t = test(opt.data, weights, opt.batch_size, i, opt.conf_thres, opt.iou_thres, opt.save_json) + y.append(r + t) # results and times + np.savetxt(f, y, fmt='%10.4g') # save + os.system('zip -r study.zip study_*.txt') + # utils.general.plot_study_txt(f, x) # plot diff --git a/tools/analysis_tools/analyze_logs.py b/tools/analysis_tools/analyze_logs.py new file mode 100644 index 0000000..8ca81d3 --- /dev/null +++ b/tools/analysis_tools/analyze_logs.py @@ -0,0 +1,180 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import argparse +import json +from collections import defaultdict + +import matplotlib.pyplot as plt +import numpy as np +import seaborn as sns + + +def cal_train_time(log_dicts, args): + for i, log_dict in enumerate(log_dicts): + print(f'{"-" * 5}Analyze train time of {args.json_logs[i]}{"-" * 5}') + all_times = [] + for epoch in log_dict.keys(): + if args.include_outliers: + all_times.append(log_dict[epoch]['time']) + else: + all_times.append(log_dict[epoch]['time'][1:]) + all_times = np.array(all_times) + epoch_ave_time = all_times.mean(-1) + slowest_epoch = epoch_ave_time.argmax() + fastest_epoch = epoch_ave_time.argmin() + std_over_epoch = epoch_ave_time.std() + print(f'slowest epoch {slowest_epoch + 1}, ' + f'average time is {epoch_ave_time[slowest_epoch]:.4f}') + print(f'fastest epoch {fastest_epoch + 1}, ' + f'average time is {epoch_ave_time[fastest_epoch]:.4f}') + print(f'time std over epochs is {std_over_epoch:.4f}') + print(f'average iter time: {np.mean(all_times):.4f} s/iter') + print() + + +def plot_curve(log_dicts, args): + if args.backend is not None: + plt.switch_backend(args.backend) + sns.set_style(args.style) + # if legend is None, use {filename}_{key} as legend + legend = args.legend + if legend is None: + legend = [] + for json_log in args.json_logs: + for metric in args.keys: + legend.append(f'{json_log}_{metric}') + assert len(legend) == (len(args.json_logs) * len(args.keys)) + metrics = args.keys + + num_metrics = len(metrics) + for i, log_dict in enumerate(log_dicts): + epochs = list(log_dict.keys()) + for j, metric in enumerate(metrics): + print(f'plot curve of {args.json_logs[i]}, metric is {metric}') + if metric not in log_dict[epochs[0]]: + raise KeyError( + f'{args.json_logs[i]} does not contain metric {metric}') + + if 'mAP' in metric: + xs = np.arange(1, max(epochs) + 1) + ys = [] + for epoch in epochs: + ys += log_dict[epoch][metric] + ax = plt.gca() + ax.set_xticks(xs) + plt.xlabel('epoch') + plt.plot(xs, ys, label=legend[i * num_metrics + j], marker='o') + else: + xs = [] + ys = [] + num_iters_per_epoch = log_dict[epochs[0]]['iter'][-2] + for epoch in epochs: + iters = log_dict[epoch]['iter'] + if log_dict[epoch]['mode'][-1] == 'val': + iters = iters[:-1] + xs.append( + np.array(iters) + (epoch - 1) * num_iters_per_epoch) + ys.append(np.array(log_dict[epoch][metric][:len(iters)])) + xs = np.concatenate(xs) + ys = np.concatenate(ys) + plt.xlabel('iter') + plt.plot( + xs, ys, label=legend[i * num_metrics + j], linewidth=0.5) + plt.legend() + if args.title is not None: + plt.title(args.title) + if args.out is None: + plt.show() + else: + print(f'save curve to: {args.out}') + plt.savefig(args.out) + plt.cla() + + +def add_plot_parser(subparsers): + parser_plt = subparsers.add_parser( + 'plot_curve', help='parser for plotting curves') + parser_plt.add_argument( + 'json_logs', + type=str, + nargs='+', + help='path of train log in json format') + parser_plt.add_argument( + '--keys', + type=str, + nargs='+', + default=['bbox_mAP'], + 
help='the metric that you want to plot') + parser_plt.add_argument('--title', type=str, help='title of figure') + parser_plt.add_argument( + '--legend', + type=str, + nargs='+', + default=None, + help='legend of each plot') + parser_plt.add_argument( + '--backend', type=str, default=None, help='backend of plt') + parser_plt.add_argument( + '--style', type=str, default='dark', help='style of plt') + parser_plt.add_argument('--out', type=str, default=None) + + +def add_time_parser(subparsers): + parser_time = subparsers.add_parser( + 'cal_train_time', + help='parser for computing the average time per training iteration') + parser_time.add_argument( + 'json_logs', + type=str, + nargs='+', + help='path of train log in json format') + parser_time.add_argument( + '--include-outliers', + action='store_true', + help='include the first value of every epoch when computing ' + 'the average time') + + +def parse_args(): + parser = argparse.ArgumentParser(description='Analyze Json Log') + # currently only support plot curve and calculate average train time + subparsers = parser.add_subparsers(dest='task', help='task parser') + add_plot_parser(subparsers) + add_time_parser(subparsers) + args = parser.parse_args() + return args + + +def load_json_logs(json_logs): + # load and convert json_logs to log_dict, key is epoch, value is a sub dict + # keys of sub dict is different metrics, e.g. memory, bbox_mAP + # value of sub dict is a list of corresponding values of all iterations + log_dicts = [dict() for _ in json_logs] + for json_log, log_dict in zip(json_logs, log_dicts): + with open(json_log, 'r') as log_file: + for line in log_file: + log = json.loads(line.strip()) + # skip lines without `epoch` field + if 'epoch' not in log: + continue + epoch = log.pop('epoch') + if epoch not in log_dict: + log_dict[epoch] = defaultdict(list) + for k, v in log.items(): + log_dict[epoch][k].append(v) + return log_dicts + + +def main(): + args = parse_args() + + json_logs = args.json_logs + for json_log in json_logs: + assert json_log.endswith('.json') + + log_dicts = load_json_logs(json_logs) + + eval(args.task)(log_dicts, args) + + +if __name__ == '__main__': + main() diff --git a/tools/analysis_tools/analyze_results.py b/tools/analysis_tools/analyze_results.py new file mode 100644 index 0000000..8265265 --- /dev/null +++ b/tools/analysis_tools/analyze_results.py @@ -0,0 +1,199 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import argparse +import os.path as osp + +import mmcv +import numpy as np +from mmcv import Config, DictAction + +from mmdet.core.evaluation import eval_map +from mmdet.core.visualization import imshow_gt_det_bboxes +from mmdet.datasets import build_dataset, get_loading_pipeline + + +def bbox_map_eval(det_result, annotation): + """Evaluate mAP of single image det result. + + Args: + det_result (list[list]): [[cls1_det, cls2_det, ...], ...]. + The outer list indicates images, and the inner list indicates + per-class detected bboxes. 
+ annotation (dict): Ground truth annotations where keys of + annotations are: + + - bboxes: numpy array of shape (n, 4) + - labels: numpy array of shape (n, ) + - bboxes_ignore (optional): numpy array of shape (k, 4) + - labels_ignore (optional): numpy array of shape (k, ) + + Returns: + float: mAP + """ + + # use only bbox det result + if isinstance(det_result, tuple): + bbox_det_result = [det_result[0]] + else: + bbox_det_result = [det_result] + # mAP + iou_thrs = np.linspace( + .5, 0.95, int(np.round((0.95 - .5) / .05)) + 1, endpoint=True) + mean_aps = [] + for thr in iou_thrs: + mean_ap, _ = eval_map( + bbox_det_result, [annotation], iou_thr=thr, logger='silent') + mean_aps.append(mean_ap) + return sum(mean_aps) / len(mean_aps) + + +class ResultVisualizer: + """Display and save evaluation results. + + Args: + show (bool): Whether to show the image. Default: True + wait_time (float): Value of waitKey param. Default: 0. + score_thr (float): Minimum score of bboxes to be shown. + Default: 0 + """ + + def __init__(self, show=False, wait_time=0, score_thr=0): + self.show = show + self.wait_time = wait_time + self.score_thr = score_thr + + def _save_image_gts_results(self, dataset, results, mAPs, out_dir=None): + mmcv.mkdir_or_exist(out_dir) + + for mAP_info in mAPs: + index, mAP = mAP_info + data_info = dataset.prepare_train_img(index) + + # calc save file path + filename = data_info['filename'] + if data_info['img_prefix'] is not None: + filename = osp.join(data_info['img_prefix'], filename) + else: + filename = data_info['filename'] + fname, name = osp.splitext(osp.basename(filename)) + save_filename = fname + '_' + str(round(mAP, 3)) + name + out_file = osp.join(out_dir, save_filename) + imshow_gt_det_bboxes( + data_info['img'], + data_info, + results[index], + dataset.CLASSES, + show=self.show, + score_thr=self.score_thr, + wait_time=self.wait_time, + out_file=out_file) + + def evaluate_and_show(self, + dataset, + results, + topk=20, + show_dir='work_dir', + eval_fn=None): + """Evaluate and show results. + + Args: + dataset (Dataset): A PyTorch dataset. + results (list): Det results from test results pkl file + topk (int): Number of the highest topk and + lowest topk after evaluation index sorting. Default: 20 + show_dir (str, optional): The filename to write the image. 
+ Default: 'work_dir' + eval_fn (callable, optional): Eval function, Default: None + """ + + assert topk > 0 + if (topk * 2) > len(dataset): + topk = len(dataset) // 2 + + if eval_fn is None: + eval_fn = bbox_map_eval + else: + assert callable(eval_fn) + + prog_bar = mmcv.ProgressBar(len(results)) + _mAPs = {} + for i, (result, ) in enumerate(zip(results)): + # self.dataset[i] should not call directly + # because there is a risk of mismatch + data_info = dataset.prepare_train_img(i) + mAP = eval_fn(result, data_info['ann_info']) + _mAPs[i] = mAP + prog_bar.update() + + # descending select topk image + _mAPs = list(sorted(_mAPs.items(), key=lambda kv: kv[1])) + good_mAPs = _mAPs[-topk:] + bad_mAPs = _mAPs[:topk] + + good_dir = osp.abspath(osp.join(show_dir, 'good')) + bad_dir = osp.abspath(osp.join(show_dir, 'bad')) + self._save_image_gts_results(dataset, results, good_mAPs, good_dir) + self._save_image_gts_results(dataset, results, bad_mAPs, bad_dir) + + +def parse_args(): + parser = argparse.ArgumentParser( + description='MMDet eval image prediction result for each') + parser.add_argument('config', help='test config file path') + parser.add_argument( + 'prediction_path', help='prediction path where test pkl result') + parser.add_argument( + 'show_dir', help='directory where painted images will be saved') + parser.add_argument('--show', action='store_true', help='show results') + parser.add_argument( + '--wait-time', + type=float, + default=0, + help='the interval of show (s), 0 is block') + parser.add_argument( + '--topk', + default=20, + type=int, + help='saved Number of the highest topk ' + 'and lowest topk after index sorting') + parser.add_argument( + '--show-score-thr', + type=float, + default=0, + help='score threshold (default: 0.)') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. If the value to ' + 'be overwritten is a list, it should be like key="[a,b]" or key=a,b ' + 'It also allows nested list/tuple values, e.g. key="[(a,b),(c,d)]" ' + 'Note that the quotation marks are necessary and that no white space ' + 'is allowed.') + args = parser.parse_args() + return args + + +def main(): + args = parse_args() + + mmcv.check_file_exist(args.prediction_path) + + cfg = Config.fromfile(args.config) + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + cfg.data.test.test_mode = True + + cfg.data.test.pop('samples_per_gpu', 0) + cfg.data.test.pipeline = get_loading_pipeline(cfg.data.train.pipeline) + dataset = build_dataset(cfg.data.test) + outputs = mmcv.load(args.prediction_path) + + result_visualizer = ResultVisualizer(args.show, args.wait_time, + args.show_score_thr) + result_visualizer.evaluate_and_show( + dataset, outputs, topk=args.topk, show_dir=args.show_dir) + + +if __name__ == '__main__': + main() diff --git a/tools/analysis_tools/benchmark.py b/tools/analysis_tools/benchmark.py new file mode 100644 index 0000000..91f34c7 --- /dev/null +++ b/tools/analysis_tools/benchmark.py @@ -0,0 +1,187 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
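+# Example invocation (hypothetical paths; main() below rejects --launcher none,
+# so a distributed launcher is required even for a single GPU):
+#   python -m torch.distributed.launch --nproc_per_node=1 \
+#       tools/analysis_tools/benchmark.py configs/my_cfg.py ckpt.pth --launcher pytorch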
+import argparse +import copy +import os +import time + +import torch +from mmcv import Config, DictAction +from mmcv.cnn import fuse_conv_bn +from mmcv.parallel import MMDistributedDataParallel +from mmcv.runner import init_dist, load_checkpoint, wrap_fp16_model + +from mmdet.datasets import (build_dataloader, build_dataset, + replace_ImageToTensor) +from mmdet.models import build_detector + + +def parse_args(): + parser = argparse.ArgumentParser(description='MMDet benchmark a model') + parser.add_argument('config', help='test config file path') + parser.add_argument('checkpoint', help='checkpoint file') + parser.add_argument( + '--repeat-num', + type=int, + default=1, + help='number of repeat times of measurement for averaging the results') + parser.add_argument( + '--max-iter', type=int, default=2000, help='num of max iter') + parser.add_argument( + '--log-interval', type=int, default=50, help='interval of logging') + parser.add_argument( + '--fuse-conv-bn', + action='store_true', + help='Whether to fuse conv and bn, this will slightly increase' + 'the inference speed') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. If the value to ' + 'be overwritten is a list, it should be like key="[a,b]" or key=a,b ' + 'It also allows nested list/tuple values, e.g. key="[(a,b),(c,d)]" ' + 'Note that the quotation marks are necessary and that no white space ' + 'is allowed.') + parser.add_argument( + '--launcher', + choices=['none', 'pytorch', 'slurm', 'mpi'], + default='none', + help='job launcher') + parser.add_argument('--local_rank', type=int, default=0) + args = parser.parse_args() + if 'LOCAL_RANK' not in os.environ: + os.environ['LOCAL_RANK'] = str(args.local_rank) + return args + + +def measure_inference_speed(cfg, checkpoint, max_iter, log_interval, + is_fuse_conv_bn): + # set cudnn_benchmark + if cfg.get('cudnn_benchmark', False): + torch.backends.cudnn.benchmark = True + cfg.model.pretrained = None + cfg.data.test.test_mode = True + + # build the dataloader + samples_per_gpu = cfg.data.test.pop('samples_per_gpu', 1) + if samples_per_gpu > 1: + # Replace 'ImageToTensor' to 'DefaultFormatBundle' + cfg.data.test.pipeline = replace_ImageToTensor(cfg.data.test.pipeline) + dataset = build_dataset(cfg.data.test) + data_loader = build_dataloader( + dataset, + samples_per_gpu=1, + # Because multiple processes will occupy additional CPU resources, + # FPS statistics will be more unstable when workers_per_gpu is not 0. + # It is reasonable to set workers_per_gpu to 0. 
+ workers_per_gpu=0, + dist=True, + shuffle=False) + + # build the model and load checkpoint + cfg.model.train_cfg = None + model = build_detector(cfg.model, test_cfg=cfg.get('test_cfg')) + fp16_cfg = cfg.get('fp16', None) + if fp16_cfg is not None: + wrap_fp16_model(model) + load_checkpoint(model, checkpoint, map_location='cpu') + if is_fuse_conv_bn: + model = fuse_conv_bn(model) + + model = MMDistributedDataParallel( + model.cuda(), + device_ids=[torch.cuda.current_device()], + broadcast_buffers=False) + model.eval() + + # the first several iterations may be very slow so skip them + num_warmup = 5 + pure_inf_time = 0 + fps = 0 + + # benchmark with 2000 image and take the average + for i, data in enumerate(data_loader): + + torch.cuda.synchronize() + start_time = time.perf_counter() + + with torch.no_grad(): + model(return_loss=False, rescale=True, **data) + + torch.cuda.synchronize() + elapsed = time.perf_counter() - start_time + + if i >= num_warmup: + pure_inf_time += elapsed + if (i + 1) % log_interval == 0: + fps = (i + 1 - num_warmup) / pure_inf_time + print( + f'Done image [{i + 1:<3}/ {max_iter}], ' + f'fps: {fps:.1f} img / s, ' + f'times per image: {1000 / fps:.1f} ms / img', + flush=True) + + if (i + 1) == max_iter: + fps = (i + 1 - num_warmup) / pure_inf_time + print( + f'Overall fps: {fps:.1f} img / s, ' + f'times per image: {1000 / fps:.1f} ms / img', + flush=True) + break + return fps + + +def repeat_measure_inference_speed(cfg, + checkpoint, + max_iter, + log_interval, + is_fuse_conv_bn, + repeat_num=1): + assert repeat_num >= 1 + + fps_list = [] + + for _ in range(repeat_num): + # + cp_cfg = copy.deepcopy(cfg) + + fps_list.append( + measure_inference_speed(cp_cfg, checkpoint, max_iter, log_interval, + is_fuse_conv_bn)) + + if repeat_num > 1: + fps_list_ = [round(fps, 1) for fps in fps_list] + times_pre_image_list_ = [round(1000 / fps, 1) for fps in fps_list] + mean_fps_ = sum(fps_list_) / len(fps_list_) + mean_times_pre_image_ = sum(times_pre_image_list_) / len( + times_pre_image_list_) + print( + f'Overall fps: {fps_list_}[{mean_fps_:.1f}] img / s, ' + f'times per image: ' + f'{times_pre_image_list_}[{mean_times_pre_image_:.1f}] ms / img', + flush=True) + return fps_list + + return fps_list[0] + + +def main(): + args = parse_args() + + cfg = Config.fromfile(args.config) + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + + if args.launcher == 'none': + raise NotImplementedError('Only supports distributed mode') + else: + init_dist(args.launcher, **cfg.dist_params) + + repeat_measure_inference_speed(cfg, args.checkpoint, args.max_iter, + args.log_interval, args.fuse_conv_bn, + args.repeat_num) + + +if __name__ == '__main__': + main() diff --git a/tools/analysis_tools/coco_error_analysis.py b/tools/analysis_tools/coco_error_analysis.py new file mode 100644 index 0000000..102ea4e --- /dev/null +++ b/tools/analysis_tools/coco_error_analysis.py @@ -0,0 +1,339 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
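+# Example invocation (hypothetical paths, matching the argparse options in main()):
+#   python tools/analysis_tools/coco_error_analysis.py results.bbox.json analyze_out \
+#       --ann data/coco/annotations/instances_val2017.json --types bbox --extraplots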
+import copy +import os +from argparse import ArgumentParser +from multiprocessing import Pool + +import matplotlib.pyplot as plt +import numpy as np +from pycocotools.coco import COCO +from pycocotools.cocoeval import COCOeval + + +def makeplot(rs, ps, outDir, class_name, iou_type): + cs = np.vstack([ + np.ones((2, 3)), + np.array([0.31, 0.51, 0.74]), + np.array([0.75, 0.31, 0.30]), + np.array([0.36, 0.90, 0.38]), + np.array([0.50, 0.39, 0.64]), + np.array([1, 0.6, 0]), + ]) + areaNames = ['allarea', 'small', 'medium', 'large'] + types = ['C75', 'C50', 'Loc', 'Sim', 'Oth', 'BG', 'FN'] + for i in range(len(areaNames)): + area_ps = ps[..., i, 0] + figure_title = iou_type + '-' + class_name + '-' + areaNames[i] + aps = [ps_.mean() for ps_ in area_ps] + ps_curve = [ + ps_.mean(axis=1) if ps_.ndim > 1 else ps_ for ps_ in area_ps + ] + ps_curve.insert(0, np.zeros(ps_curve[0].shape)) + fig = plt.figure() + ax = plt.subplot(111) + for k in range(len(types)): + ax.plot(rs, ps_curve[k + 1], color=[0, 0, 0], linewidth=0.5) + ax.fill_between( + rs, + ps_curve[k], + ps_curve[k + 1], + color=cs[k], + label=str(f'[{aps[k]:.3f}]' + types[k]), + ) + plt.xlabel('recall') + plt.ylabel('precision') + plt.xlim(0, 1.0) + plt.ylim(0, 1.0) + plt.title(figure_title) + plt.legend() + # plt.show() + fig.savefig(outDir + f'/{figure_title}.png') + plt.close(fig) + + +def autolabel(ax, rects): + """Attach a text label above each bar in *rects*, displaying its height.""" + for rect in rects: + height = rect.get_height() + if height > 0 and height <= 1: # for percent values + text_label = '{:2.0f}'.format(height * 100) + else: + text_label = '{:2.0f}'.format(height) + ax.annotate( + text_label, + xy=(rect.get_x() + rect.get_width() / 2, height), + xytext=(0, 3), # 3 points vertical offset + textcoords='offset points', + ha='center', + va='bottom', + fontsize='x-small', + ) + + +def makebarplot(rs, ps, outDir, class_name, iou_type): + areaNames = ['allarea', 'small', 'medium', 'large'] + types = ['C75', 'C50', 'Loc', 'Sim', 'Oth', 'BG', 'FN'] + fig, ax = plt.subplots() + x = np.arange(len(areaNames)) # the areaNames locations + width = 0.60 # the width of the bars + rects_list = [] + figure_title = iou_type + '-' + class_name + '-' + 'ap bar plot' + for i in range(len(types) - 1): + type_ps = ps[i, ..., 0] + aps = [ps_.mean() for ps_ in type_ps.T] + rects_list.append( + ax.bar( + x - width / 2 + (i + 1) * width / len(types), + aps, + width / len(types), + label=types[i], + )) + + # Add some text for labels, title and custom x-axis tick labels, etc. 
+ ax.set_ylabel('Mean Average Precision (mAP)') + ax.set_title(figure_title) + ax.set_xticks(x) + ax.set_xticklabels(areaNames) + ax.legend() + + # Add score texts over bars + for rects in rects_list: + autolabel(ax, rects) + + # Save plot + fig.savefig(outDir + f'/{figure_title}.png') + plt.close(fig) + + +def get_gt_area_group_numbers(cocoEval): + areaRng = cocoEval.params.areaRng + areaRngStr = [str(aRng) for aRng in areaRng] + areaRngLbl = cocoEval.params.areaRngLbl + areaRngStr2areaRngLbl = dict(zip(areaRngStr, areaRngLbl)) + areaRngLbl2Number = dict.fromkeys(areaRngLbl, 0) + for evalImg in cocoEval.evalImgs: + if evalImg: + for gtIgnore in evalImg['gtIgnore']: + if not gtIgnore: + aRngLbl = areaRngStr2areaRngLbl[str(evalImg['aRng'])] + areaRngLbl2Number[aRngLbl] += 1 + return areaRngLbl2Number + + +def make_gt_area_group_numbers_plot(cocoEval, outDir, verbose=True): + areaRngLbl2Number = get_gt_area_group_numbers(cocoEval) + areaRngLbl = areaRngLbl2Number.keys() + if verbose: + print('number of annotations per area group:', areaRngLbl2Number) + + # Init figure + fig, ax = plt.subplots() + x = np.arange(len(areaRngLbl)) # the areaNames locations + width = 0.60 # the width of the bars + figure_title = 'number of annotations per area group' + + rects = ax.bar(x, areaRngLbl2Number.values(), width) + + # Add some text for labels, title and custom x-axis tick labels, etc. + ax.set_ylabel('Number of annotations') + ax.set_title(figure_title) + ax.set_xticks(x) + ax.set_xticklabels(areaRngLbl) + + # Add score texts over bars + autolabel(ax, rects) + + # Save plot + fig.tight_layout() + fig.savefig(outDir + f'/{figure_title}.png') + plt.close(fig) + + +def make_gt_area_histogram_plot(cocoEval, outDir): + n_bins = 100 + areas = [ann['area'] for ann in cocoEval.cocoGt.anns.values()] + + # init figure + figure_title = 'gt annotation areas histogram plot' + fig, ax = plt.subplots() + + # Set the number of bins + ax.hist(np.sqrt(areas), bins=n_bins) + + # Add some text for labels, title and custom x-axis tick labels, etc. 
+ ax.set_xlabel('Squareroot Area') + ax.set_ylabel('Number of annotations') + ax.set_title(figure_title) + + # Save plot + fig.tight_layout() + fig.savefig(outDir + f'/{figure_title}.png') + plt.close(fig) + + +def analyze_individual_category(k, + cocoDt, + cocoGt, + catId, + iou_type, + areas=None): + nm = cocoGt.loadCats(catId)[0] + print(f'--------------analyzing {k + 1}-{nm["name"]}---------------') + ps_ = {} + dt = copy.deepcopy(cocoDt) + nm = cocoGt.loadCats(catId)[0] + imgIds = cocoGt.getImgIds() + dt_anns = dt.dataset['annotations'] + select_dt_anns = [] + for ann in dt_anns: + if ann['category_id'] == catId: + select_dt_anns.append(ann) + dt.dataset['annotations'] = select_dt_anns + dt.createIndex() + # compute precision but ignore superclass confusion + gt = copy.deepcopy(cocoGt) + child_catIds = gt.getCatIds(supNms=[nm['supercategory']]) + for idx, ann in enumerate(gt.dataset['annotations']): + if ann['category_id'] in child_catIds and ann['category_id'] != catId: + gt.dataset['annotations'][idx]['ignore'] = 1 + gt.dataset['annotations'][idx]['iscrowd'] = 1 + gt.dataset['annotations'][idx]['category_id'] = catId + cocoEval = COCOeval(gt, copy.deepcopy(dt), iou_type) + cocoEval.params.imgIds = imgIds + cocoEval.params.maxDets = [100] + cocoEval.params.iouThrs = [0.1] + cocoEval.params.useCats = 1 + if areas: + cocoEval.params.areaRng = [[0**2, areas[2]], [0**2, areas[0]], + [areas[0], areas[1]], [areas[1], areas[2]]] + cocoEval.evaluate() + cocoEval.accumulate() + ps_supercategory = cocoEval.eval['precision'][0, :, k, :, :] + ps_['ps_supercategory'] = ps_supercategory + # compute precision but ignore any class confusion + gt = copy.deepcopy(cocoGt) + for idx, ann in enumerate(gt.dataset['annotations']): + if ann['category_id'] != catId: + gt.dataset['annotations'][idx]['ignore'] = 1 + gt.dataset['annotations'][idx]['iscrowd'] = 1 + gt.dataset['annotations'][idx]['category_id'] = catId + cocoEval = COCOeval(gt, copy.deepcopy(dt), iou_type) + cocoEval.params.imgIds = imgIds + cocoEval.params.maxDets = [100] + cocoEval.params.iouThrs = [0.1] + cocoEval.params.useCats = 1 + if areas: + cocoEval.params.areaRng = [[0**2, areas[2]], [0**2, areas[0]], + [areas[0], areas[1]], [areas[1], areas[2]]] + cocoEval.evaluate() + cocoEval.accumulate() + ps_allcategory = cocoEval.eval['precision'][0, :, k, :, :] + ps_['ps_allcategory'] = ps_allcategory + return k, ps_ + + +def analyze_results(res_file, + ann_file, + res_types, + out_dir, + extraplots=None, + areas=None): + for res_type in res_types: + assert res_type in ['bbox', 'segm'] + if areas: + assert len(areas) == 3, '3 integers should be specified as areas, \ + representing 3 area regions' + + directory = os.path.dirname(out_dir + '/') + if not os.path.exists(directory): + print(f'-------------create {out_dir}-----------------') + os.makedirs(directory) + + cocoGt = COCO(ann_file) + cocoDt = cocoGt.loadRes(res_file) + imgIds = cocoGt.getImgIds() + for res_type in res_types: + res_out_dir = out_dir + '/' + res_type + '/' + res_directory = os.path.dirname(res_out_dir) + if not os.path.exists(res_directory): + print(f'-------------create {res_out_dir}-----------------') + os.makedirs(res_directory) + iou_type = res_type + cocoEval = COCOeval( + copy.deepcopy(cocoGt), copy.deepcopy(cocoDt), iou_type) + cocoEval.params.imgIds = imgIds + cocoEval.params.iouThrs = [0.75, 0.5, 0.1] + cocoEval.params.maxDets = [100] + if areas: + cocoEval.params.areaRng = [[0**2, areas[2]], [0**2, areas[0]], + [areas[0], areas[1]], + [areas[1], areas[2]]] + 
cocoEval.evaluate()
+        cocoEval.accumulate()
+        ps = cocoEval.eval['precision']
+        ps = np.vstack([ps, np.zeros((4, *ps.shape[1:]))])
+        catIds = cocoGt.getCatIds()
+        recThrs = cocoEval.params.recThrs
+        with Pool(processes=48) as pool:
+            args = [(k, cocoDt, cocoGt, catId, iou_type, areas)
+                    for k, catId in enumerate(catIds)]
+            analyze_results = pool.starmap(analyze_individual_category, args)
+        for k, catId in enumerate(catIds):
+            nm = cocoGt.loadCats(catId)[0]
+            print(f'--------------saving {k + 1}-{nm["name"]}---------------')
+            analyze_result = analyze_results[k]
+            assert k == analyze_result[0]
+            ps_supercategory = analyze_result[1]['ps_supercategory']
+            ps_allcategory = analyze_result[1]['ps_allcategory']
+            # compute precision but ignore superclass confusion
+            ps[3, :, k, :, :] = ps_supercategory
+            # compute precision but ignore any class confusion
+            ps[4, :, k, :, :] = ps_allcategory
+            # fill in background and false negative errors and plot
+            ps[ps == -1] = 0
+            ps[5, :, k, :, :] = ps[4, :, k, :, :] > 0
+            ps[6, :, k, :, :] = 1.0
+            makeplot(recThrs, ps[:, :, k], res_out_dir, nm['name'], iou_type)
+            if extraplots:
+                makebarplot(recThrs, ps[:, :, k], res_out_dir, nm['name'],
+                            iou_type)
+        makeplot(recThrs, ps, res_out_dir, 'allclass', iou_type)
+        if extraplots:
+            makebarplot(recThrs, ps, res_out_dir, 'allclass', iou_type)
+        make_gt_area_group_numbers_plot(
+            cocoEval=cocoEval, outDir=res_out_dir, verbose=True)
+        make_gt_area_histogram_plot(cocoEval=cocoEval, outDir=res_out_dir)
+
+
+def main():
+    parser = ArgumentParser(description='COCO Error Analysis Tool')
+    parser.add_argument('result', help='result file (json format) path')
+    parser.add_argument('out_dir', help='dir to save analyze result images')
+    parser.add_argument(
+        '--ann',
+        default='data/coco/annotations/instances_val2017.json',
+        help='annotation file path')
+    parser.add_argument(
+        '--types', type=str, nargs='+', default=['bbox'], help='result types')
+    parser.add_argument(
+        '--extraplots',
+        action='store_true',
+        help='export extra bar/stat plots')
+    parser.add_argument(
+        '--areas',
+        type=int,
+        nargs='+',
+        default=[1024, 9216, 10000000000],
+        help='area regions')
+    args = parser.parse_args()
+    analyze_results(
+        args.result,
+        args.ann,
+        args.types,
+        out_dir=args.out_dir,
+        extraplots=args.extraplots,
+        areas=args.areas)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/tools/analysis_tools/confusion_matrix.py b/tools/analysis_tools/confusion_matrix.py
new file mode 100644
index 0000000..71e4eb0
--- /dev/null
+++ b/tools/analysis_tools/confusion_matrix.py
@@ -0,0 +1,261 @@
+import argparse
+import os
+
+import matplotlib.pyplot as plt
+import mmcv
+import numpy as np
+from matplotlib.ticker import MultipleLocator
+from mmcv import Config, DictAction
+from mmcv.ops import nms
+
+from mmdet.core.evaluation.bbox_overlaps import bbox_overlaps
+from mmdet.datasets import build_dataset
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(
+        description='Generate confusion matrix from detection results')
+    parser.add_argument('config', help='test config file path')
+    parser.add_argument(
+        'prediction_path', help='path of the test .pkl prediction file')
+    parser.add_argument(
+        'save_dir', help='directory where confusion matrix will be saved')
+    parser.add_argument(
+        '--show', action='store_true', help='show confusion matrix')
+    parser.add_argument(
+        '--color-theme',
+        default='plasma',
+        help='theme of the matrix color map')
+    parser.add_argument(
+        '--score-thr',
+        type=float,
+        default=0.3,
+        help='score threshold to filter detection bboxes')
+    parser.add_argument(
+        '--tp-iou-thr',
+        type=float,
+        default=0.5,
+        help='IoU threshold to be considered as matched')
+    parser.add_argument(
+        '--nms-iou-thr',
+        type=float,
+        default=None,
+        help='NMS IoU threshold, only applied when users want to re-run NMS '
+        'with a different IoU threshold.')
+    parser.add_argument(
+        '--cfg-options',
+        nargs='+',
+        action=DictAction,
+        help='override some settings in the used config, the key-value pair '
+        'in xxx=yyy format will be merged into config file. If the value to '
+        'be overwritten is a list, it should be like key="[a,b]" or key=a,b '
+        'It also allows nested list/tuple values, e.g. key="[(a,b),(c,d)]" '
+        'Note that the quotation marks are necessary and that no white space '
+        'is allowed.')
+    args = parser.parse_args()
+    return args
+
+
+def calculate_confusion_matrix(dataset,
+                               results,
+                               score_thr=0,
+                               nms_iou_thr=None,
+                               tp_iou_thr=0.5):
+    """Calculate the confusion matrix.
+
+    Args:
+        dataset (Dataset): Test or val dataset.
+        results (list[ndarray]): A list of detection results in each image.
+        score_thr (float|optional): Score threshold to filter bboxes.
+            Default: 0.
+        nms_iou_thr (float|optional): NMS IoU threshold. The detector has
+            already applied NMS to its results, so set this only to re-run
+            NMS with a different threshold. Default: None.
+        tp_iou_thr (float|optional): IoU threshold to be considered as
+            matched. Default: 0.5.
+    """
+    num_classes = len(dataset.CLASSES)
+    confusion_matrix = np.zeros(shape=[num_classes + 1, num_classes + 1])
+    assert len(dataset) == len(results)
+    prog_bar = mmcv.ProgressBar(len(results))
+    for idx, per_img_res in enumerate(results):
+        if isinstance(per_img_res, tuple):
+            res_bboxes, _ = per_img_res
+        else:
+            res_bboxes = per_img_res
+        ann = dataset.get_ann_info(idx)
+        gt_bboxes = ann['bboxes']
+        labels = ann['labels']
+        analyze_per_img_dets(confusion_matrix, gt_bboxes, labels, res_bboxes,
+                             score_thr, tp_iou_thr, nms_iou_thr)
+        prog_bar.update()
+    return confusion_matrix
+
+
+def analyze_per_img_dets(confusion_matrix,
+                         gt_bboxes,
+                         gt_labels,
+                         result,
+                         score_thr=0,
+                         tp_iou_thr=0.5,
+                         nms_iou_thr=None):
+    """Analyze detection results on each image.
+
+    Args:
+        confusion_matrix (ndarray): The confusion matrix,
+            has shape (num_classes + 1, num_classes + 1).
+        gt_bboxes (ndarray): Ground truth bboxes, has shape (num_gt, 4).
+        gt_labels (ndarray): Ground truth labels, has shape (num_gt).
+        result (ndarray): Detection results, has shape
+            (num_classes, num_bboxes, 5).
+        score_thr (float): Score threshold to filter bboxes.
+            Default: 0.
+        tp_iou_thr (float): IoU threshold to be considered as matched.
+            Default: 0.5.
+        nms_iou_thr (float|optional): NMS IoU threshold. The detector has
+            already applied NMS to its results, so set this only to re-run
+            NMS with a different threshold. Default: None.
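+
+    Note:
+        A detection whose IoU with several ground truth boxes exceeds
+        ``tp_iou_thr`` increments one cell per matched box, so a single
+        detection can add several counts to its prediction column.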
+    """
+    true_positives = np.zeros_like(gt_labels)
+    for det_label, det_bboxes in enumerate(result):
+        if nms_iou_thr:
+            det_bboxes, _ = nms(
+                det_bboxes[:, :4],
+                det_bboxes[:, -1],
+                nms_iou_thr,
+                score_threshold=score_thr)
+        ious = bbox_overlaps(det_bboxes[:, :4], gt_bboxes)
+        for i, det_bbox in enumerate(det_bboxes):
+            score = det_bbox[4]
+            det_match = 0
+            if score >= score_thr:
+                for j, gt_label in enumerate(gt_labels):
+                    if ious[i, j] >= tp_iou_thr:
+                        det_match += 1
+                        if gt_label == det_label:
+                            true_positives[j] += 1  # TP
+                        confusion_matrix[gt_label, det_label] += 1
+            if det_match == 0:  # BG FP
+                confusion_matrix[-1, det_label] += 1
+    for num_tp, gt_label in zip(true_positives, gt_labels):
+        if num_tp == 0:  # FN
+            confusion_matrix[gt_label, -1] += 1
+
+
+def plot_confusion_matrix(confusion_matrix,
+                          labels,
+                          save_dir=None,
+                          show=True,
+                          title='Normalized Confusion Matrix',
+                          color_theme='plasma'):
+    """Draw confusion matrix with matplotlib.
+
+    Args:
+        confusion_matrix (ndarray): The confusion matrix.
+        labels (list[str]): List of class names.
+        save_dir (str|optional): If set, save the confusion matrix plot to the
+            given path. Default: None.
+        show (bool): Whether to show the plot. Default: True.
+        title (str): Title of the plot. Default: `Normalized Confusion Matrix`.
+        color_theme (str): Theme of the matrix color map. Default: `plasma`.
+    """
+    # normalize the confusion matrix
+    per_label_sums = confusion_matrix.sum(axis=1)[:, np.newaxis]
+    confusion_matrix = \
+        confusion_matrix.astype(np.float32) / per_label_sums * 100
+
+    num_classes = len(labels)
+    fig, ax = plt.subplots(
+        figsize=(0.5 * num_classes, 0.5 * num_classes * 0.8), dpi=180)
+    cmap = plt.get_cmap(color_theme)
+    im = ax.imshow(confusion_matrix, cmap=cmap)
+    plt.colorbar(mappable=im, ax=ax)
+
+    title_font = {'weight': 'bold', 'size': 12}
+    ax.set_title(title, fontdict=title_font)
+    label_font = {'size': 10}
+    plt.ylabel('Ground Truth Label', fontdict=label_font)
+    plt.xlabel('Prediction Label', fontdict=label_font)
+
+    # draw locator
+    xmajor_locator = MultipleLocator(1)
+    xminor_locator = MultipleLocator(0.5)
+    ax.xaxis.set_major_locator(xmajor_locator)
+    ax.xaxis.set_minor_locator(xminor_locator)
+    ymajor_locator = MultipleLocator(1)
+    yminor_locator = MultipleLocator(0.5)
+    ax.yaxis.set_major_locator(ymajor_locator)
+    ax.yaxis.set_minor_locator(yminor_locator)
+
+    # draw grid
+    ax.grid(True, which='minor', linestyle='-')
+
+    # draw label
+    ax.set_xticks(np.arange(num_classes))
+    ax.set_yticks(np.arange(num_classes))
+    ax.set_xticklabels(labels)
+    ax.set_yticklabels(labels)
+
+    ax.tick_params(
+        axis='x', bottom=False, top=True, labelbottom=False, labeltop=True)
+    plt.setp(
+        ax.get_xticklabels(), rotation=45, ha='left', rotation_mode='anchor')
+
+    # draw confusion matrix values
+    for i in range(num_classes):
+        for j in range(num_classes):
+            ax.text(
+                j,
+                i,
+                '{}%'.format(int(confusion_matrix[i, j])),
+                ha='center',
+                va='center',
+                color='w',
+                size=7)
+
+    ax.set_ylim(len(confusion_matrix) - 0.5, -0.5)  # matplotlib>3.1.1
+
+    fig.tight_layout()
+    if save_dir is not None:
+        plt.savefig(
+            os.path.join(save_dir, 'confusion_matrix.png'), format='png')
+    if show:
+        plt.show()
+
+
+def main():
+    args = parse_args()
+
+    cfg = Config.fromfile(args.config)
+    if args.cfg_options is not None:
+        cfg.merge_from_dict(args.cfg_options)
+
+    results = mmcv.load(args.prediction_path)
+    assert isinstance(results, list)
+    if isinstance(results[0], list):
+        pass
+    elif isinstance(results[0], tuple):
+        results = [result[0] for result in results]
+    else:
+        raise TypeError('invalid type of prediction results')
+
+    if isinstance(cfg.data.test, dict):
+        cfg.data.test.test_mode = True
+    elif isinstance(cfg.data.test, list):
+        for ds_cfg in cfg.data.test:
+            ds_cfg.test_mode = True
+    dataset = build_dataset(cfg.data.test)
+
+    confusion_matrix = calculate_confusion_matrix(dataset, results,
+                                                  args.score_thr,
+                                                  args.nms_iou_thr,
+                                                  args.tp_iou_thr)
+    plot_confusion_matrix(
+        confusion_matrix,
+        dataset.CLASSES + ('background', ),
+        save_dir=args.save_dir,
+        show=args.show)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/tools/analysis_tools/eval_metric.py b/tools/analysis_tools/eval_metric.py
new file mode 100644
index 0000000..1fcdc1c
--- /dev/null
+++ b/tools/analysis_tools/eval_metric.py
@@ -0,0 +1,80 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import argparse
+
+import mmcv
+from mmcv import Config, DictAction
+
+from mmdet.datasets import build_dataset
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(description='Evaluate metric of the '
+                                     'results saved in pkl format')
+    parser.add_argument('config', help='Config of the model')
+    parser.add_argument('pkl_results', help='Results in pickle format')
+    parser.add_argument(
+        '--format-only',
+        action='store_true',
+        help='Format the output results without performing evaluation. It is '
+        'useful when you want to format the result to a specific format and '
+        'submit it to the test server')
+    parser.add_argument(
+        '--eval',
+        type=str,
+        nargs='+',
+        help='Evaluation metrics, which depends on the dataset, e.g., "bbox",'
+        ' "segm", "proposal" for COCO, and "mAP", "recall" for PASCAL VOC')
+    parser.add_argument(
+        '--cfg-options',
+        nargs='+',
+        action=DictAction,
+        help='override some settings in the used config, the key-value pair '
+        'in xxx=yyy format will be merged into config file. If the value to '
+        'be overwritten is a list, it should be like key="[a,b]" or key=a,b '
+        'It also allows nested list/tuple values, e.g. key="[(a,b),(c,d)]" '
+        'Note that the quotation marks are necessary and that no white space '
+        'is allowed.')
+    parser.add_argument(
+        '--eval-options',
+        nargs='+',
+        action=DictAction,
+        help='custom options for evaluation, the key-value pair in xxx=yyy '
+        'format will be kwargs for dataset.evaluate() function')
+    args = parser.parse_args()
+    return args
+
+
+def main():
+    args = parse_args()
+
+    cfg = Config.fromfile(args.config)
+    assert args.eval or args.format_only, (
+        'Please specify at least one operation (eval/format the results) with '
+        'the argument "--eval", "--format-only"')
+    if args.eval and args.format_only:
+        raise ValueError('--eval and --format-only cannot both be specified')
+
+    if args.cfg_options is not None:
+        cfg.merge_from_dict(args.cfg_options)
+    cfg.data.test.test_mode = True
+
+    dataset = build_dataset(cfg.data.test)
+    outputs = mmcv.load(args.pkl_results)
+
+    kwargs = {} if args.eval_options is None else args.eval_options
+    if args.format_only:
+        dataset.format_results(outputs, **kwargs)
+    if args.eval:
+        eval_kwargs = cfg.get('evaluation', {}).copy()
+        # hard-coded way to remove EvalHook args
+        for key in [
+                'interval', 'tmpdir', 'start', 'gpu_collect', 'save_best',
+                'rule'
+        ]:
+            eval_kwargs.pop(key, None)
+        eval_kwargs.update(dict(metric=args.eval, **kwargs))
+        print(dataset.evaluate(outputs, **eval_kwargs))
+
+
+if __name__ == '__main__':
+    main()
diff --git a/tools/analysis_tools/get_flops.py b/tools/analysis_tools/get_flops.py
new file mode 100644
index 0000000..0ac59a5
--- /dev/null
+++ b/tools/analysis_tools/get_flops.py
@@ -0,0 +1,97 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import argparse
+
+import numpy as np
+import torch
+from mmcv import Config, DictAction
+
+from mmdet.models import build_detector
+
+try:
+    from mmcv.cnn import get_model_complexity_info
+except ImportError:
+    raise ImportError('Please upgrade mmcv to >0.6.2')
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(
+        description='Get the FLOPs of a detector')
+    parser.add_argument('config', help='train config file path')
+    parser.add_argument(
+        '--shape',
+        type=int,
+        nargs='+',
+        default=[1280, 800],
+        help='input image size')
+    parser.add_argument(
+        '--cfg-options',
+        nargs='+',
+        action=DictAction,
+        help='override some settings in the used config, the key-value pair '
+        'in xxx=yyy format will be merged into config file. If the value to '
+        'be overwritten is a list, it should be like key="[a,b]" or key=a,b '
+        'It also allows nested list/tuple values, e.g. key="[(a,b),(c,d)]" '
+        'Note that the quotation marks are necessary and that no white space '
+        'is allowed.')
+    parser.add_argument(
+        '--size-divisor',
+        type=int,
+        default=32,
+        help='Pad the input image to the minimum size that is divisible '
+        'by size_divisor; -1 means do not pad the image.')
+    args = parser.parse_args()
+    return args
+
+
+def main():
+
+    args = parse_args()
+
+    if len(args.shape) == 1:
+        h = w = args.shape[0]
+    elif len(args.shape) == 2:
+        h, w = args.shape
+    else:
+        raise ValueError('invalid input shape')
+    orig_shape = (3, h, w)
+    divisor = args.size_divisor
+    if divisor > 0:
+        h = int(np.ceil(h / divisor)) * divisor
+        w = int(np.ceil(w / divisor)) * divisor
+
+    input_shape = (3, h, w)
+
+    cfg = Config.fromfile(args.config)
+    if args.cfg_options is not None:
+        cfg.merge_from_dict(args.cfg_options)
+
+    model = build_detector(
+        cfg.model,
+        train_cfg=cfg.get('train_cfg'),
+        test_cfg=cfg.get('test_cfg'))
+    if torch.cuda.is_available():
+        model.cuda()
+    model.eval()
+
+    if hasattr(model, 'forward_dummy'):
+        model.forward = model.forward_dummy
+    else:
+        raise NotImplementedError(
+            'FLOPs counter is currently not supported with {}'.
+            format(model.__class__.__name__))
+
+    flops, params = get_model_complexity_info(model, input_shape)
+    split_line = '=' * 30
+
+    if divisor > 0 and \
+            input_shape != orig_shape:
+        print(f'{split_line}\nUse size divisor to set input shape '
+              f'from {orig_shape} to {input_shape}\n')
+    print(f'{split_line}\nInput shape: {input_shape}\n'
+          f'Flops: {flops}\nParams: {params}\n{split_line}')
+    print('!!!Please be cautious if you use the results in papers. '
+          'You may need to check if all ops are supported and verify that the '
+          'flops computation is correct.')
+
+
+if __name__ == '__main__':
+    main()
diff --git a/tools/analysis_tools/optimize_anchors.py b/tools/analysis_tools/optimize_anchors.py
new file mode 100644
index 0000000..d0da0cb
--- /dev/null
+++ b/tools/analysis_tools/optimize_anchors.py
@@ -0,0 +1,370 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+"""Optimize anchor settings on a specific dataset.
+
+This script provides two methods to optimize YOLO anchors: k-means anchor
+clustering and differential evolution. Use ``--algorithm k-means`` or
+``--algorithm differential_evolution`` to switch between them.
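+Both algorithms search for anchor widths and heights that maximize the
+average best IoU between the ground-truth boxes and the anchor set.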
+ +Example: + Use k-means anchor cluster:: + + python tools/analysis_tools/optimize_anchors.py ${CONFIG} \ + --algorithm k-means --input-shape ${INPUT_SHAPE [WIDTH HEIGHT]} \ + --output-dir ${OUTPUT_DIR} + Use differential evolution to optimize anchors:: + + python tools/analysis_tools/optimize_anchors.py ${CONFIG} \ + --algorithm differential_evolution \ + --input-shape ${INPUT_SHAPE [WIDTH HEIGHT]} \ + --output-dir ${OUTPUT_DIR} +""" +import argparse +import os.path as osp + +import mmcv +import numpy as np +import torch +from mmcv import Config +from scipy.optimize import differential_evolution + +from mmdet.core import bbox_cxcywh_to_xyxy, bbox_overlaps, bbox_xyxy_to_cxcywh +from mmdet.datasets import build_dataset +from mmdet.utils import get_root_logger + + +def parse_args(): + parser = argparse.ArgumentParser(description='Optimize anchor parameters.') + parser.add_argument('config', help='Train config file path.') + parser.add_argument( + '--device', default='cuda:0', help='Device used for calculating.') + parser.add_argument( + '--input-shape', + type=int, + nargs='+', + default=[608, 608], + help='input image size') + parser.add_argument( + '--algorithm', + default='differential_evolution', + help='Algorithm used for anchor optimizing.' + 'Support k-means and differential_evolution for YOLO.') + parser.add_argument( + '--iters', + default=1000, + type=int, + help='Maximum iterations for optimizer.') + parser.add_argument( + '--output-dir', + default=None, + type=str, + help='Path to save anchor optimize result.') + + args = parser.parse_args() + return args + + +class BaseAnchorOptimizer: + """Base class for anchor optimizer. + + Args: + dataset (obj:`Dataset`): Dataset object. + input_shape (list[int]): Input image shape of the model. + Format in [width, height]. + logger (obj:`logging.Logger`): The logger for logging. + device (str, optional): Device used for calculating. + Default: 'cuda:0' + out_dir (str, optional): Path to save anchor optimize result. + Default: None + """ + + def __init__(self, + dataset, + input_shape, + logger, + device='cuda:0', + out_dir=None): + self.dataset = dataset + self.input_shape = input_shape + self.logger = logger + self.device = device + self.out_dir = out_dir + bbox_whs, img_shapes = self.get_whs_and_shapes() + ratios = img_shapes.max(1, keepdims=True) / np.array([input_shape]) + + # resize to input shape + self.bbox_whs = bbox_whs / ratios + + def get_whs_and_shapes(self): + """Get widths and heights of bboxes and shapes of images. + + Returns: + tuple[np.ndarray]: Array of bbox shapes and array of image + shapes with shape (num_bboxes, 2) in [width, height] format. + """ + self.logger.info('Collecting bboxes from annotation...') + bbox_whs = [] + img_shapes = [] + prog_bar = mmcv.ProgressBar(len(self.dataset)) + for idx in range(len(self.dataset)): + ann = self.dataset.get_ann_info(idx) + data_info = self.dataset.data_infos[idx] + img_shape = np.array([data_info['width'], data_info['height']]) + gt_bboxes = ann['bboxes'] + for bbox in gt_bboxes: + wh = bbox[2:4] - bbox[0:2] + img_shapes.append(img_shape) + bbox_whs.append(wh) + prog_bar.update() + print('\n') + bbox_whs = np.array(bbox_whs) + img_shapes = np.array(img_shapes) + self.logger.info(f'Collected {bbox_whs.shape[0]} bboxes.') + return bbox_whs, img_shapes + + def get_zero_center_bbox_tensor(self): + """Get a tensor of bboxes centered at (0, 0). + + Returns: + Tensor: Tensor of bboxes with shape (num_bboxes, 4) + in [xmin, ymin, xmax, ymax] format. 
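+
+        Note:
+            Centering all boxes at (0, 0) makes the IoU used below depend
+            only on width and height, which is exactly what anchor shape
+            matching should measure.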
+        """
+        whs = torch.from_numpy(self.bbox_whs).to(
+            self.device, dtype=torch.float32)
+        bboxes = bbox_cxcywh_to_xyxy(
+            torch.cat([torch.zeros_like(whs), whs], dim=1))
+        return bboxes
+
+    def optimize(self):
+        raise NotImplementedError
+
+    def save_result(self, anchors, path=None):
+        anchor_results = []
+        for w, h in anchors:
+            anchor_results.append([round(w), round(h)])
+        self.logger.info(f'Anchor optimize result: {anchor_results}')
+        if path:
+            json_path = osp.join(path, 'anchor_optimize_result.json')
+            mmcv.dump(anchor_results, json_path)
+            self.logger.info(f'Result saved in {json_path}')
+
+
+class YOLOKMeansAnchorOptimizer(BaseAnchorOptimizer):
+    r"""YOLO anchor optimizer using k-means. Code refers to
+    `AlexeyAB/darknet `_.
+
+    Args:
+        num_anchors (int) : Number of anchors.
+        iters (int): Maximum iterations for k-means.
+    """
+
+    def __init__(self, num_anchors, iters, **kwargs):
+
+        super(YOLOKMeansAnchorOptimizer, self).__init__(**kwargs)
+        self.num_anchors = num_anchors
+        self.iters = iters
+
+    def optimize(self):
+        anchors = self.kmeans_anchors()
+        self.save_result(anchors, self.out_dir)
+
+    def kmeans_anchors(self):
+        self.logger.info(
+            f'Start clustering {self.num_anchors} YOLO anchors with k-means...')
+        bboxes = self.get_zero_center_bbox_tensor()
+        cluster_center_idx = torch.randint(
+            0, bboxes.shape[0], (self.num_anchors, )).to(self.device)
+
+        assignments = torch.zeros((bboxes.shape[0], )).to(self.device)
+        cluster_centers = bboxes[cluster_center_idx]
+        if self.num_anchors == 1:
+            cluster_centers = self.kmeans_maximization(bboxes, assignments,
+                                                       cluster_centers)
+            anchors = bbox_xyxy_to_cxcywh(cluster_centers)[:, 2:].cpu().numpy()
+            anchors = sorted(anchors, key=lambda x: x[0] * x[1])
+            return anchors
+
+        prog_bar = mmcv.ProgressBar(self.iters)
+        for i in range(self.iters):
+            converged, assignments = self.kmeans_expectation(
+                bboxes, assignments, cluster_centers)
+            if converged:
+                self.logger.info(f'K-means process has converged at iter {i}.')
+                break
+            cluster_centers = self.kmeans_maximization(bboxes, assignments,
+                                                       cluster_centers)
+            prog_bar.update()
+        print('\n')
+        avg_iou = bbox_overlaps(bboxes,
+                                cluster_centers).max(1)[0].mean().item()
+
+        anchors = bbox_xyxy_to_cxcywh(cluster_centers)[:, 2:].cpu().numpy()
+        anchors = sorted(anchors, key=lambda x: x[0] * x[1])
+        self.logger.info(f'Anchor clustering finished. Average IoU: {avg_iou}')
+
+        return anchors
+
+    def kmeans_maximization(self, bboxes, assignments, centers):
+        """Maximization part of the EM algorithm (Expectation-Maximization)."""
+        new_centers = torch.zeros_like(centers)
+        for i in range(centers.shape[0]):
+            mask = (assignments == i)
+            if mask.sum():
+                new_centers[i, :] = bboxes[mask].mean(0)
+        return new_centers
+
+    def kmeans_expectation(self, bboxes, assignments, centers):
+        """Expectation part of the EM algorithm (Expectation-Maximization)."""
+        ious = bbox_overlaps(bboxes, centers)
+        closest = ious.argmax(1)
+        converged = (closest == assignments).all()
+        return converged, closest
+
+
+class YOLODEAnchorOptimizer(BaseAnchorOptimizer):
+    """YOLO anchor optimizer using the differential evolution algorithm.
+
+    Args:
+        num_anchors (int) : Number of anchors.
+        iters (int): Maximum iterations for differential evolution.
+        strategy (str): The differential evolution strategy to use.
+            Should be one of:
+
+            - 'best1bin'
+            - 'best1exp'
+            - 'rand1exp'
+            - 'randtobest1exp'
+            - 'currenttobest1exp'
+            - 'best2exp'
+            - 'rand2exp'
+            - 'randtobest1bin'
+            - 'currenttobest1bin'
+            - 'best2bin'
+            - 'rand2bin'
+            - 'rand1bin'
+
+            Default: 'best1bin'.
+        population_size (int): Total population size of evolution algorithm.
+            Default: 15.
+        convergence_thr (float): Tolerance for convergence; optimization
+            stops when ``np.std(pop) <= abs(convergence_thr) +
+            convergence_thr * np.abs(np.mean(population_energies))``.
+            Default: 0.0001.
+        mutation (tuple[float]): Dithering range that randomly changes the
+            mutation constant. Default: (0.5, 1).
+        recombination (float): Recombination constant of crossover
+            probability. Default: 0.7.
+    """
+
+    def __init__(self,
+                 num_anchors,
+                 iters,
+                 strategy='best1bin',
+                 population_size=15,
+                 convergence_thr=0.0001,
+                 mutation=(0.5, 1),
+                 recombination=0.7,
+                 **kwargs):
+
+        super(YOLODEAnchorOptimizer, self).__init__(**kwargs)
+
+        self.num_anchors = num_anchors
+        self.iters = iters
+        self.strategy = strategy
+        self.population_size = population_size
+        self.convergence_thr = convergence_thr
+        self.mutation = mutation
+        self.recombination = recombination
+
+    def optimize(self):
+        anchors = self.differential_evolution()
+        self.save_result(anchors, self.out_dir)
+
+    def differential_evolution(self):
+        bboxes = self.get_zero_center_bbox_tensor()
+
+        bounds = []
+        for i in range(self.num_anchors):
+            bounds.extend([(0, self.input_shape[0]), (0, self.input_shape[1])])
+
+        result = differential_evolution(
+            func=self.avg_iou_cost,
+            bounds=bounds,
+            args=(bboxes, ),
+            strategy=self.strategy,
+            maxiter=self.iters,
+            popsize=self.population_size,
+            tol=self.convergence_thr,
+            mutation=self.mutation,
+            recombination=self.recombination,
+            updating='immediate',
+            disp=True)
+        self.logger.info(
+            f'Anchor evolution finished. Average IoU: {1 - result.fun}')
+        anchors = [(w, h) for w, h in zip(result.x[::2], result.x[1::2])]
+        anchors = sorted(anchors, key=lambda x: x[0] * x[1])
+        return anchors
+
+    @staticmethod
+    def avg_iou_cost(anchor_params, bboxes):
+        assert len(anchor_params) % 2 == 0
+        anchor_whs = torch.tensor(
+            [[w, h]
+             for w, h in zip(anchor_params[::2], anchor_params[1::2])]).to(
+                 bboxes.device, dtype=bboxes.dtype)
+        anchor_boxes = bbox_cxcywh_to_xyxy(
+            torch.cat([torch.zeros_like(anchor_whs), anchor_whs], dim=1))
+        ious = bbox_overlaps(bboxes, anchor_boxes)
+        max_ious, _ = ious.max(1)
+        cost = 1 - max_ious.mean().item()
+        return cost
+
+
+def main():
+    logger = get_root_logger()
+    args = parse_args()
+    cfg = args.config
+    cfg = Config.fromfile(cfg)
+
+    input_shape = args.input_shape
+    assert len(input_shape) == 2
+
+    anchor_type = cfg.model.bbox_head.anchor_generator.type
+    assert anchor_type == 'YOLOAnchorGenerator', \
+        f'Only support optimizing YOLOAnchor, but got {anchor_type}.'
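+
+    # base_sizes lists anchors per feature level; the optimizers only care
+    # about the flat total, so the per-level group sizes are summed below.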
+ + base_sizes = cfg.model.bbox_head.anchor_generator.base_sizes + num_anchors = sum([len(sizes) for sizes in base_sizes]) + + train_data_cfg = cfg.data.train + while 'dataset' in train_data_cfg: + train_data_cfg = train_data_cfg['dataset'] + dataset = build_dataset(train_data_cfg) + + if args.algorithm == 'k-means': + optimizer = YOLOKMeansAnchorOptimizer( + dataset=dataset, + input_shape=input_shape, + device=args.device, + num_anchors=num_anchors, + iters=args.iters, + logger=logger, + out_dir=args.output_dir) + elif args.algorithm == 'differential_evolution': + optimizer = YOLODEAnchorOptimizer( + dataset=dataset, + input_shape=input_shape, + device=args.device, + num_anchors=num_anchors, + iters=args.iters, + logger=logger, + out_dir=args.output_dir) + else: + raise NotImplementedError( + f'Only support k-means and differential_evolution, ' + f'but get {args.algorithm}') + + optimizer.optimize() + + +if __name__ == '__main__': + main() diff --git a/tools/analysis_tools/robustness_eval.py b/tools/analysis_tools/robustness_eval.py new file mode 100644 index 0000000..da5ec28 --- /dev/null +++ b/tools/analysis_tools/robustness_eval.py @@ -0,0 +1,251 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import os.path as osp +from argparse import ArgumentParser + +import mmcv +import numpy as np + + +def print_coco_results(results): + + def _print(result, ap=1, iouThr=None, areaRng='all', maxDets=100): + titleStr = 'Average Precision' if ap == 1 else 'Average Recall' + typeStr = '(AP)' if ap == 1 else '(AR)' + iouStr = '0.50:0.95' \ + if iouThr is None else f'{iouThr:0.2f}' + iStr = f' {titleStr:<18} {typeStr} @[ IoU={iouStr:<9} | ' + iStr += f'area={areaRng:>6s} | maxDets={maxDets:>3d} ] = {result:0.3f}' + print(iStr) + + stats = np.zeros((12, )) + stats[0] = _print(results[0], 1) + stats[1] = _print(results[1], 1, iouThr=.5) + stats[2] = _print(results[2], 1, iouThr=.75) + stats[3] = _print(results[3], 1, areaRng='small') + stats[4] = _print(results[4], 1, areaRng='medium') + stats[5] = _print(results[5], 1, areaRng='large') + stats[6] = _print(results[6], 0, maxDets=1) + stats[7] = _print(results[7], 0, maxDets=10) + stats[8] = _print(results[8], 0) + stats[9] = _print(results[9], 0, areaRng='small') + stats[10] = _print(results[10], 0, areaRng='medium') + stats[11] = _print(results[11], 0, areaRng='large') + + +def get_coco_style_results(filename, + task='bbox', + metric=None, + prints='mPC', + aggregate='benchmark'): + + assert aggregate in ['benchmark', 'all'] + + if prints == 'all': + prints = ['P', 'mPC', 'rPC'] + elif isinstance(prints, str): + prints = [prints] + for p in prints: + assert p in ['P', 'mPC', 'rPC'] + + if metric is None: + metrics = [ + 'AP', 'AP50', 'AP75', 'APs', 'APm', 'APl', 'AR1', 'AR10', 'AR100', + 'ARs', 'ARm', 'ARl' + ] + elif isinstance(metric, list): + metrics = metric + else: + metrics = [metric] + + for metric_name in metrics: + assert metric_name in [ + 'AP', 'AP50', 'AP75', 'APs', 'APm', 'APl', 'AR1', 'AR10', 'AR100', + 'ARs', 'ARm', 'ARl' + ] + + eval_output = mmcv.load(filename) + + num_distortions = len(list(eval_output.keys())) + results = np.zeros((num_distortions, 6, len(metrics)), dtype='float32') + + for corr_i, distortion in enumerate(eval_output): + for severity in eval_output[distortion]: + for metric_j, metric_name in enumerate(metrics): + mAP = eval_output[distortion][severity][task][metric_name] + results[corr_i, severity, metric_j] = mAP + + P = results[0, 0, :] + if aggregate == 'benchmark': + mPC = np.mean(results[:15, 1:, :], axis=(0, 
1)) + else: + mPC = np.mean(results[:, 1:, :], axis=(0, 1)) + rPC = mPC / P + + print(f'\nmodel: {osp.basename(filename)}') + if metric is None: + if 'P' in prints: + print(f'Performance on Clean Data [P] ({task})') + print_coco_results(P) + if 'mPC' in prints: + print(f'Mean Performance under Corruption [mPC] ({task})') + print_coco_results(mPC) + if 'rPC' in prints: + print(f'Relative Performance under Corruption [rPC] ({task})') + print_coco_results(rPC) + else: + if 'P' in prints: + print(f'Performance on Clean Data [P] ({task})') + for metric_i, metric_name in enumerate(metrics): + print(f'{metric_name:5} = {P[metric_i]:0.3f}') + if 'mPC' in prints: + print(f'Mean Performance under Corruption [mPC] ({task})') + for metric_i, metric_name in enumerate(metrics): + print(f'{metric_name:5} = {mPC[metric_i]:0.3f}') + if 'rPC' in prints: + print(f'Relative Performance under Corruption [rPC] ({task})') + for metric_i, metric_name in enumerate(metrics): + print(f'{metric_name:5} => {rPC[metric_i] * 100:0.1f} %') + + return results + + +def get_voc_style_results(filename, prints='mPC', aggregate='benchmark'): + + assert aggregate in ['benchmark', 'all'] + + if prints == 'all': + prints = ['P', 'mPC', 'rPC'] + elif isinstance(prints, str): + prints = [prints] + for p in prints: + assert p in ['P', 'mPC', 'rPC'] + + eval_output = mmcv.load(filename) + + num_distortions = len(list(eval_output.keys())) + results = np.zeros((num_distortions, 6, 20), dtype='float32') + + for i, distortion in enumerate(eval_output): + for severity in eval_output[distortion]: + mAP = [ + eval_output[distortion][severity][j]['ap'] + for j in range(len(eval_output[distortion][severity])) + ] + results[i, severity, :] = mAP + + P = results[0, 0, :] + if aggregate == 'benchmark': + mPC = np.mean(results[:15, 1:, :], axis=(0, 1)) + else: + mPC = np.mean(results[:, 1:, :], axis=(0, 1)) + rPC = mPC / P + + print(f'\nmodel: {osp.basename(filename)}') + if 'P' in prints: + print(f'Performance on Clean Data [P] in AP50 = {np.mean(P):0.3f}') + if 'mPC' in prints: + print('Mean Performance under Corruption [mPC] in AP50 = ' + f'{np.mean(mPC):0.3f}') + if 'rPC' in prints: + print('Relative Performance under Corruption [rPC] in % = ' + f'{np.mean(rPC) * 100:0.1f}') + + return np.mean(results, axis=2, keepdims=True) + + +def get_results(filename, + dataset='coco', + task='bbox', + metric=None, + prints='mPC', + aggregate='benchmark'): + assert dataset in ['coco', 'voc', 'cityscapes'] + + if dataset in ['coco', 'cityscapes']: + results = get_coco_style_results( + filename, + task=task, + metric=metric, + prints=prints, + aggregate=aggregate) + elif dataset == 'voc': + if task != 'bbox': + print('Only bbox analysis is supported for Pascal VOC') + print('Will report bbox results\n') + if metric not in [None, ['AP'], ['AP50']]: + print('Only the AP50 metric is supported for Pascal VOC') + print('Will report AP50 metric\n') + results = get_voc_style_results( + filename, prints=prints, aggregate=aggregate) + + return results + + +def get_distortions_from_file(filename): + + eval_output = mmcv.load(filename) + + return get_distortions_from_results(eval_output) + + +def get_distortions_from_results(eval_output): + distortions = [] + for i, distortion in enumerate(eval_output): + distortions.append(distortion.replace('_', ' ')) + return distortions + + +def main(): + parser = ArgumentParser(description='Corruption Result Analysis') + parser.add_argument('filename', help='result file path') + parser.add_argument( + '--dataset', + type=str, + 
choices=['coco', 'voc', 'cityscapes'], + default='coco', + help='dataset type') + parser.add_argument( + '--task', + type=str, + nargs='+', + choices=['bbox', 'segm'], + default=['bbox'], + help='task to report') + parser.add_argument( + '--metric', + nargs='+', + choices=[ + None, 'AP', 'AP50', 'AP75', 'APs', 'APm', 'APl', 'AR1', 'AR10', + 'AR100', 'ARs', 'ARm', 'ARl' + ], + default=None, + help='metric to report') + parser.add_argument( + '--prints', + type=str, + nargs='+', + choices=['P', 'mPC', 'rPC'], + default='mPC', + help='corruption benchmark metric to print') + parser.add_argument( + '--aggregate', + type=str, + choices=['all', 'benchmark'], + default='benchmark', + help='aggregate all results or only those \ + for benchmark corruptions') + + args = parser.parse_args() + + for task in args.task: + get_results( + args.filename, + dataset=args.dataset, + task=task, + metric=args.metric, + prints=args.prints, + aggregate=args.aggregate) + + +if __name__ == '__main__': + main() diff --git a/tools/analysis_tools/test_robustness.py b/tools/analysis_tools/test_robustness.py new file mode 100644 index 0000000..29cb41d --- /dev/null +++ b/tools/analysis_tools/test_robustness.py @@ -0,0 +1,387 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import argparse +import copy +import os +import os.path as osp + +import mmcv +import torch +from mmcv import DictAction +from mmcv.parallel import MMDataParallel, MMDistributedDataParallel +from mmcv.runner import (get_dist_info, init_dist, load_checkpoint, + wrap_fp16_model) +from pycocotools.coco import COCO +from pycocotools.cocoeval import COCOeval +from tools.analysis_tools.robustness_eval import get_results + +from mmdet import datasets +from mmdet.apis import multi_gpu_test, set_random_seed, single_gpu_test +from mmdet.core import eval_map +from mmdet.datasets import build_dataloader, build_dataset +from mmdet.models import build_detector + + +def coco_eval_with_return(result_files, + result_types, + coco, + max_dets=(100, 300, 1000)): + for res_type in result_types: + assert res_type in ['proposal', 'bbox', 'segm', 'keypoints'] + + if mmcv.is_str(coco): + coco = COCO(coco) + assert isinstance(coco, COCO) + + eval_results = {} + for res_type in result_types: + result_file = result_files[res_type] + assert result_file.endswith('.json') + + coco_dets = coco.loadRes(result_file) + img_ids = coco.getImgIds() + iou_type = 'bbox' if res_type == 'proposal' else res_type + cocoEval = COCOeval(coco, coco_dets, iou_type) + cocoEval.params.imgIds = img_ids + if res_type == 'proposal': + cocoEval.params.useCats = 0 + cocoEval.params.maxDets = list(max_dets) + cocoEval.evaluate() + cocoEval.accumulate() + cocoEval.summarize() + if res_type == 'segm' or res_type == 'bbox': + metric_names = [ + 'AP', 'AP50', 'AP75', 'APs', 'APm', 'APl', 'AR1', 'AR10', + 'AR100', 'ARs', 'ARm', 'ARl' + ] + eval_results[res_type] = { + metric_names[i]: cocoEval.stats[i] + for i in range(len(metric_names)) + } + else: + eval_results[res_type] = cocoEval.stats + + return eval_results + + +def voc_eval_with_return(result_file, + dataset, + iou_thr=0.5, + logger='print', + only_ap=True): + det_results = mmcv.load(result_file) + annotations = [dataset.get_ann_info(i) for i in range(len(dataset))] + if hasattr(dataset, 'year') and dataset.year == 2007: + dataset_name = 'voc07' + else: + dataset_name = dataset.CLASSES + mean_ap, eval_results = eval_map( + det_results, + annotations, + scale_ranges=None, + iou_thr=iou_thr, + dataset=dataset_name, + logger=logger) + + if only_ap: + 
eval_results = [{ + 'ap': eval_results[i]['ap'] + } for i in range(len(eval_results))] + + return mean_ap, eval_results + + +def parse_args(): + parser = argparse.ArgumentParser(description='MMDet test detector') + parser.add_argument('config', help='test config file path') + parser.add_argument('checkpoint', help='checkpoint file') + parser.add_argument('--out', help='output result file') + parser.add_argument( + '--corruptions', + type=str, + nargs='+', + default='benchmark', + choices=[ + 'all', 'benchmark', 'noise', 'blur', 'weather', 'digital', + 'holdout', 'None', 'gaussian_noise', 'shot_noise', 'impulse_noise', + 'defocus_blur', 'glass_blur', 'motion_blur', 'zoom_blur', 'snow', + 'frost', 'fog', 'brightness', 'contrast', 'elastic_transform', + 'pixelate', 'jpeg_compression', 'speckle_noise', 'gaussian_blur', + 'spatter', 'saturate' + ], + help='corruptions') + parser.add_argument( + '--severities', + type=int, + nargs='+', + default=[0, 1, 2, 3, 4, 5], + help='corruption severity levels') + parser.add_argument( + '--eval', + type=str, + nargs='+', + choices=['proposal', 'proposal_fast', 'bbox', 'segm', 'keypoints'], + help='eval types') + parser.add_argument( + '--iou-thr', + type=float, + default=0.5, + help='IoU threshold for pascal voc evaluation') + parser.add_argument( + '--summaries', + type=bool, + default=False, + help='Print summaries for every corruption and severity') + parser.add_argument( + '--workers', type=int, default=32, help='workers per gpu') + parser.add_argument('--show', action='store_true', help='show results') + parser.add_argument( + '--show-dir', help='directory where painted images will be saved') + parser.add_argument( + '--show-score-thr', + type=float, + default=0.3, + help='score threshold (default: 0.3)') + parser.add_argument('--tmpdir', help='tmp dir for writing some results') + parser.add_argument('--seed', type=int, default=None, help='random seed') + parser.add_argument( + '--launcher', + choices=['none', 'pytorch', 'slurm', 'mpi'], + default='none', + help='job launcher') + parser.add_argument('--local_rank', type=int, default=0) + parser.add_argument( + '--final-prints', + type=str, + nargs='+', + choices=['P', 'mPC', 'rPC'], + default='mPC', + help='corruption benchmark metric to print at the end') + parser.add_argument( + '--final-prints-aggregate', + type=str, + choices=['all', 'benchmark'], + default='benchmark', + help='aggregate all results or only those for benchmark corruptions') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. If the value to ' + 'be overwritten is a list, it should be like key="[a,b]" or key=a,b ' + 'It also allows nested list/tuple values, e.g. 
key="[(a,b),(c,d)]" '
+        'Note that the quotation marks are necessary and that no white space '
+        'is allowed.')
+    args = parser.parse_args()
+    if 'LOCAL_RANK' not in os.environ:
+        os.environ['LOCAL_RANK'] = str(args.local_rank)
+    return args
+
+
+def main():
+    args = parse_args()
+
+    assert args.out or args.show or args.show_dir, \
+        ('Please specify at least one operation (save or show the results) '
+         'with the argument "--out", "--show" or "--show-dir"')
+
+    if args.out is not None and not args.out.endswith(('.pkl', '.pickle')):
+        raise ValueError('The output file must be a pkl file.')
+
+    cfg = mmcv.Config.fromfile(args.config)
+    if args.cfg_options is not None:
+        cfg.merge_from_dict(args.cfg_options)
+    # set cudnn_benchmark
+    if cfg.get('cudnn_benchmark', False):
+        torch.backends.cudnn.benchmark = True
+    cfg.model.pretrained = None
+    cfg.data.test.test_mode = True
+    if args.workers == 0:
+        args.workers = cfg.data.workers_per_gpu
+
+    # init distributed env first, since logger depends on the dist info.
+    if args.launcher == 'none':
+        distributed = False
+    else:
+        distributed = True
+        init_dist(args.launcher, **cfg.dist_params)
+
+    # set random seeds
+    if args.seed is not None:
+        set_random_seed(args.seed)
+
+    if 'all' in args.corruptions:
+        corruptions = [
+            'gaussian_noise', 'shot_noise', 'impulse_noise', 'defocus_blur',
+            'glass_blur', 'motion_blur', 'zoom_blur', 'snow', 'frost', 'fog',
+            'brightness', 'contrast', 'elastic_transform', 'pixelate',
+            'jpeg_compression', 'speckle_noise', 'gaussian_blur', 'spatter',
+            'saturate'
+        ]
+    elif 'benchmark' in args.corruptions:
+        corruptions = [
+            'gaussian_noise', 'shot_noise', 'impulse_noise', 'defocus_blur',
+            'glass_blur', 'motion_blur', 'zoom_blur', 'snow', 'frost', 'fog',
+            'brightness', 'contrast', 'elastic_transform', 'pixelate',
+            'jpeg_compression'
+        ]
+    elif 'noise' in args.corruptions:
+        corruptions = ['gaussian_noise', 'shot_noise', 'impulse_noise']
+    elif 'blur' in args.corruptions:
+        corruptions = [
+            'defocus_blur', 'glass_blur', 'motion_blur', 'zoom_blur'
+        ]
+    elif 'weather' in args.corruptions:
+        corruptions = ['snow', 'frost', 'fog', 'brightness']
+    elif 'digital' in args.corruptions:
+        corruptions = [
+            'contrast', 'elastic_transform', 'pixelate', 'jpeg_compression'
+        ]
+    elif 'holdout' in args.corruptions:
+        corruptions = ['speckle_noise', 'gaussian_blur', 'spatter', 'saturate']
+    elif 'None' in args.corruptions:
+        corruptions = ['None']
+        args.severities = [0]
+    else:
+        corruptions = args.corruptions
+
+    rank, _ = get_dist_info()
+    aggregated_results = {}
+    for corr_i, corruption in enumerate(corruptions):
+        aggregated_results[corruption] = {}
+        for sev_i, corruption_severity in enumerate(args.severities):
+            # evaluate severity 0 (= no corruption) only once
+            if corr_i > 0 and corruption_severity == 0:
+                aggregated_results[corruption][0] = \
+                    aggregated_results[corruptions[0]][0]
+                continue
+
+            test_data_cfg = copy.deepcopy(cfg.data.test)
+            # assign corruption and severity
+            if corruption_severity > 0:
+                corruption_trans = dict(
+                    type='Corrupt',
+                    corruption=corruption,
+                    severity=corruption_severity)
+                # TODO: hard coded "1", we assume that the first step is
+                # loading images, which needs to be fixed in the future
+                test_data_cfg['pipeline'].insert(1, corruption_trans)
+
+            # print info
+            print(f'\nTesting {corruption} at severity {corruption_severity}')
+
+            # build the dataloader
+            # TODO: support multiple images per gpu
+            #       (only minor changes are needed)
+            dataset = build_dataset(test_data_cfg)
+            data_loader = build_dataloader(
+                dataset,
+                samples_per_gpu=1,
+                workers_per_gpu=args.workers,
+                dist=distributed,
+                shuffle=False)
+
+            # build the model and load checkpoint
+            cfg.model.train_cfg = None
+            model = build_detector(cfg.model, test_cfg=cfg.get('test_cfg'))
+            fp16_cfg = cfg.get('fp16', None)
+            if fp16_cfg is not None:
+                wrap_fp16_model(model)
+            checkpoint = load_checkpoint(
+                model, args.checkpoint, map_location='cpu')
+            # old versions did not save class info in checkpoints,
+            # this workaround is for backward compatibility
+            if 'CLASSES' in checkpoint.get('meta', {}):
+                model.CLASSES = checkpoint['meta']['CLASSES']
+            else:
+                model.CLASSES = dataset.CLASSES
+
+            if not distributed:
+                model = MMDataParallel(model, device_ids=[0])
+                show_dir = args.show_dir
+                if show_dir is not None:
+                    show_dir = osp.join(show_dir, corruption)
+                    show_dir = osp.join(show_dir, str(corruption_severity))
+                    if not osp.exists(show_dir):
+                        os.makedirs(show_dir)
+                outputs = single_gpu_test(model, data_loader, args.show,
+                                          show_dir, args.show_score_thr)
+            else:
+                model = MMDistributedDataParallel(
+                    model.cuda(),
+                    device_ids=[torch.cuda.current_device()],
+                    broadcast_buffers=False)
+                outputs = multi_gpu_test(model, data_loader, args.tmpdir)
+
+            if args.out and rank == 0:
+                eval_results_filename = (
+                    osp.splitext(args.out)[0] + '_results' +
+                    osp.splitext(args.out)[1])
+                mmcv.dump(outputs, args.out)
+                eval_types = args.eval
+                if cfg.dataset_type == 'VOCDataset':
+                    if eval_types:
+                        for eval_type in eval_types:
+                            if eval_type == 'bbox':
+                                test_dataset = mmcv.runner.obj_from_dict(
+                                    cfg.data.test, datasets)
+                                logger = 'print' if args.summaries else None
+                                mean_ap, eval_results = \
+                                    voc_eval_with_return(
+                                        args.out, test_dataset,
+                                        args.iou_thr, logger)
+                                aggregated_results[corruption][
+                                    corruption_severity] = eval_results
+                            else:
+                                print('\nOnly "bbox" evaluation '
+                                      'is supported for Pascal VOC')
+                else:
+                    if eval_types:
+                        print(f'Starting to evaluate '
+                              f'{" and ".join(eval_types)}')
+                        if eval_types == ['proposal_fast']:
+                            result_file = args.out
+                        else:
+                            if not isinstance(outputs[0], dict):
+                                result_files = dataset.results2json(
+                                    outputs, args.out)
+                            else:
+                                for name in outputs[0]:
+                                    print(f'\nEvaluating {name}')
+                                    outputs_ = [out[name] for out in outputs]
+                                    result_file = args.out + f'.{name}'
+                                    result_files = dataset.results2json(
+                                        outputs_, result_file)
+                        eval_results = coco_eval_with_return(
+                            result_files, eval_types, dataset.coco)
+                        aggregated_results[corruption][
+                            corruption_severity] = eval_results
+                    else:
+                        print('\nNo task was selected for evaluation;'
+                              '\nUse --eval to select a task')
+
+                # save results after each evaluation
+                mmcv.dump(aggregated_results, eval_results_filename)
+
+    if rank == 0:
+        # print final results
+        print('\nAggregated results:')
+        prints = args.final_prints
+        aggregate = args.final_prints_aggregate
+
+        if cfg.dataset_type == 'VOCDataset':
+            get_results(
+                eval_results_filename,
+                dataset='voc',
+                prints=prints,
+                aggregate=aggregate)
+        else:
+            get_results(
+                eval_results_filename,
+                dataset='coco',
+                prints=prints,
+                aggregate=aggregate)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/tools/dataset_converters/cityscapes.py b/tools/dataset_converters/cityscapes.py
new file mode 100644
index 0000000..c8e44b9
--- /dev/null
+++ b/tools/dataset_converters/cityscapes.py
@@ -0,0 +1,152 @@
+# Copyright (c) OpenMMLab. All rights reserved.
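+"""Convert Cityscapes gtFine instance annotations to COCO-style json.
+
+Instance-id maps store ``label_id * 1000 + instance`` for countable objects
+and the bare ``label_id`` for crowd regions, which is why ``load_img_info``
+below treats ids under 1000 as crowd annotations.
+"""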
+import argparse +import glob +import os.path as osp + +import cityscapesscripts.helpers.labels as CSLabels +import mmcv +import numpy as np +import pycocotools.mask as maskUtils + + +def collect_files(img_dir, gt_dir): + suffix = 'leftImg8bit.png' + files = [] + for img_file in glob.glob(osp.join(img_dir, '**/*.png')): + assert img_file.endswith(suffix), img_file + inst_file = gt_dir + img_file[ + len(img_dir):-len(suffix)] + 'gtFine_instanceIds.png' + # Note that labelIds are not converted to trainId for seg map + segm_file = gt_dir + img_file[ + len(img_dir):-len(suffix)] + 'gtFine_labelIds.png' + files.append((img_file, inst_file, segm_file)) + assert len(files), f'No images found in {img_dir}' + print(f'Loaded {len(files)} images from {img_dir}') + + return files + + +def collect_annotations(files, nproc=1): + print('Loading annotation images') + if nproc > 1: + images = mmcv.track_parallel_progress( + load_img_info, files, nproc=nproc) + else: + images = mmcv.track_progress(load_img_info, files) + + return images + + +def load_img_info(files): + img_file, inst_file, segm_file = files + inst_img = mmcv.imread(inst_file, 'unchanged') + # ids < 24 are stuff labels (filtering them first is about 5% faster) + unique_inst_ids = np.unique(inst_img[inst_img >= 24]) + anno_info = [] + for inst_id in unique_inst_ids: + # For non-crowd annotations, inst_id // 1000 is the label_id + # Crowd annotations have <1000 instance ids + label_id = inst_id // 1000 if inst_id >= 1000 else inst_id + label = CSLabels.id2label[label_id] + if not label.hasInstances or label.ignoreInEval: + continue + + category_id = label.id + iscrowd = int(inst_id < 1000) + mask = np.asarray(inst_img == inst_id, dtype=np.uint8, order='F') + mask_rle = maskUtils.encode(mask[:, :, None])[0] + + area = maskUtils.area(mask_rle) + # convert to COCO style XYWH format + bbox = maskUtils.toBbox(mask_rle) + + # for json encoding + mask_rle['counts'] = mask_rle['counts'].decode() + + anno = dict( + iscrowd=iscrowd, + category_id=category_id, + bbox=bbox.tolist(), + area=area.tolist(), + segmentation=mask_rle) + anno_info.append(anno) + video_name = osp.basename(osp.dirname(img_file)) + img_info = dict( + # remove img_prefix for filename + file_name=osp.join(video_name, osp.basename(img_file)), + height=inst_img.shape[0], + width=inst_img.shape[1], + anno_info=anno_info, + segm_file=osp.join(video_name, osp.basename(segm_file))) + + return img_info + + +def cvt_annotations(image_infos, out_json_name): + out_json = dict() + img_id = 0 + ann_id = 0 + out_json['images'] = [] + out_json['categories'] = [] + out_json['annotations'] = [] + for image_info in image_infos: + image_info['id'] = img_id + anno_infos = image_info.pop('anno_info') + out_json['images'].append(image_info) + for anno_info in anno_infos: + anno_info['image_id'] = img_id + anno_info['id'] = ann_id + out_json['annotations'].append(anno_info) + ann_id += 1 + img_id += 1 + for label in CSLabels.labels: + if label.hasInstances and not label.ignoreInEval: + cat = dict(id=label.id, name=label.name) + out_json['categories'].append(cat) + + if len(out_json['annotations']) == 0: + out_json.pop('annotations') + + mmcv.dump(out_json, out_json_name) + return out_json + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Convert Cityscapes annotations to COCO format') + parser.add_argument('cityscapes_path', help='cityscapes data path') + parser.add_argument('--img-dir', default='leftImg8bit', type=str) + parser.add_argument('--gt-dir', default='gtFine', type=str) 
+    parser.add_argument('-o', '--out-dir', help='output path')
+    parser.add_argument(
+        '--nproc', default=1, type=int, help='number of processes')
+    args = parser.parse_args()
+    return args
+
+
+def main():
+    args = parse_args()
+    cityscapes_path = args.cityscapes_path
+    out_dir = args.out_dir if args.out_dir else cityscapes_path
+    mmcv.mkdir_or_exist(out_dir)
+
+    img_dir = osp.join(cityscapes_path, args.img_dir)
+    gt_dir = osp.join(cityscapes_path, args.gt_dir)
+
+    set_name = dict(
+        train='instancesonly_filtered_gtFine_train.json',
+        val='instancesonly_filtered_gtFine_val.json',
+        test='instancesonly_filtered_gtFine_test.json')
+
+    for split, json_name in set_name.items():
+        print(f'Converting {split} into {json_name}')
+        with mmcv.Timer(
+                print_tmpl='It took {}s to convert Cityscapes annotation'):
+            files = collect_files(
+                osp.join(img_dir, split), osp.join(gt_dir, split))
+            image_infos = collect_annotations(files, nproc=args.nproc)
+            cvt_annotations(image_infos, osp.join(out_dir, json_name))
+
+
+if __name__ == '__main__':
+    main()
diff --git a/tools/dataset_converters/images2coco.py b/tools/dataset_converters/images2coco.py
new file mode 100644
index 0000000..1c4e2f1
--- /dev/null
+++ b/tools/dataset_converters/images2coco.py
@@ -0,0 +1,101 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import argparse
+import os
+
+import mmcv
+from PIL import Image
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(
+        description='Convert images to coco format without annotations')
+    parser.add_argument('img_path', help='The root path of images')
+    parser.add_argument(
+        'classes', type=str, help='The text file storing the class list')
+    parser.add_argument(
+        'out',
+        type=str,
+        help='The output annotation json file name. The save dir is the '
+        'same directory as img_path')
+    parser.add_argument(
+        '-e',
+        '--exclude-extensions',
+        type=str,
+        nargs='+',
+        help='The suffix of images to be excluded, such as "png" and "bmp"')
+    args = parser.parse_args()
+    return args
+
+
+def collect_image_infos(path, exclude_extensions=None):
+    img_infos = []
+
+    images_generator = mmcv.scandir(path, recursive=True)
+    for image_path in mmcv.track_iter_progress(list(images_generator)):
+        if exclude_extensions is None or (
+                exclude_extensions is not None
+                and not image_path.lower().endswith(exclude_extensions)):
+            image_path = os.path.join(path, image_path)
+            img_pillow = Image.open(image_path)
+            img_info = {
+                'filename': image_path,
+                'width': img_pillow.width,
+                'height': img_pillow.height,
+            }
+            img_infos.append(img_info)
+    return img_infos
+
+
+def cvt_to_coco_json(img_infos, classes):
+    image_id = 0
+    coco = dict()
+    coco['images'] = []
+    coco['type'] = 'instance'
+    coco['categories'] = []
+    coco['annotations'] = []
+    image_set = set()
+
+    for category_id, name in enumerate(classes):
+        category_item = dict()
+        category_item['supercategory'] = str('none')
+        category_item['id'] = int(category_id)
+        category_item['name'] = str(name)
+        coco['categories'].append(category_item)
+
+    for img_dict in img_infos:
+        file_name = img_dict['filename']
+        assert file_name not in image_set
+        image_item = dict()
+        image_item['id'] = int(image_id)
+        image_item['file_name'] = str(file_name)
+        image_item['height'] = int(img_dict['height'])
+        image_item['width'] = int(img_dict['width'])
+        coco['images'].append(image_item)
+        image_set.add(file_name)
+
+        image_id += 1
+    return coco
+
+
+def main():
+    args = parse_args()
+    assert args.out.endswith(
+        'json'), 'The output file name must have a json suffix'
+
+    # 1 load
image list info + img_infos = collect_image_infos(args.img_path, args.exclude_extensions) + + # 2 convert to coco format data + classes = mmcv.list_from_file(args.classes) + coco_info = cvt_to_coco_json(img_infos, classes) + + # 3 dump + save_dir = os.path.join(args.img_path, '..', 'annotations') + mmcv.mkdir_or_exist(save_dir) + save_path = os.path.join(save_dir, args.out) + mmcv.dump(coco_info, save_path) + print(f'save json file: {save_path}') + + +if __name__ == '__main__': + main() diff --git a/tools/dataset_converters/pascal_voc.py b/tools/dataset_converters/pascal_voc.py new file mode 100644 index 0000000..20f8801 --- /dev/null +++ b/tools/dataset_converters/pascal_voc.py @@ -0,0 +1,237 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import argparse +import os.path as osp +import xml.etree.ElementTree as ET + +import mmcv +import numpy as np + +from mmdet.core import voc_classes + +label_ids = {name: i for i, name in enumerate(voc_classes())} + + +def parse_xml(args): + xml_path, img_path = args + tree = ET.parse(xml_path) + root = tree.getroot() + size = root.find('size') + w = int(size.find('width').text) + h = int(size.find('height').text) + bboxes = [] + labels = [] + bboxes_ignore = [] + labels_ignore = [] + for obj in root.findall('object'): + name = obj.find('name').text + label = label_ids[name] + difficult = int(obj.find('difficult').text) + bnd_box = obj.find('bndbox') + bbox = [ + int(bnd_box.find('xmin').text), + int(bnd_box.find('ymin').text), + int(bnd_box.find('xmax').text), + int(bnd_box.find('ymax').text) + ] + if difficult: + bboxes_ignore.append(bbox) + labels_ignore.append(label) + else: + bboxes.append(bbox) + labels.append(label) + if not bboxes: + bboxes = np.zeros((0, 4)) + labels = np.zeros((0, )) + else: + bboxes = np.array(bboxes, ndmin=2) - 1 + labels = np.array(labels) + if not bboxes_ignore: + bboxes_ignore = np.zeros((0, 4)) + labels_ignore = np.zeros((0, )) + else: + bboxes_ignore = np.array(bboxes_ignore, ndmin=2) - 1 + labels_ignore = np.array(labels_ignore) + annotation = { + 'filename': img_path, + 'width': w, + 'height': h, + 'ann': { + 'bboxes': bboxes.astype(np.float32), + 'labels': labels.astype(np.int64), + 'bboxes_ignore': bboxes_ignore.astype(np.float32), + 'labels_ignore': labels_ignore.astype(np.int64) + } + } + return annotation + + +def cvt_annotations(devkit_path, years, split, out_file): + if not isinstance(years, list): + years = [years] + annotations = [] + for year in years: + filelist = osp.join(devkit_path, + f'VOC{year}/ImageSets/Main/{split}.txt') + if not osp.isfile(filelist): + print(f'filelist does not exist: {filelist}, ' + f'skip voc{year} {split}') + return + img_names = mmcv.list_from_file(filelist) + xml_paths = [ + osp.join(devkit_path, f'VOC{year}/Annotations/{img_name}.xml') + for img_name in img_names + ] + img_paths = [ + f'VOC{year}/JPEGImages/{img_name}.jpg' for img_name in img_names + ] + part_annotations = mmcv.track_progress(parse_xml, + list(zip(xml_paths, img_paths))) + annotations.extend(part_annotations) + if out_file.endswith('json'): + annotations = cvt_to_coco_json(annotations) + mmcv.dump(annotations, out_file) + return annotations + + +def cvt_to_coco_json(annotations): + image_id = 0 + annotation_id = 0 + coco = dict() + coco['images'] = [] + coco['type'] = 'instance' + coco['categories'] = [] + coco['annotations'] = [] + image_set = set() + + def addAnnItem(annotation_id, image_id, category_id, bbox, difficult_flag): + annotation_item = dict() + annotation_item['segmentation'] = [] + + seg = [] 
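+        # Descriptive note: the polygon below simply traces the four corners
+        # of the bbox, so the box can double as a COCO-style segmentation.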
+ # bbox[] is x1,y1,x2,y2 + # left_top + seg.append(int(bbox[0])) + seg.append(int(bbox[1])) + # left_bottom + seg.append(int(bbox[0])) + seg.append(int(bbox[3])) + # right_bottom + seg.append(int(bbox[2])) + seg.append(int(bbox[3])) + # right_top + seg.append(int(bbox[2])) + seg.append(int(bbox[1])) + + annotation_item['segmentation'].append(seg) + + xywh = np.array( + [bbox[0], bbox[1], bbox[2] - bbox[0], bbox[3] - bbox[1]]) + annotation_item['area'] = int(xywh[2] * xywh[3]) + if difficult_flag == 1: + annotation_item['ignore'] = 0 + annotation_item['iscrowd'] = 1 + else: + annotation_item['ignore'] = 0 + annotation_item['iscrowd'] = 0 + annotation_item['image_id'] = int(image_id) + annotation_item['bbox'] = xywh.astype(int).tolist() + annotation_item['category_id'] = int(category_id) + annotation_item['id'] = int(annotation_id) + coco['annotations'].append(annotation_item) + return annotation_id + 1 + + for category_id, name in enumerate(voc_classes()): + category_item = dict() + category_item['supercategory'] = str('none') + category_item['id'] = int(category_id) + category_item['name'] = str(name) + coco['categories'].append(category_item) + + for ann_dict in annotations: + file_name = ann_dict['filename'] + ann = ann_dict['ann'] + assert file_name not in image_set + image_item = dict() + image_item['id'] = int(image_id) + image_item['file_name'] = str(file_name) + image_item['height'] = int(ann_dict['height']) + image_item['width'] = int(ann_dict['width']) + coco['images'].append(image_item) + image_set.add(file_name) + + bboxes = ann['bboxes'][:, :4] + labels = ann['labels'] + for bbox_id in range(len(bboxes)): + bbox = bboxes[bbox_id] + label = labels[bbox_id] + annotation_id = addAnnItem( + annotation_id, image_id, label, bbox, difficult_flag=0) + + bboxes_ignore = ann['bboxes_ignore'][:, :4] + labels_ignore = ann['labels_ignore'] + for bbox_id in range(len(bboxes_ignore)): + bbox = bboxes_ignore[bbox_id] + label = labels_ignore[bbox_id] + annotation_id = addAnnItem( + annotation_id, image_id, label, bbox, difficult_flag=1) + + image_id += 1 + + return coco + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Convert PASCAL VOC annotations to mmdetection format') + parser.add_argument('devkit_path', help='pascal voc devkit path') + parser.add_argument('-o', '--out-dir', help='output path') + parser.add_argument( + '--out-format', + default='pkl', + choices=('pkl', 'coco'), + help='output format, "coco" indicates coco annotation format') + args = parser.parse_args() + return args + + +def main(): + args = parse_args() + devkit_path = args.devkit_path + out_dir = args.out_dir if args.out_dir else devkit_path + mmcv.mkdir_or_exist(out_dir) + + years = [] + if osp.isdir(osp.join(devkit_path, 'VOC2007')): + years.append('2007') + if osp.isdir(osp.join(devkit_path, 'VOC2012')): + years.append('2012') + if '2007' in years and '2012' in years: + years.append(['2007', '2012']) + if not years: + raise IOError(f'The devkit path {devkit_path} contains neither ' + '"VOC2007" nor "VOC2012" subfolder') + out_fmt = f'.{args.out_format}' + if args.out_format == 'coco': + out_fmt = '.json' + for year in years: + if year == '2007': + prefix = 'voc07' + elif year == '2012': + prefix = 'voc12' + elif year == ['2007', '2012']: + prefix = 'voc0712' + for split in ['train', 'val', 'trainval']: + dataset_name = prefix + '_' + split + print(f'processing {dataset_name} ...') + cvt_annotations(devkit_path, year, split, + osp.join(out_dir, dataset_name + out_fmt)) + if not 
isinstance(year, list): + dataset_name = prefix + '_test' + print(f'processing {dataset_name} ...') + cvt_annotations(devkit_path, year, 'test', + osp.join(out_dir, dataset_name + out_fmt)) + print('Done!') + + +if __name__ == '__main__': + main() diff --git a/tools/deployment/mmdet2torchserve.py b/tools/deployment/mmdet2torchserve.py new file mode 100644 index 0000000..70a081a --- /dev/null +++ b/tools/deployment/mmdet2torchserve.py @@ -0,0 +1,110 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from argparse import ArgumentParser, Namespace +from pathlib import Path +from tempfile import TemporaryDirectory + +import mmcv + +try: + from model_archiver.model_packaging import package_model + from model_archiver.model_packaging_utils import ModelExportUtils +except ImportError: + package_model = None + + +def mmdet2torchserve( + config_file: str, + checkpoint_file: str, + output_folder: str, + model_name: str, + model_version: str = '1.0', + force: bool = False, +): + """Converts MMDetection model (config + checkpoint) to TorchServe `.mar`. + + Args: + config_file: + In MMDetection config format. + The contents vary for each task repository. + checkpoint_file: + In MMDetection checkpoint format. + The contents vary for each task repository. + output_folder: + Folder where `{model_name}.mar` will be created. + The file created will be in TorchServe archive format. + model_name: + If not None, used for naming the `{model_name}.mar` file + that will be created under `output_folder`. + If None, `{Path(checkpoint_file).stem}` will be used. + model_version: + Model's version. + force: + If True, if there is an existing `{model_name}.mar` + file under `output_folder` it will be overwritten. + """ + mmcv.mkdir_or_exist(output_folder) + + config = mmcv.Config.fromfile(config_file) + + with TemporaryDirectory() as tmpdir: + config.dump(f'{tmpdir}/config.py') + + args = Namespace( + **{ + 'model_file': f'{tmpdir}/config.py', + 'serialized_file': checkpoint_file, + 'handler': f'{Path(__file__).parent}/mmdet_handler.py', + 'model_name': model_name or Path(checkpoint_file).stem, + 'version': model_version, + 'export_path': output_folder, + 'force': force, + 'requirements_file': None, + 'extra_files': None, + 'runtime': 'python', + 'archive_format': 'default' + }) + manifest = ModelExportUtils.generate_manifest_json(args) + package_model(args, manifest) + + +def parse_args(): + parser = ArgumentParser( + description='Convert MMDetection models to TorchServe `.mar` format.') + parser.add_argument('config', type=str, help='config file path') + parser.add_argument('checkpoint', type=str, help='checkpoint file path') + parser.add_argument( + '--output-folder', + type=str, + required=True, + help='Folder where `{model_name}.mar` will be created.') + parser.add_argument( + '--model-name', + type=str, + default=None, + help='If not None, used for naming the `{model_name}.mar`' + 'file that will be created under `output_folder`.' + 'If None, `{Path(checkpoint_file).stem}` will be used.') + parser.add_argument( + '--model-version', + type=str, + default='1.0', + help='Number used for versioning.') + parser.add_argument( + '-f', + '--force', + action='store_true', + help='overwrite the existing `{model_name}.mar`') + args = parser.parse_args() + + return args + + +if __name__ == '__main__': + args = parse_args() + + if package_model is None: + raise ImportError('`torch-model-archiver` is required.' 
+ 'Try: pip install torch-model-archiver') + + mmdet2torchserve(args.config, args.checkpoint, args.output_folder, + args.model_name, args.model_version, args.force) diff --git a/tools/deployment/mmdet_handler.py b/tools/deployment/mmdet_handler.py new file mode 100644 index 0000000..18fc230 --- /dev/null +++ b/tools/deployment/mmdet_handler.py @@ -0,0 +1,71 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import base64 +import os + +import mmcv +import torch +from ts.torch_handler.base_handler import BaseHandler + +from mmdet.apis import inference_detector, init_detector + + +class MMdetHandler(BaseHandler): + threshold = 0.5 + + def initialize(self, context): + properties = context.system_properties + self.map_location = 'cuda' if torch.cuda.is_available() else 'cpu' + self.device = torch.device(self.map_location + ':' + + str(properties.get('gpu_id')) if torch.cuda. + is_available() else self.map_location) + self.manifest = context.manifest + + model_dir = properties.get('model_dir') + serialized_file = self.manifest['model']['serializedFile'] + checkpoint = os.path.join(model_dir, serialized_file) + self.config_file = os.path.join(model_dir, 'config.py') + + self.model = init_detector(self.config_file, checkpoint, self.device) + self.initialized = True + + def preprocess(self, data): + images = [] + + for row in data: + image = row.get('data') or row.get('body') + if isinstance(image, str): + image = base64.b64decode(image) + image = mmcv.imfrombytes(image) + images.append(image) + + return images + + def inference(self, data, *args, **kwargs): + results = inference_detector(self.model, data) + return results + + def postprocess(self, data): + # Format output following the example ObjectDetectionHandler format + output = [] + for image_index, image_result in enumerate(data): + output.append([]) + if isinstance(image_result, tuple): + bbox_result, segm_result = image_result + if isinstance(segm_result, tuple): + segm_result = segm_result[0] # ms rcnn + else: + bbox_result, segm_result = image_result, None + + for class_index, class_result in enumerate(bbox_result): + class_name = self.model.CLASSES[class_index] + for bbox in class_result: + bbox_coords = bbox[:-1].tolist() + score = float(bbox[-1]) + if score >= self.threshold: + output[image_index].append({ + 'class_name': class_name, + 'bbox': bbox_coords, + 'score': score + }) + + return output diff --git a/tools/deployment/onnx2tensorrt.py b/tools/deployment/onnx2tensorrt.py new file mode 100644 index 0000000..84a9afe --- /dev/null +++ b/tools/deployment/onnx2tensorrt.py @@ -0,0 +1,254 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+import argparse +import os +import os.path as osp +import warnings + +import numpy as np +import onnx +import torch +from mmcv import Config +from mmcv.tensorrt import is_tensorrt_plugin_loaded, onnx2trt, save_trt_engine + +from mmdet.core.export import preprocess_example_input +from mmdet.core.export.model_wrappers import (ONNXRuntimeDetector, + TensorRTDetector) +from mmdet.datasets import DATASETS + + +def get_GiB(x: int): + """return x GiB.""" + return x * (1 << 30) + + +def onnx2tensorrt(onnx_file, + trt_file, + input_config, + verify=False, + show=False, + workspace_size=1, + verbose=False): + import tensorrt as trt + onnx_model = onnx.load(onnx_file) + max_shape = input_config['max_shape'] + min_shape = input_config['min_shape'] + opt_shape = input_config['opt_shape'] + fp16_mode = False + # create trt engine and wrapper + opt_shape_dict = {'input': [min_shape, opt_shape, max_shape]} + max_workspace_size = get_GiB(workspace_size) + trt_engine = onnx2trt( + onnx_model, + opt_shape_dict, + log_level=trt.Logger.VERBOSE if verbose else trt.Logger.ERROR, + fp16_mode=fp16_mode, + max_workspace_size=max_workspace_size) + save_dir, _ = osp.split(trt_file) + if save_dir: + os.makedirs(save_dir, exist_ok=True) + save_trt_engine(trt_engine, trt_file) + print(f'Successfully created TensorRT engine: {trt_file}') + + if verify: + # prepare input + one_img, one_meta = preprocess_example_input(input_config) + img_list, img_meta_list = [one_img], [[one_meta]] + img_list = [_.cuda().contiguous() for _ in img_list] + + # wrap ONNX and TensorRT model + onnx_model = ONNXRuntimeDetector(onnx_file, CLASSES, device_id=0) + trt_model = TensorRTDetector(trt_file, CLASSES, device_id=0) + + # inference with wrapped model + with torch.no_grad(): + onnx_results = onnx_model( + img_list, img_metas=img_meta_list, return_loss=False)[0] + trt_results = trt_model( + img_list, img_metas=img_meta_list, return_loss=False)[0] + + if show: + out_file_ort, out_file_trt = None, None + else: + out_file_ort, out_file_trt = 'show-ort.png', 'show-trt.png' + show_img = one_meta['show_img'] + score_thr = 0.3 + onnx_model.show_result( + show_img, + onnx_results, + score_thr=score_thr, + show=True, + win_name='ONNXRuntime', + out_file=out_file_ort) + trt_model.show_result( + show_img, + trt_results, + score_thr=score_thr, + show=True, + win_name='TensorRT', + out_file=out_file_trt) + with_mask = trt_model.with_masks + # compare a part of result + if with_mask: + compare_pairs = list(zip(onnx_results, trt_results)) + else: + compare_pairs = [(onnx_results, trt_results)] + err_msg = 'The numerical values are different between Pytorch' + \ + ' and ONNX, but it does not necessarily mean the' + \ + ' exported ONNX model is problematic.' 
+ # check the numerical value + for onnx_res, pytorch_res in compare_pairs: + for o_res, p_res in zip(onnx_res, pytorch_res): + np.testing.assert_allclose( + o_res, p_res, rtol=1e-03, atol=1e-05, err_msg=err_msg) + print('The numerical values are the same between Pytorch and ONNX') + + +def parse_normalize_cfg(test_pipeline): + transforms = None + for pipeline in test_pipeline: + if 'transforms' in pipeline: + transforms = pipeline['transforms'] + break + assert transforms is not None, 'Failed to find `transforms`' + norm_config_li = [_ for _ in transforms if _['type'] == 'Normalize'] + assert len(norm_config_li) == 1, '`norm_config` should only have one' + norm_config = norm_config_li[0] + return norm_config + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Convert MMDetection models from ONNX to TensorRT') + parser.add_argument('config', help='test config file path') + parser.add_argument('model', help='Filename of input ONNX model') + parser.add_argument( + '--trt-file', + type=str, + default='tmp.trt', + help='Filename of output TensorRT engine') + parser.add_argument( + '--input-img', type=str, default='', help='Image for test') + parser.add_argument( + '--show', action='store_true', help='Whether to show output results') + parser.add_argument( + '--dataset', + type=str, + default='coco', + help='Dataset name. This argument is deprecated and will be \ + removed in future releases.') + parser.add_argument( + '--verify', + action='store_true', + help='Verify the outputs of ONNXRuntime and TensorRT') + parser.add_argument( + '--verbose', + action='store_true', + help='Whether to verbose logging messages while creating \ + TensorRT engine. Defaults to False.') + parser.add_argument( + '--to-rgb', + action='store_false', + help='Feed model with RGB or BGR image. Default is RGB. This \ + argument is deprecated and will be removed in future releases.') + parser.add_argument( + '--shape', + type=int, + nargs='+', + default=[400, 600], + help='Input size of the model') + parser.add_argument( + '--mean', + type=float, + nargs='+', + default=[123.675, 116.28, 103.53], + help='Mean value used for preprocess input data. This argument \ + is deprecated and will be removed in future releases.') + parser.add_argument( + '--std', + type=float, + nargs='+', + default=[58.395, 57.12, 57.375], + help='Variance value used for preprocess input data. \ + This argument is deprecated and will be removed in future releases.') + parser.add_argument( + '--min-shape', + type=int, + nargs='+', + default=None, + help='Minimum input size of the model in TensorRT') + parser.add_argument( + '--max-shape', + type=int, + nargs='+', + default=None, + help='Maximum input size of the model in TensorRT') + parser.add_argument( + '--workspace-size', + type=int, + default=1, + help='Max workspace size in GiB') + + args = parser.parse_args() + return args + + +if __name__ == '__main__': + + assert is_tensorrt_plugin_loaded(), 'TensorRT plugin should be compiled.' 
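+    # Note: `CLASSES`, assigned from the dataset config further below, is
+    # read as a module-level global inside onnx2tensorrt() when --verify
+    # is used.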
+    args = parse_args()
+    warnings.warn(
+        'Arguments like `--to-rgb`, `--mean`, `--std`, `--dataset` would be \
+        parsed directly from config file and are deprecated and will be \
+        removed in future releases.')
+    if not args.input_img:
+        args.input_img = osp.join(osp.dirname(__file__), '../demo/demo.jpg')
+
+    cfg = Config.fromfile(args.config)
+
+    def parse_shape(shape):
+        if len(shape) == 1:
+            shape = (1, 3, shape[0], shape[0])
+        elif len(shape) == 2:
+            shape = (1, 3) + tuple(shape)
+        else:
+            raise ValueError('invalid input shape')
+        return shape
+
+    if args.shape:
+        input_shape = parse_shape(args.shape)
+    else:
+        img_scale = cfg.test_pipeline[1]['img_scale']
+        input_shape = (1, 3, img_scale[1], img_scale[0])
+
+    if not args.max_shape:
+        max_shape = input_shape
+    else:
+        max_shape = parse_shape(args.max_shape)
+
+    if not args.min_shape:
+        min_shape = input_shape
+    else:
+        min_shape = parse_shape(args.min_shape)
+
+    dataset = DATASETS.get(cfg.data.test['type'])
+    assert (dataset is not None)
+    CLASSES = dataset.CLASSES
+    normalize_cfg = parse_normalize_cfg(cfg.test_pipeline)
+
+    input_config = {
+        'min_shape': min_shape,
+        'opt_shape': input_shape,
+        'max_shape': max_shape,
+        'input_shape': input_shape,
+        'input_path': args.input_img,
+        'normalize_cfg': normalize_cfg
+    }
+    # Create TensorRT engine
+    onnx2tensorrt(
+        args.model,
+        args.trt_file,
+        input_config,
+        verify=args.verify,
+        show=args.show,
+        workspace_size=args.workspace_size,
+        verbose=args.verbose)
diff --git a/tools/deployment/optimizer_scripts/.clang-format b/tools/deployment/optimizer_scripts/.clang-format
new file mode 100644
index 0000000..2593ef5
--- /dev/null
+++ b/tools/deployment/optimizer_scripts/.clang-format
@@ -0,0 +1 @@
+BasedOnStyle: Google
\ No newline at end of file
diff --git a/tools/deployment/optimizer_scripts/.gitignore b/tools/deployment/optimizer_scripts/.gitignore
new file mode 100644
index 0000000..991fd07
--- /dev/null
+++ b/tools/deployment/optimizer_scripts/.gitignore
@@ -0,0 +1,7 @@
+__pycache__
+.vscode
+*.pyc
+models.py
+temp.py
+.ssh/
+docker/test_models/
\ No newline at end of file
diff --git a/tools/deployment/optimizer_scripts/README.md b/tools/deployment/optimizer_scripts/README.md
new file mode 100644
index 0000000..cac99c5
--- /dev/null
+++ b/tools/deployment/optimizer_scripts/README.md
@@ -0,0 +1,189 @@
+# Converter Scripts
+
+[![pipeline status](http://192.168.200.1:8088/jiyuan/converter_scripts/badges/master/pipeline.svg)](http://192.168.200.1:8088/jiyuan/converter_scripts/commits/master)
+
+This project collects various optimization and converter scripts for the
+Kneron toolchain. This collection does not include the Keras to ONNX converter
+and the Caffe to ONNX converter. They are in separate projects.
+
+**The scripts not listed below are used as libraries and cannot be used
+directly.**
+
+## onnx2onnx.py
+
+### 1.1. Description
+
+General optimizations on ONNX models for the Kneron toolchain. Though Kneron
+toolchains are designed to take ONNX models as input, they have some
+restrictions on the models (e.g. inferenced shapes for all value_info). Thus,
+we have this tool to do some general optimization and conversion on ONNX
+models. **Notice that this script should take a valid ONNX model as input.**
+It cannot turn an invalid ONNX model into a valid one.
+
+### 1.2. Basic Usage
+
+```bash
+python onnx2onnx.py input.onnx -o output.onnx
+```
+
+### 1.3. Optimizations Included
+
+* Fusing BN into Conv.
+* Fusing BN into Gemm.
+* Fusing consecutive Gemm.
+* Eliminating Identity layers and Dropout layers.
+* Eliminating last shape changing nodes.
+* Replacing initializers with Constant nodes.
+* Replacing global AveragePool with GAP.
+* Replacing Squeeze and Unsqueeze with Reshape.
+* Replacing 1x1 depthwise with BN.
+* Inferencing Upsample shapes.
+* Transposing B in Gemm.
+
+## pytorch2onnx.py
+
+### 2.1. Description
+
+Convert Pytorch models or Pytorch generated ONNX models into Kneron toolchain
+compatible ONNX files. This script includes most of the optimizations in
+`onnx2onnx.py`. It also includes some optimizations for Pytorch models only.
+
+### 2.2. Basic Usage
+
+```bash
+# Take the Pytorch model name, input channel number, input height, input width.
+python pytorch2onnx.py input.pth output.onnx --input-size 3 224 224
+# Or take a Pytorch exported ONNX model.
+python pytorch2onnx.py input.onnx output.onnx
+```
+
+### 2.3. Optimizations Included
+
+* Adding names to nodes.
+* Unsqueeze nodes constant folding.
+* Reshape nodes constant folding.
+* Optimizations in `onnx2onnx.py`.
+
+## editor.py
+
+### 3.1. Description
+
+This is a simple ONNX editor which achieves the following functions:
+
+* Add nop BN or Conv nodes.
+* Delete specific nodes or inputs.
+* Cut the graph from a certain node (delete all the nodes following it).
+* Reshape inputs and outputs.
+
+### 3.2 Usage
+
+```
+usage: editor.py [-h] [-c CUT_NODE [CUT_NODE ...]]
+                 [--cut-type CUT_TYPE [CUT_TYPE ...]]
+                 [-d DELETE_NODE [DELETE_NODE ...]]
+                 [--delete-input DELETE_INPUT [DELETE_INPUT ...]]
+                 [-i INPUT_CHANGE [INPUT_CHANGE ...]]
+                 [-o OUTPUT_CHANGE [OUTPUT_CHANGE ...]]
+                 [--add-conv ADD_CONV [ADD_CONV ...]]
+                 [--add-bn ADD_BN [ADD_BN ...]]
+                 in_file out_file
+
+Edit an ONNX model. The processing sequence is 'delete nodes/values' -> 'add
+nodes' -> 'change shapes'. Cutting cannot be done together with other
+operations.
+
+positional arguments:
+  in_file               input ONNX FILE
+  out_file              output ONNX FILE
+
+optional arguments:
+  -h, --help            show this help message and exit
+  -c CUT_NODE [CUT_NODE ...], --cut CUT_NODE [CUT_NODE ...]
+                        remove nodes from the given nodes (inclusive)
+  --cut-type CUT_TYPE [CUT_TYPE ...]
+                        remove nodes by type from the given nodes (inclusive)
+  -d DELETE_NODE [DELETE_NODE ...], --delete DELETE_NODE [DELETE_NODE ...]
+                        delete nodes by names and only those nodes
+  --delete-input DELETE_INPUT [DELETE_INPUT ...]
+                        delete inputs by names
+  -i INPUT_CHANGE [INPUT_CHANGE ...], --input INPUT_CHANGE [INPUT_CHANGE ...]
+                        change input shape (e.g. -i 'input_0 1 3 224 224')
+  -o OUTPUT_CHANGE [OUTPUT_CHANGE ...], --output OUTPUT_CHANGE [OUTPUT_CHANGE ...]
+                        change output shape (e.g. -o 'input_0 1 3 224 224')
+  --add-conv ADD_CONV [ADD_CONV ...]
+                        add nop conv using specific input
+  --add-bn ADD_BN [ADD_BN ...]
+                        add nop bn using specific input
+```
+
+### 3.3. Example
+
+Here is an example of when and how to use editor.py.
+
+```bash
+# In the `res` folder, there is a vdsr model from tensorflow.
+# We need to convert this model first.
+./tf2onnx.sh res/vdsr_41_20layer_1.pb res/tmp.onnx images:0 output:0
+# This onnx file seems valid, but the input and output are channel-last.
+# It uses Transpose to convert to channel-first, affecting the performance.
+# Thus, here we use the editor to delete these Transpose nodes and reset the shapes.
+python editor.py debug.onnx new.onnx -d Conv2D__6 Conv2D_19__84 -i 'images:0 1 3 41 41' -o 'output:0 1 3 41 41'
+# Now it has no Transpose and takes channel-first inputs directly.
+```
+
+## test_models_opt.py
+
+### 4.1. Description
+
+Compare all original and optimized onnx models under a specified directory,
+using different file name endings to locate the original and the optimized
+model paths. Run onnxruntime inference on both models, compare the results,
+then calculate basic statistics and store them in a csv file.
+
+### 4.2. Usage
+
+```bash
+python test_models_opt.py DIR ending1 ending2 csv_out_file -p=Y/N
+
+# csv_out_file is the file path for the stats data.
+# -p --plot is the plot option; if Y, stats plots will be generated.
+```
+
+### 4.3. Statistics
+
+* max_rel_diff
+* max_abs_diff
+* mean_rel_diff
+* mean_abs_diff
+* std_rel_diff
+* std_abs_diff
+* acc_with_diff_precision
+* percentile
+
+### 4.4. Plots
+
+* Max Relative Difference Histogram
+* Max Absolute Difference Histogram
+* Rel_diff Percentiles of Raw and Optimized Models
+* Abs_diff Percentiles of Raw and Optimized Models
+* Accuracies with Different Precisions
+
+## tensorflow2onnx.py
+
+### 5.1. Description
+
+Convert and optimize tensorflow models. If the input file is a frozen
+tensorflow .pb model, convert it to an onnx model and do the customized
+optimization afterwards. If the input is already an onnx model, apply the
+optimization and save the optimized model.
+
+### 5.2. Dependency
+
+This script depends on the tensorflow-onnx project. Please [check and install it](https://github.com/onnx/tensorflow-onnx/tree/r1.5) before using this script. We currently support up to version 1.5.5. For other versions, you may need to try it out yourself.
+
+### 5.3. Basic Usage
+
+```bash
+python tensorflow2onnx.py in_file out_file -t=True/False
+
+# -t --test is the option for test mode; if True, the shape change after the input will not be eliminated.
+```
+
+### 5.4. Model Save Paths
+
+`in_file` is the input model path; `out_file` specifies the output optimized model path.
+If the input file is a `.pb` model, an unoptimized onnx model will be saved to the output directory as well.
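+
+For example, with a hypothetical frozen graph at `models/frozen.pb` (the path
+is illustrative, not part of this repository):
+
+```bash
+python tensorflow2onnx.py models/frozen.pb models/frozen_opt.onnx -t=False
+# The optimized model is written to models/frozen_opt.onnx; since the input
+# is a .pb file, an unoptimized onnx model is saved in the same output
+# directory as well.
+```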
+ diff --git a/tools/deployment/optimizer_scripts/consecutive_conv_opt.py b/tools/deployment/optimizer_scripts/consecutive_conv_opt.py new file mode 100644 index 0000000..0ed4a28 --- /dev/null +++ b/tools/deployment/optimizer_scripts/consecutive_conv_opt.py @@ -0,0 +1,85 @@ +import numpy as np +import onnx +import sys + +from tools.other import topological_sort +from tools import helper + + +def fuse_bias_in_consecutive_1x1_conv(g): + for second in g.node: + # Find two conv + if second.op_type != "Conv": + continue + first = helper.find_node_by_output_name(g, second.input[0]) + if first is None or first.op_type != "Conv": + continue + # Check if the first one has only one folloing node + if ( + len( + helper.find_following_nodes_by_input_value_name( + g, first.output[0] + ) + ) + != 1 + ): + continue + # If first node has no bias, continue + if len(first.input) == 2: + continue + # Check their kernel size + first_kernel_shape = helper.get_list_attribute_by_name( + first, "kernel_shape", "int" + ) + second_kernel_shape = helper.get_list_attribute_by_name( + second, "kernel_shape", "int" + ) + prod = ( + first_kernel_shape[0] + * first_kernel_shape[1] + * second_kernel_shape[0] + * second_kernel_shape[1] + ) + if prod != 1: + continue + print("Found: ", first.name, " ", second.name) + # Get bias of the nodes + first_bias_node = helper.find_node_by_output_name(g, first.input[2]) + second_weight_node = helper.find_node_by_output_name( + g, second.input[1] + ) + second_bias_node = helper.find_node_by_output_name(g, second.input[2]) + first_bias = helper.constant_to_numpy(first_bias_node) + second_weight = helper.constant_to_numpy(second_weight_node) + second_bias = helper.constant_to_numpy(second_bias_node) + # Calculate the weight for second node + first_bias = np.reshape(first_bias, (1, first_bias.size)) + second_weight = np.reshape( + second_weight, (second_weight.shape[0], second_weight.shape[1]) + ) + second_weight = np.transpose(second_weight) + new_second_bias = second_bias + np.matmul(first_bias, second_weight) + new_second_bias = np.reshape(new_second_bias, (new_second_bias.size,)) + # Generate new weight + new_first_bias = np.reshape(first_bias, (first_bias.size,)) + for i in range(new_first_bias.shape[0]): + new_first_bias[i] = 0.0 + new_first_bias_node = helper.numpy_to_constant( + first_bias_node.output[0], new_first_bias + ) + new_second_bias_node = helper.numpy_to_constant( + second_bias_node.output[0], new_second_bias + ) + # Delete old weight and add new weights + g.node.remove(first_bias_node) + g.node.remove(second_bias_node) + g.node.extend([new_first_bias_node, new_second_bias_node]) + topological_sort(g) + + +if __name__ == "__main__": + if len(sys.argv) != 3: + exit(1) + m = onnx.load(sys.argv[1]) + fuse_bias_in_consecutive_1x1_conv(m.graph) + onnx.save(m, sys.argv[2]) diff --git a/tools/deployment/optimizer_scripts/docker/Dockerfile b/tools/deployment/optimizer_scripts/docker/Dockerfile new file mode 100644 index 0000000..bb62f7f --- /dev/null +++ b/tools/deployment/optimizer_scripts/docker/Dockerfile @@ -0,0 +1,24 @@ +FROM continuumio/miniconda3:latest +LABEL maintainer="jiyuan@kneron.us" + +# Install python packages +RUN conda update -y conda && \ +conda install -y python=3.6 && \ +conda install -y -c intel caffe && \ +conda install -y -c pytorch pytorch=1.3.1 torchvision=0.4.2 cpuonly && \ +conda install -y -c conda-forge tensorflow=1.5.1 keras=2.2.4 && \ +pip install onnx==1.4.1 onnxruntime==1.1.0 tf2onnx==1.5.4 && \ +ln -s /opt/conda/lib/libgflags.so.2.2.2 
/opt/conda/lib/libgflags.so.2
+
+# Install git lfs packages
+RUN apt-get update && apt-get install -y curl apt-utils && \
+curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | bash && \
+apt-get install -y git-lfs
+
+RUN conda clean -a -y && rm -rf /var/lib/apt/lists/*
+
+# copy the test data
+COPY ./test_models /test_models
+
+# Clean the environment and finalize the process
+WORKDIR /root
\ No newline at end of file
diff --git a/tools/deployment/optimizer_scripts/editor.py b/tools/deployment/optimizer_scripts/editor.py
new file mode 100644
index 0000000..b04183c
--- /dev/null
+++ b/tools/deployment/optimizer_scripts/editor.py
@@ -0,0 +1,235 @@
+import onnx
+import onnx.utils
+
+try:
+    from onnx import optimizer
+except ImportError:
+    import onnxoptimizer as optimizer
+import argparse
+
+import tools.modhelper as helper
+import tools.other as other
+import tools.replacing as replacing
+
+# Main process
+# Argument parser
+parser = argparse.ArgumentParser(
+    description="Edit an ONNX model.\nThe processing sequence is 'delete "
+    "nodes/values' -> 'add nodes' -> 'change shapes'.\nCutting "
+    "cannot be done together with other operations"
+)
+parser.add_argument("in_file", type=str, help="input ONNX FILE")
+parser.add_argument("out_file", type=str, help="output ONNX FILE")
+parser.add_argument(
+    "-c",
+    "--cut",
+    dest="cut_node",
+    type=str,
+    nargs="+",
+    help="remove nodes from the given nodes (inclusive)",
+)
+parser.add_argument(
+    "--cut-type",
+    dest="cut_type",
+    type=str,
+    nargs="+",
+    help="remove nodes by type from the given nodes (inclusive)",
+)
+parser.add_argument(
+    "-d",
+    "--delete",
+    dest="delete_node",
+    type=str,
+    nargs="+",
+    help="delete nodes by names and only those nodes",
+)
+parser.add_argument(
+    "--delete-input",
+    dest="delete_input",
+    type=str,
+    nargs="+",
+    help="delete inputs by names",
+)
+parser.add_argument(
+    "--delete-output",
+    dest="delete_output",
+    type=str,
+    nargs="+",
+    help="delete outputs by names",
+)
+parser.add_argument(
+    "-i",
+    "--input",
+    dest="input_change",
+    type=str,
+    nargs="+",
+    help="change input shape (e.g. -i 'input_0 1 3 224 224')",
+)
+parser.add_argument(
+    "-o",
+    "--output",
+    dest="output_change",
+    type=str,
+    nargs="+",
+    help="change output shape (e.g. -o 'input_0 1 3 224 224')",
+)
+parser.add_argument(
+    "--add-conv",
+    dest="add_conv",
+    type=str,
+    nargs="+",
+    help="add nop conv using specific input",
+)
+parser.add_argument(
+    "--add-bn",
+    dest="add_bn",
+    type=str,
+    nargs="+",
+    help="add nop bn using specific input",
+)
+parser.add_argument(
+    "--rename-output",
+    dest="rename_output",
+    type=str,
+    nargs="+",
+    help="rename the specific output (e.g. --rename-output old_name new_name)",
+)
+parser.add_argument(
+    "--pixel-bias-value",
+    dest="pixel_bias_value",
+    type=str,
+    nargs="+",
+    help='(per channel) set a pixel value bias bn layer at the model front '
+    'for normalization (e.g. --pixel-bias-value "[104.0, 117.0, 123.0]")',
+)
+parser.add_argument(
+    "--pixel-scale-value",
+    dest="pixel_scale_value",
+    type=str,
+    nargs="+",
+    help='(per channel) set a pixel value scale bn layer at the model front '
+    'for normalization (e.g. --pixel-scale-value '
+    '"[0.0078125, 0.0078125, 0.0078125]")',
+)
+
+args = parser.parse_args()
+
+# Load model and polish
+m = onnx.load(args.in_file)
+m = other.polish_model(m)
+g = m.graph
+replacing.replace_initializer_with_Constant(g)
+other.topological_sort(g)
+
+# Remove nodes according to the given arguments.
+if args.delete_node is not None: + helper.delete_nodes(g, args.delete_node) + +if args.delete_input is not None: + helper.delete_input(g, args.delete_input) + +if args.delete_output is not None: + helper.delete_output(g, args.delete_output) + +# Add do-nothing Conv node +if args.add_conv is not None: + other.add_nop_conv_after(g, args.add_conv) + other.topological_sort(g) + +# Add do-nothing BN node +if args.add_bn is not None: + other.add_nop_bn_after(g, args.add_bn) + other.topological_sort(g) + +# Add bias scale BN node +if args.pixel_bias_value is not None or args.pixel_scale_value is not None: + + if len(g.input) > 1: + raise ValueError( + " '--pixel-bias-value' and '--pixel-scale-value' " + "only support one input node model currently" + ) + + i_n = g.input[0] + + pixel_bias_value = [0] * i_n.type.tensor_type.shape.dim[1].dim_value + pixel_scale_value = [1] * i_n.type.tensor_type.shape.dim[1].dim_value + + if args.pixel_bias_value is not None and len(args.pixel_bias_value) == 1: + pixel_bias_value = [ + float(n) + for n in args.pixel_bias_value[0] + .replace("[", "") + .replace("]", "") + .split(",") + ] + + if args.pixel_scale_value is not None and len(args.pixel_scale_value) == 1: + pixel_scale_value = [ + float(n) + for n in args.pixel_scale_value[0] + .replace("[", "") + .replace("]", "") + .split(",") + ] + + if i_n.type.tensor_type.shape.dim[1].dim_value != len( + pixel_bias_value + ) or i_n.type.tensor_type.shape.dim[1].dim_value != len(pixel_scale_value): + raise ValueError( + "--pixel-bias-value (" + + str(pixel_bias_value) + + ") and --pixel-scale-value (" + + str(pixel_scale_value) + + ") should be same as input dimension:" + + str(i_n.type.tensor_type.shape.dim[1].dim_value) + ) + other.add_bias_scale_bn_after( + g, i_n.name, pixel_bias_value, pixel_scale_value + ) + +# Change input and output shapes as requested +if args.input_change is not None: + other.change_input_shape(g, args.input_change) +if args.output_change is not None: + other.change_output_shape(g, args.output_change) + +# Cutting nodes according to the given arguments. +if args.cut_node is not None or args.cut_type is not None: + if args.cut_node is None: + other.remove_nodes(g, cut_types=args.cut_type) + elif args.cut_type is None: + other.remove_nodes(g, cut_nodes=args.cut_node) + else: + other.remove_nodes(g, cut_nodes=args.cut_node, cut_types=args.cut_type) + other.topological_sort(g) + +# Rename nodes +if args.rename_output: + if len(args.rename_output) % 2 != 0: + print("Rename output should be paires of names.") + else: + for i in range(0, len(args.rename_output), 2): + other.rename_output_name( + g, args.rename_output[i], args.rename_output[i + 1] + ) + +# Remove useless nodes +if ( + args.delete_node + or args.delete_input + or args.input_change + or args.output_change +): + # If shape changed during the modification, redo shape inference. 
+ while len(g.value_info) > 0: + g.value_info.pop() +passes = ["extract_constant_to_initializer"] +m = optimizer.optimize(m, passes) +g = m.graph +replacing.replace_initializer_with_Constant(g) +other.topological_sort(g) +# Polish and output +m = other.polish_model(m) +other.add_output_to_value_info(m.graph) +onnx.save(m, args.out_file) diff --git a/tools/deployment/optimizer_scripts/norm_on_scaled_onnx.py b/tools/deployment/optimizer_scripts/norm_on_scaled_onnx.py new file mode 100644 index 0000000..7d462c2 --- /dev/null +++ b/tools/deployment/optimizer_scripts/norm_on_scaled_onnx.py @@ -0,0 +1,54 @@ +import onnx +import sys +import json + +from tools import special + +if len(sys.argv) != 3: + print("python norm_on_scaled_onnx.py input.onnx input.json") + exit(1) + +# Modify onnx +m = onnx.load(sys.argv[1]) +special.add_0_5_to_normalized_input(m) +onnx.save(m, sys.argv[1][:-4] + "norm.onnx") + +# Change input node +origin_file = open(sys.argv[2], "r") +origin_json = json.load(origin_file) +origin_json["input_node"]["output_datapath_radix"] = [8] +new_json_str = json.dumps(origin_json) + +# Modify json +file = open(sys.argv[1][:-4] + "norm.onnx" + ".json", "w") +s = """{{ + \"{0}\" : + {{ + \"bias_bitwidth\" : 16, + \"{0}_bias\" : [15], + \"{0}_weight\" : [3,3,3], + \"conv_coarse_shift\" : [-4,-4,-4], + \"conv_fine_shift\" : [0,0,0], + \"conv_total_shift\" : [-4,-4,-4], + \"cpu_mode\" : false, + \"delta_input_bitwidth\" : [0], + \"delta_output_bitwidth\" : 8, + \"flag_radix_bias_eq_output\" : true, + \"input_scale\" : [[1.0,1.0,1.0]], + \"output_scale\" : [1.0, 1.0, 1.0], + \"psum_bitwidth\" : 16, + \"weight_bitwidth\" : 8, + \"input_datapath_bitwidth\" : [8], + \"input_datapath_radix\" : [8], + \"working_input_bitwidth\" : 8, + \"working_input_radix\" : [8], + \"working_output_bitwidth\" : 16, + \"working_output_radix\" : 15, + \"output_datapath_bitwidth\" : 8, + \"output_datapath_radix\" : 7 + }},\n""".format( + "input_norm" +) +file.write(s + new_json_str[1:]) +file.close() +origin_file.close() diff --git a/tools/deployment/optimizer_scripts/onnx1_3to1_4.py b/tools/deployment/optimizer_scripts/onnx1_3to1_4.py new file mode 100644 index 0000000..6c6613f --- /dev/null +++ b/tools/deployment/optimizer_scripts/onnx1_3to1_4.py @@ -0,0 +1,144 @@ +# ref http://192.168.200.1:8088/jiyuan/converter_scripts.git + +import sys +import onnx +from tools import other, helper + +""" +Change onnx model from version 1.3 to version 1.4. +- Modify the BN node by removing the spatial attribute +- Modify the Upsample node by removing the 'scales' attribute, + and adding a constant node instead. +- Model's ir_version and opset_import are updated. 
+""" + + +def remove_BN_spatial(g): + for node in g.node: + if node.op_type != "BatchNormalization": + continue + for att in node.attribute: + if att.name == "spatial": + node.attribute.remove(att) + + +def upsample_attribute_to_const(g): + for node in g.node: + if node.op_type != "Upsample": + continue + scales_exist = False + for att in node.attribute: + if att.name == "scales": + scales_exist = True + break + if not scales_exist: + continue + + shape = [len(att.floats)] + node.attribute.remove(att) + new_node = helper.list_to_constant( + node.name + "_input", shape, att.floats + ) + + g.node.extend([new_node]) + value_info = onnx.helper.make_tensor_value_info( + node.name + "_input", onnx.TensorProto.FLOAT, shape + ) + node.input.extend([node.name + "_input"]) + g.value_info.extend([value_info]) + + +def relu6_to_clip(g): + for node in g.node: + if node.op_type != "Relu": + continue + max_val = helper.get_var_attribute_by_name(node, "max", "float") + if max_val is None: + continue + new_node = onnx.helper.make_node( + "Clip", + node.input, + node.output, + name=node.name, + max=max_val, + min=0.0, + ) + g.node.remove(node) + g.node.extend([new_node]) + + +def PRelu_weight_reshape(g): + # For PRelu with single dimension weight. Expand it to 1, x, 1, 1 + for node in g.node: + if node.op_type != "PRelu": + continue + slope = helper.find_node_by_output_name(g, node.input[1]) + if slope is not None: + # Constant node + if len(slope.attribute[0].t.dims) != 1: + continue + slope.attribute[0].t.dims.append(slope.attribute[0].t.dims[0]) + slope.attribute[0].t.dims[0] = 1 + slope.attribute[0].t.dims.append(1) + slope.attribute[0].t.dims.append(1) + else: + # Initializer + for i in g.initializer: + if i.name == node.input[1]: + slope = i + break + if len(slope.dims) != 1: + continue + slope.dims.append(slope.dims[0]) + slope.dims[0] = 1 + slope.dims.append(1) + slope.dims.append(1) + input_value = helper.find_input_by_name(g, node.input[1]) + new_input = onnx.helper.make_tensor_value_info( + node.input[1], + input_value.type.tensor_type.elem_type, + (1, slope.dims[1], 1, 1), + ) + g.input.remove(input_value) + g.input.append(new_input) + value_info = helper.find_value_by_name(g, node.input[1]) + if value_info is not None: + g.value_info.remove(value_info) + + +def do_convert(m): + graph = m.graph + + # Modify the nodes. + remove_BN_spatial(graph) + upsample_attribute_to_const(graph) + relu6_to_clip(graph) + PRelu_weight_reshape(graph) + other.topological_sort(graph) + + # Change model properties. + m.ir_version = 4 + m.opset_import[0].version = 9 + return m + + +if __name__ == "__main__": + if len(sys.argv) != 3: + print("Usage:{} file_in file_out".format(sys.argv[0])) + exit(1) + + model = onnx.load(sys.argv[1]) + graph = model.graph + + # Modify the nodes. + remove_BN_spatial(graph) + upsample_attribute_to_const(graph) + relu6_to_clip(graph) + PRelu_weight_reshape(graph) + other.topological_sort(graph) + + # Change model properties. + model.ir_version = 4 + model.opset_import[0].version = 9 + + onnx.save(model, sys.argv[2]) diff --git a/tools/deployment/optimizer_scripts/onnx1_4to1_6.py b/tools/deployment/optimizer_scripts/onnx1_4to1_6.py new file mode 100644 index 0000000..caa5540 --- /dev/null +++ b/tools/deployment/optimizer_scripts/onnx1_4to1_6.py @@ -0,0 +1,211 @@ +# ref http://192.168.200.1:8088/jiyuan/converter_scripts.git + +import sys +import onnx +import onnx.utils +from tools import other, helper, replacing + +""" +Change onnx model from version 1.4 to version 1.6. 
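+- Pad, Slice and Clip attributes are moved into Constant input nodes.
+- Upsample nodes are replaced with Resize nodes.
+- Model's ir_version and opset_import are updated.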
+""" + + +def replace_all_attribute_to_const_node_in_pad_node(g): + node_to_remove = [] + node_to_extend = [] + for node in g.node: + if node.op_type != "Pad": + continue + + pad_loc_node = None # must have + pad_mode = "constant" + pad_value_node = helper.list_to_constant( + node.name + "_pad_value", [], [0.0] + ) # need scalar + for att in node.attribute: + if att.name == "mode": + pad_mode = helper.get_var_attribute_by_name( + node, "mode", "string" + ) + if att.name == "pads": + pad_loc_node = helper.list_to_constant( + node.name + "_pad_loc", [len(att.ints)], att.ints + ) + if att.name == "value": + pad_value_node = helper.list_to_constant( + node.name + "_pad_value", [], [att.f] + ) + + new_node = onnx.helper.make_node( + "Pad", + [node.input[0], pad_loc_node.name, pad_value_node.name], + [node.output[0]], + name=node.output[0], + mode=pad_mode, + ) + node_to_remove.append(node) + node_to_extend.append(new_node) + node_to_extend.append(pad_loc_node) + node_to_extend.append(pad_value_node) + + for node in node_to_remove: + g.node.remove(node) + for node in node_to_extend: + g.node.extend([node]) + + +def upsampling_to_resize(g): + for node in g.node: + if node.op_type != "Upsample": + continue + upsampling_mode = helper.get_var_attribute_by_name( + node, "mode", "string" + ) + + scale_value_node = helper.find_node_by_output_name(g, node.input[1]) + if scale_value_node.op_type != "Constant": + raise TypeError( + 'seems there is a dynamic "scales" param in Upsampling node: ' + + node.name + + " , you might need to do constant folding first" + ) + + roi_node = helper.list_to_constant(node.name + "_roi_value", [0], []) + + new_node = onnx.helper.make_node( + "Resize", + [node.input[0], roi_node.name, scale_value_node.name], + [node.output[0]], + name=node.output[0], + mode=upsampling_mode, + coordinate_transformation_mode="asymmetric", + ) + + g.node.remove(node) + g.node.extend([new_node]) + g.node.extend([roi_node]) + + +def replace_all_attribute_to_const_node_in_slice_node(g): + for node in g.node: + if node.op_type != "Slice": + continue + + axes_const_node = None + ends_const_node = None + starts_const_node = None + steps_const_node = None + for att in node.attribute: + if att.name == "axes": + axes_const_node = helper.list_to_constant( + node.name + "_axes_value", [len(att.ints)], att.ints + ) + + if att.name == "ends": + ends_const_node = helper.list_to_constant( + node.name + "_ends_value", [len(att.ints)], att.ints + ) + + if att.name == "starts": + starts_const_node = helper.list_to_constant( + node.name + "_starts_value", [len(att.ints)], att.ints + ) + + if att.name == "steps": + steps_const_node = helper.list_to_constant( + node.name + "_steps_value", [len(att.ints)], att.ints + ) + + # pop out from back + attr_len = len(node.attribute) + for i in range(attr_len): + node.attribute.remove(node.attribute[attr_len - 1 - i]) + + # according the spec, we need to add node in specific order + if starts_const_node is not None: + g.node.extend([starts_const_node]) + node.input.extend([starts_const_node.name]) + if ends_const_node is not None: + g.node.extend([ends_const_node]) + node.input.extend([ends_const_node.name]) + if axes_const_node is not None: + g.node.extend([axes_const_node]) + node.input.extend([axes_const_node.name]) + if steps_const_node is not None: + g.node.extend([steps_const_node]) + node.input.extend([steps_const_node.name]) + + +def replace_min_max_attribute_to_const_node_in_clip_node(g): + for node in g.node: + if node.op_type != "Clip": + continue + + 
max_const_node = None
+        min_const_node = None
+        for att in node.attribute:
+            if att.name == "max":
+                max_const_node = helper.list_to_constant(
+                    node.name + "_max_value", [], [att.f]
+                )
+
+            if att.name == "min":
+                min_const_node = helper.list_to_constant(
+                    node.name + "_min_value", [], [att.f]
+                )
+
+        # Note: both min and max attributes are assumed to be present here
+        # (as produced by the 1.3 -> 1.4 Relu6 conversion).
+        # pop out from back
+        node.attribute.remove(node.attribute[1])
+        node.attribute.remove(node.attribute[0])
+
+        # according to the spec, we need to add nodes in a specific order
+        g.node.extend([min_const_node])
+        g.node.extend([max_const_node])
+        node.input.extend([min_const_node.name])
+        node.input.extend([max_const_node.name])
+
+
+def onnx1_4to1_6(model: onnx.ModelProto) -> onnx.ModelProto:
+    """Update ir_version from 4 to 6 and update opset from 9 to 11.
+
+    Args:
+        model (onnx.ModelProto): input onnx model.
+
+    Returns:
+        onnx.ModelProto: updated onnx model.
+    """
+    graph = model.graph
+
+    if model.opset_import[0].version == 11:
+        print("(Stop) the input model is already opset 11, no need to upgrade")
+        exit(1)
+
+    # deal with the empty node name issue
+    other.add_name_to_node(graph)
+    # simplify the node param type from initializer to constant
+    replacing.replace_initializer_with_Constant(graph)
+
+    # Modify the nodes.
+    replace_min_max_attribute_to_const_node_in_clip_node(graph)
+    replace_all_attribute_to_const_node_in_slice_node(graph)
+    replace_all_attribute_to_const_node_in_pad_node(graph)
+    upsampling_to_resize(graph)
+    other.topological_sort(graph)
+
+    # Change model properties.
+    model.ir_version = 6
+    model.opset_import[0].version = 11
+
+    model = other.polish_model(model)
+    return model
+
+
+if __name__ == "__main__":
+    if len(sys.argv) != 3:
+        print("Usage:{} file_in file_out".format(sys.argv[0]))
+        exit(1)
+
+    model = onnx.load(sys.argv[1])
+    model = onnx1_4to1_6(model)
+
+    onnx.save(model, sys.argv[2])
diff --git a/tools/deployment/optimizer_scripts/onnx2onnx.py b/tools/deployment/optimizer_scripts/onnx2onnx.py
new file mode 100644
index 0000000..884dd2b
--- /dev/null
+++ b/tools/deployment/optimizer_scripts/onnx2onnx.py
@@ -0,0 +1,208 @@
+import onnx
+import onnx.utils
+
+import argparse
+import logging
+
+from tools import eliminating
+from tools import other
+from tools import special
+from tools import combo
+
+# from tools import temp
+
+
+def onnx2onnx_flow(
+    m: onnx.ModelProto,
+    disable_fuse_bn=False,
+    bn_on_skip=False,
+    bn_before_add=False,
+    bgr=False,
+    norm=False,
+    rgba2yynn=False,
+    eliminate_tail=False,
+    opt_matmul=False,
+    duplicate_shared_weights=True,
+) -> onnx.ModelProto:
+    """Optimize the onnx.
+
+    Args:
+        m (ModelProto): the input onnx ModelProto
+        disable_fuse_bn (bool, optional): do not fuse BN into Conv.
+            Defaults to False.
+        bn_on_skip (bool, optional): add BN operators on skip branches.
+            Defaults to False.
+        bn_before_add (bool, optional): add BN before the Add node on every
+            branch. Defaults to False.
+        bgr (bool, optional): add a Conv layer to convert rgb input to bgr.
+            Defaults to False.
+        norm (bool, optional): add a Conv layer to add 0.5 to the input.
+            Defaults to False.
+        rgba2yynn (bool, optional): add a Conv layer to convert rgb to yynn.
+            Defaults to False.
+        eliminate_tail (bool, optional): remove trailing NPU-unsupported
+            nodes. Defaults to False.
+        opt_matmul (bool, optional): optimize MatMul layers due to NPU limits.
+            Defaults to False.
+        duplicate_shared_weights (bool, optional): duplicate shared weights.
+            Defaults to True.
+
+    Returns:
+        ModelProto: the optimized onnx model object.
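+
+    Example (a minimal sketch; the file paths are hypothetical):
+        >>> m = onnx.load('model.onnx')
+        >>> m = onnx2onnx_flow(m, eliminate_tail=True)
+        >>> onnx.save(m, 'model_opt.onnx')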
+ """ + # temp.weight_broadcast(m.graph) + m = combo.preprocess(m, disable_fuse_bn, duplicate_shared_weights) + # temp.fuse_bias_in_consecutive_1x1_conv(m.graph) + + # Add BN on skip branch + if bn_on_skip: + other.add_bn_on_skip_branch(m.graph) + elif bn_before_add: + other.add_bn_before_add(m.graph) + other.add_bn_before_activation(m.graph) + + # My optimization + m = combo.common_optimization(m) + # Special options + if bgr: + special.change_input_from_bgr_to_rgb(m) + if norm: + special.add_0_5_to_normalized_input(m) + if rgba2yynn: + special.add_rgb2yynn_node(m) + + # Remove useless last node + if eliminate_tail: + eliminating.remove_useless_last_nodes(m.graph) + + # Postprocessing + m = combo.postprocess(m) + + # Put matmul after postprocess to avoid transpose moving downwards + if opt_matmul: + special.special_MatMul_process(m.graph) + m = other.polish_model(m) + + return m + + +# Main process +if __name__ == "__main__": + # Argument parser + parser = argparse.ArgumentParser( + description="Optimize an ONNX model for Kneron compiler" + ) + parser.add_argument("in_file", help="input ONNX FILE") + parser.add_argument( + "-o", "--output", dest="out_file", type=str, help="ouput ONNX FILE" + ) + parser.add_argument("--log", default="i", type=str, help="set log level") + parser.add_argument( + "--bgr", + action="store_true", + default=False, + help="set if the model is trained in BGR mode", + ) + parser.add_argument( + "--norm", + action="store_true", + default=False, + help="set if you have the input -0.5~0.5", + ) + parser.add_argument( + "--rgba2yynn", + action="store_true", + default=False, + help="set if the model has yynn input but you want " + "to take rgba images", + ) + parser.add_argument( + "--add-bn-on-skip", + dest="bn_on_skip", + action="store_true", + default=False, + help="set if you only want to add BN on skip branches", + ) + parser.add_argument( + "--add-bn", + dest="bn_before_add", + action="store_true", + default=False, + help="set if you want to add BN before Add", + ) + parser.add_argument( + "-t", + "--eliminate-tail-unsupported", + dest="eliminate_tail", + action="store_true", + default=False, + help="whether remove the last unsupported node for hardware", + ) + parser.add_argument( + "--no-bn-fusion", + dest="disable_fuse_bn", + action="store_true", + default=False, + help="set if you have met errors which related to inferenced " + "shape mismatch. This option will prevent fusing " + "BatchNormalization into Conv.", + ) + parser.add_argument( + "--opt-matmul", + dest="opt_matmul", + action="store_true", + default=False, + help="set if you want to optimize MatMul operations " + "for kneron hardware.", + ) + parser.add_argument( + "--no-duplicate-shared-weights", + dest="no_duplicate_shared_weights", + action="store_true", + default=False, + help="do not duplicate shared weights. 
Defaults to False.",
+    )
+    args = parser.parse_args()
+
+    if args.out_file is None:
+        outfile = args.in_file[:-5] + "_polished.onnx"
+    else:
+        outfile = args.out_file
+
+    if args.log == "w":
+        logging.basicConfig(level=logging.WARN)
+    elif args.log == "d":
+        logging.basicConfig(level=logging.DEBUG)
+    elif args.log == "e":
+        logging.basicConfig(level=logging.ERROR)
+    else:
+        logging.basicConfig(level=logging.INFO)
+
+    # onnx Polish model includes:
+    # -- nop
+    # -- eliminate_identity
+    # -- eliminate_nop_transpose
+    # -- eliminate_nop_pad
+    # -- eliminate_unused_initializer
+    # -- fuse_consecutive_squeezes
+    # -- fuse_consecutive_transposes
+    # -- fuse_add_bias_into_conv
+    # -- fuse_transpose_into_gemm
+
+    # Basic model organize
+    m = onnx.load(args.in_file)
+
+    m = onnx2onnx_flow(
+        m,
+        args.disable_fuse_bn,
+        args.bn_on_skip,
+        args.bn_before_add,
+        args.bgr,
+        args.norm,
+        args.rgba2yynn,
+        args.eliminate_tail,
+        args.opt_matmul,
+        not args.no_duplicate_shared_weights,
+    )
+
+    onnx.save(m, outfile)
diff --git a/tools/deployment/optimizer_scripts/onnx_vs_onnx.py b/tools/deployment/optimizer_scripts/onnx_vs_onnx.py
new file mode 100644
index 0000000..d416045
--- /dev/null
+++ b/tools/deployment/optimizer_scripts/onnx_vs_onnx.py
@@ -0,0 +1,181 @@
+import onnxruntime
+import onnx
+import argparse
+import numpy as np
+from tools import helper
+
+
+onnx2np_dtype = {
+    0: "float",
+    1: "float32",
+    2: "uint8",
+    3: "int8",
+    4: "uint16",
+    5: "int16",
+    6: "int32",
+    7: "int64",
+    8: "str",
+    9: "bool",
+    10: "float16",
+    11: "double",
+    12: "uint32",
+    13: "uint64",
+    14: "complex64",
+    15: "complex128",
+    16: "float",
+}
+
+
+def onnx_model_results(path_a, path_b, total_times=10):
+    """Use onnxruntime to run inference on two onnx models and collect outputs.
+
+    :path_a, path_b: the two model paths
+    :total_times: number of inference runs, defaults to 10
+    :returns: inference results of the two models
+    """
+    # load model a and model b into the runtime
+    session_a = onnxruntime.InferenceSession(path_a, None)
+    session_b = onnxruntime.InferenceSession(path_b, None)
+    outputs_a = session_a.get_outputs()
+    outputs_b = session_b.get_outputs()
+
+    # check outputs
+    assert len(outputs_a) == len(
+        outputs_b
+    ), "Two models have different output numbers."
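+    # Dynamic (non-int) dimensions are normalized to 1 before comparing the
+    # two models' output shapes.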
+ for i in range(len(outputs_a)): + out_shape_a, out_shape_b = outputs_a[i].shape, outputs_b[i].shape + out_shape_a = list( + map(lambda x: x if isinstance(x, int) else 1, out_shape_a) + ) + out_shape_b = list( + map(lambda x: x if isinstance(x, int) else 1, out_shape_b) + ) + assert ( + out_shape_a == out_shape_b + ), "Output {} has unmatched shapes".format(i) + + # load onnx graph_a and graph_b, to find the initializer and inputs + # then compare to remove the items in the inputs which will be initialized + model_a, model_b = onnx.load(path_a), onnx.load(path_b) + graph_a, graph_b = model_a.graph, model_b.graph + inputs_a, inputs_b = graph_a.input, graph_b.input + init_a, init_b = graph_a.initializer, graph_b.initializer + + # remove initializer from raw inputs + input_names_a, input_names_b = set([ele.name for ele in inputs_a]), set( + [ele.name for ele in inputs_b] + ) + init_names_a, init_names_b = set([ele.name for ele in init_a]), set( + [ele.name for ele in init_b] + ) + real_inputs_names_a, real_inputs_names_b = ( + input_names_a - init_names_a, + input_names_b - init_names_b, + ) + + # prepare and figure out matching of real inputs a and real inputs b + # try to keep original orders of each inputs + real_inputs_a, real_inputs_b = [], [] + for item in inputs_a: + if item.name in real_inputs_names_a: + real_inputs_a.append(item) + for item in inputs_b: + if item.name in real_inputs_names_b: + real_inputs_b.append(item) + + # suppose there's only one real single input tensor for each model + # find the real single inputs for model_a and model_b + real_single_input_a = None + real_single_input_b = None + size_a, size_b = 0, 0 + shape_a, shape_b = [], [] + for item_a in real_inputs_a: + size, shape = helper.find_size_shape_from_value(item_a) + if size: + assert ( + real_single_input_a is None + ), "Multiple inputs of first model, single input expected." + real_single_input_a = item_a + size_a, shape_a = size, shape + for item_b in real_inputs_b: + size, shape = helper.find_size_shape_from_value(item_b) + if size: + assert ( + real_single_input_b is None + ), "Multiple inputs of second model, single input expected." + real_single_input_b = item_b + size_b, shape_b = size, shape + assert size_a == size_b, "Sizes of two models do not match." 
+
+    # construct input tensors
+    input_data_type_a = real_single_input_a.type.tensor_type.elem_type
+    input_data_type_b = real_single_input_b.type.tensor_type.elem_type
+    input_data_type_a = onnx2np_dtype[input_data_type_a]
+    input_data_type_b = onnx2np_dtype[input_data_type_b]
+
+    # run inference
+    times = 0
+    results_a = [[] for i in range(len(outputs_a))]
+    results_b = [[] for i in range(len(outputs_b))]
+    while times < total_times:
+        # initialize inputs with random data, uniform by default
+        data = np.random.random(size_a)
+        input_a = np.reshape(data, shape_a).astype(input_data_type_a)
+        input_b = np.reshape(data, shape_b).astype(input_data_type_b)
+
+        input_dict_a = {}
+        input_dict_b = {}
+        for item_a in real_inputs_a:
+            item_type_a = onnx2np_dtype[item_a.type.tensor_type.elem_type]
+            input_dict_a[item_a.name] = (
+                np.array([]).astype(item_type_a)
+                if item_a.name != real_single_input_a.name
+                else input_a
+            )
+        for item_b in real_inputs_b:
+            item_type_b = onnx2np_dtype[item_b.type.tensor_type.elem_type]
+            input_dict_b[item_b.name] = (
+                np.array([]).astype(item_type_b)
+                if item_b.name != real_single_input_b.name
+                else input_b
+            )
+
+        ra = session_a.run([], input_dict_a)
+        rb = session_b.run([], input_dict_b)
+        for i in range(len(outputs_a)):
+            results_a[i].append(ra[i])
+            results_b[i].append(rb[i])
+        times += 1
+
+    return results_a, results_b
+
+
+if __name__ == "__main__":
+    # Argument parser.
+    parser = argparse.ArgumentParser(
+        description="Compare two ONNX models to check if "
+        "they have the same output."
+    )
+    parser.add_argument("in_file_a", help="input ONNX file a")
+    parser.add_argument("in_file_b", help="input ONNX file b")
+
+    args = parser.parse_args()
+
+    results_a, results_b = onnx_model_results(
+        args.in_file_a, args.in_file_b, total_times=10
+    )
+    ra_flat = helper.flatten_with_depth(results_a, 0)
+    rb_flat = helper.flatten_with_depth(results_b, 0)
+    shape_a = [item[1] for item in ra_flat]
+    shape_b = [item[1] for item in rb_flat]
+    assert shape_a == shape_b, "The two results' data shapes do not match."
+    ra_raw = [item[0] for item in ra_flat]
+    rb_raw = [item[0] for item in rb_flat]
+
+    try:
+        np.testing.assert_almost_equal(ra_raw, rb_raw, 4)
+        print("Two models have the same behaviour.")
+    except Exception as mismatch:
+        print(mismatch)
+        exit(1)
diff --git a/tools/deployment/optimizer_scripts/onnx_vs_onnx_opt.py b/tools/deployment/optimizer_scripts/onnx_vs_onnx_opt.py
new file mode 100644
index 0000000..5ac4e6b
--- /dev/null
+++ b/tools/deployment/optimizer_scripts/onnx_vs_onnx_opt.py
@@ -0,0 +1,248 @@
+import argparse
+import glob
+import csv
+import numpy as np
+import matplotlib.pyplot as plt
+
+from tools import helper
+import onnx_vs_onnx as onnx_tester
+
+
+def compare_results(results_a, results_b):
+    """Compare ONNX model inference results and calculate basic
+    statistical values.
+
+    :param results_a, results_b: results from running inference multiple times
+    :returns: list of basic statistical values
+    """
+    # input results data can be of nonuniform shape,
+    # so get flattened data to compare
+    ra_flat = helper.flatten_with_depth(results_a, 0)
+    rb_flat = helper.flatten_with_depth(results_b, 0)
+    shape_a = [item[1] for item in ra_flat]
+    shape_b = [item[1] for item in rb_flat]
+    assert shape_a == shape_b, "The two results' data shapes do not match."
+    ra_raw = [item[0] for item in ra_flat]
+    rb_raw = [item[0] for item in rb_flat]
+
+    # the statistical values
+    max_rel_diff = (
+        0  # defined to be max( { abs(diff)/max(abs(ra), abs(rb) ) } )
+    )
+    max_abs_diff = 0  # defined to be max( { abs(ra-rb) } )
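+    # The relative difference of element i is defined as
+    # abs(ra[i] - rb[i]) / max(abs(ra[i]), abs(rb[i])), with 0 used when
+    # the divider is 0 (see the loop below).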
+    mean_rel_diff = 0
+    mean_abs_diff = 0
+    std_rel_diff = 0
+    std_abs_diff = 0
+    acc_with_diff_precision = []
+    rel_diff = []
+    abs_diff_percentiles = []  # abs_diff percentiles
+    rel_diff_percentiles = []  # rel_diff percentiles
+
+    raw_diff = [ra_raw[i] - rb_raw[i] for i in range(len(ra_raw))]
+    abs_diff = [abs(num) for num in raw_diff]
+    for i in range(len(ra_raw)):
+        divider = max([abs(ra_raw[i]), abs(rb_raw[i])])
+        val = abs_diff[i] / divider if divider != 0 else 0
+        rel_diff.append(val)
+
+    max_rel_diff = max(rel_diff)
+    max_abs_diff = max(abs_diff)
+    mean_rel_diff = np.average(rel_diff)
+    mean_abs_diff = np.average(abs_diff)
+    std_rel_diff = np.std(rel_diff)
+    std_abs_diff = np.std(abs_diff)
+
+    # calculate accuracy at different precisions
+    for digit in range(8):
+        correct = 0
+        for i in range(len(ra_raw)):
+            if format(ra_raw[i], "." + str(digit) + "f") == format(
+                rb_raw[i], "." + str(digit) + "f"
+            ):
+                correct += 1
+        acc_with_diff_precision.append(
+            [digit, float(format(correct / len(ra_raw), ".3f"))]
+        )
+
+    # analyze the rel_diff and abs_diff distributions
+    rel_diff.sort()
+    abs_diff.sort()
+    for i in range(20):
+        rel_diff_percentiles.append(
+            ["{}%".format(i * 5), rel_diff[int((i / 20) * len(rel_diff))]]
+        )
+        abs_diff_percentiles.append(
+            ["{}%".format(i * 5), abs_diff[int((i / 20) * len(abs_diff))]]
+        )
+
+    results = [
+        ["max_rel_diff", max_rel_diff],
+        ["max_abs_diff", max_abs_diff],
+        ["mean_rel_diff", mean_rel_diff],
+        ["mean_abs_diff", mean_abs_diff],
+        ["std_rel_diff", std_rel_diff],
+        ["std_abs_diff", std_abs_diff],
+        ["acc_with_diff_precision", acc_with_diff_precision],
+        ["rel_diff_percentiles", rel_diff_percentiles],
+        ["abs_diff_percentiles", abs_diff_percentiles],
+    ]
+
+    return results
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        description="Test model optimization results."
+    )
+
+    parser.add_argument(
+        "dir", type=str, help="the directory that stores onnx models"
+    )
+    parser.add_argument(
+        "ending1", type=str, help="model file name ending (e.g. .onnx)"
+    )
+    parser.add_argument(
+        "ending2",
+        type=str,
+        help="optimized model file name ending (e.g. _opt.onnx)",
+    )
+    parser.add_argument("out_file", type=str, help="output csv file name")
+    parser.add_argument("-p", "--plot", default="N", help="get plots (Y/N)")
+    parser.add_argument(
+        "-i", "--iter_times", default=10, type=int, help="inference times"
+    )
+
+    args = parser.parse_args()
+
+    old_models_paths = glob.glob(args.dir + "*" + args.ending1)
+    new_models_paths = glob.glob(args.dir + "*" + args.ending2)
+
+    stats_table = [
+        [
+            "Model",
+            "max_rel_diff",
+            "max_abs_diff",
+            "mean_rel_diff",
+            "mean_abs_diff",
+            "std_rel_diff",
+            "std_abs_diff",
+            "acc_with_diff_precision",
+            "rel_diff_percentiles",
+            "abs_diff_percentiles",
+        ]
+    ]
+
+    for new_model_path in new_models_paths:
+        old_model_path = new_model_path[: -len(args.ending2)] + args.ending1
+        if old_model_path not in old_models_paths:
+            continue
+
+        # run inference
+        results_a, results_b = onnx_tester.onnx_model_results(
+            old_model_path, new_model_path, total_times=args.iter_times
+        )
+
+        # compare inference results
+        comparison = compare_results(results_a, results_b)
+
+        new_line = [old_model_path.split("/")[-1]]
+        for item in comparison:
+            new_line.append(item[1])
+
+        stats_table.append(new_line)
+
+    # try to read an existing stats file
+    old_stats_table = []
+    try:
+        old_file = open(args.out_file, "r")
+        reader = csv.reader(old_file)
+        old_header = next(reader)
+        for row in reader:
+            old_stats_table.append(row)
+        old_file.close()
+    except Exception:
+        pass
+
+    # merge any old stat data with the new stat data
+    header = stats_table[0]
+    stats_table = stats_table[1:]
+    new_model_names = set([item[0] for item in stats_table])
+    for row in old_stats_table:
+        if row[0] not in new_model_names:
+            stats_table.append(row)
+    stats_table.insert(0, header)
+
+    # write a new stat data file, overwriting the old file
+    new_file = open(args.out_file, "w", newline="")
+    writer = csv.writer(new_file)
+    for row in stats_table:
+        writer.writerow(row)
+    new_file.close()
+
+    # make some plots
+    if args.plot == "Y":
+        if len(stats_table) < 2:
+            exit(0)
+
+        sample_table = (
+            stats_table[1:] if len(stats_table) < 6 else stats_table[1:6]
+        )
+
+        max_rel_diffs = [round(float(item[1]), 2) for item in stats_table[1:]]
+        plt.hist(max_rel_diffs, bins=15)
+        plt.title("Max Relative Difference Histogram")
+        plt.xlabel("Max Relative Difference")
+        plt.ylabel("Counts")
+        plt.savefig("max_rel_diff_hist.png")
+        plt.close()
+
+        max_abs_diffs = [round(float(item[2]), 2) for item in stats_table[1:]]
+        plt.hist(max_abs_diffs, bins=15)
+        plt.title("Max Absolute Difference Histogram")
+        plt.xlabel("Max Absolute Difference")
+        plt.ylabel("Counts")
+        plt.savefig("max_abs_diff_hist.png")
+        plt.close()
+
+        for line in sample_table:
+            model_name = line[0]
+            percentiles = line[-2]
+            x = [
+                round(i * (1 / len(percentiles)), 2)
+                for i in range(len(percentiles))
+            ]
+            y = [ele[1] for ele in percentiles]
+            plt.plot(x, y, label=model_name)
+        plt.title("Rel_diff Percentiles of Raw and Optimized Models")
+        plt.xlabel("percentage")
+        plt.ylabel("relative difference")
+        plt.legend()
+        plt.savefig("rel_diff_percentiles.png")
+        plt.close()
+
+        for line in sample_table:
+            model_name = line[0]
+            percentiles = line[-1]
+            x = [
+                round(i * (1 / len(percentiles)), 2)
+                for i in range(len(percentiles))
+            ]
+            y = [ele[1] for ele in percentiles]
+            plt.plot(x, y, label=model_name)
+        plt.title("Abs_diff Percentiles of Raw and Optimized Models")
+        plt.xlabel("percentage")
+        plt.ylabel("absolute difference")
+        plt.legend()
+        plt.savefig("abs_diff_percentiles.png")
+        plt.close()
+
+        for line in sample_table:
+            model_name = line[0]
+            accuracies = line[-3]
+            x = [acc[0] for acc in accuracies]
+            y = [acc[1] for acc in accuracies]
+            plt.plot(x, y, label=model_name)
+        plt.title("Accuracies with Different Precisions")
+        plt.xlabel("Decimals")
+        plt.ylabel("Precision")
+        plt.legend()
+        plt.savefig("precisions.png")
+        plt.close()
diff --git a/tools/deployment/optimizer_scripts/pytorch2onnx.py b/tools/deployment/optimizer_scripts/pytorch2onnx.py
new file mode 100644
index 0000000..9dd79ec
--- /dev/null
+++ b/tools/deployment/optimizer_scripts/pytorch2onnx.py
@@ -0,0 +1,93 @@
+import onnx
+import onnx.utils
+
+import sys
+import logging
+import argparse
+
+from pytorch_exported_onnx_preprocess import torch_exported_onnx_flow
+
+# Debug use
+# logging.basicConfig(level=logging.DEBUG)
+
+######################################
+#     Generate a prototype onnx      #
+######################################
+
+parser = argparse.ArgumentParser(
+    description="Optimize a Pytorch generated model for Kneron compiler"
+)
+parser.add_argument("in_file", help="input ONNX or PTH FILE")
+parser.add_argument("out_file", help="output ONNX FILE")
+parser.add_argument(
+    "--input-size",
+    dest="input_size",
+    nargs=3,
+    help="if you are using a pth file, please use this argument to set up "
+    "the input size of the model. It should be in 'C H W' format, "
+    "e.g. '--input-size 3 256 512'.",
+)
+parser.add_argument(
+    "--no-bn-fusion",
+    dest="disable_fuse_bn",
+    action="store_true",
+    default=False,
+    help="set this if you have met errors related to inferred shape "
+    "mismatch. This option prevents fusing BatchNormalization "
+    "into Conv.",
+)
+
+args = parser.parse_args()
+
+if len(args.in_file) <= 4:
+    # When the filename is too short.
+    logging.error("Invalid input file: {}".format(args.in_file))
+    exit(1)
+elif args.in_file[-4:] == ".pth":
+    # Pytorch pth case
+    logging.warning("Converting from pth to onnx is not recommended.")
+    onnx_in = args.out_file
+    # Import pytorch libraries
+    from torch.autograd import Variable
+    import torch
+    import torch.onnx
+
+    # import torchvision
+    # Standard ImageNet input - 3 channels, 224x224.
+    # Values don't matter as we care about network structure.
+    # But they can also be real inputs.
+    if args.input_size is None:
+        logging.error("'--input-size' is required for the pth input file.")
+        exit(1)
+    dummy_input = Variable(
+        torch.randn(
+            1,
+            int(args.input_size[0]),
+            int(args.input_size[1]),
+            int(args.input_size[2]),
+        )
+    )
+    # Obtain your model. It can also be constructed explicitly in your script.
+    model = torch.load(args.in_file, map_location="cpu")
+    # model = torchvision.models.resnet34(pretrained=True)
+    # Invoke export.
+    # torch.save(model, "resnet34.pth")
+    torch.onnx.export(model, dummy_input, args.out_file, opset_version=11)
+elif args.in_file[-4:] == "onnx":
+    onnx_in = args.in_file
+else:
+    # When the file is neither an onnx file nor a pytorch pth file.
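+    # Only ".pth" suffixes and suffixes ending in "onnx" are handled by the
+    # branches above; every other input file name is rejected here.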
+    logging.error("Invalid input file: {}".format(args.in_file))
+    exit(1)
+
+onnx_out = args.out_file
+
+######################################
+#           Optimize onnx            #
+######################################
+
+m = onnx.load(onnx_in)
+
+m = torch_exported_onnx_flow(m, args.disable_fuse_bn)
+
+onnx.save(m, onnx_out)
diff --git a/tools/deployment/optimizer_scripts/pytorch_exported_onnx_preprocess.py b/tools/deployment/optimizer_scripts/pytorch_exported_onnx_preprocess.py
new file mode 100644
index 0000000..356f0e3
--- /dev/null
+++ b/tools/deployment/optimizer_scripts/pytorch_exported_onnx_preprocess.py
@@ -0,0 +1,82 @@
+import onnx
+import onnx.utils
+
+import logging
+import argparse
+
+from .tools import combo
+
+
+# Define the general optimization flow for Pytorch exported onnx models.
+def torch_exported_onnx_flow(
+    m: onnx.ModelProto, disable_fuse_bn=False
+) -> onnx.ModelProto:
+    """Optimize the Pytorch exported onnx.
+
+    Args:
+        m (ModelProto): the input onnx model
+        disable_fuse_bn (bool, optional): do not fuse BN into Conv.
+            Defaults to False.
+
+    Returns:
+        ModelProto: the optimized onnx model
+    """
+    m = combo.preprocess(m, disable_fuse_bn)
+    m = combo.pytorch_constant_folding(m)
+    m = combo.common_optimization(m)
+    m = combo.postprocess(m)
+
+    return m
+
+
+# Main Process
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        description="Optimize a Pytorch generated model for Kneron compiler"
+    )
+    parser.add_argument("in_file", help="input ONNX")
+    parser.add_argument("out_file", help="output ONNX FILE")
+    parser.add_argument("--log", default="i", type=str, help="set log level")
+    parser.add_argument(
+        "--no-bn-fusion",
+        dest="disable_fuse_bn",
+        action="store_true",
+        default=False,
+        help="set this if you have met errors related to inferred shape "
+        "mismatch. This option prevents fusing BatchNormalization "
+        "into Conv.",
+    )
+
+    args = parser.parse_args()
+
+    if args.log == "w":
+        logging.basicConfig(level=logging.WARN)
+    elif args.log == "d":
+        logging.basicConfig(level=logging.DEBUG)
+    elif args.log == "e":
+        logging.basicConfig(level=logging.ERROR)
+    else:
+        logging.basicConfig(level=logging.INFO)
+
+    if len(args.in_file) <= 4:
+        # When the filename is too short.
+        logging.error("Invalid input file: {}".format(args.in_file))
+        exit(1)
+    elif args.in_file[-4:] == "onnx":
+        onnx_in = args.in_file
+    else:
+        # When the file is not an onnx file.
+ logging.error("Invalid input file: {}".format(args.in_file)) + exit(1) + + onnx_out = args.out_file + + ###################################### + # Optimize onnx # + ###################################### + + m = onnx.load(onnx_in) + + m = torch_exported_onnx_flow(m, args.disable_fuse_bn) + + onnx.save(m, onnx_out) diff --git a/tools/deployment/optimizer_scripts/res/first_insert_layer.json b/tools/deployment/optimizer_scripts/res/first_insert_layer.json new file mode 100644 index 0000000..4fe3f59 --- /dev/null +++ b/tools/deployment/optimizer_scripts/res/first_insert_layer.json @@ -0,0 +1,27 @@ +{ + "LAYERNAME" : + { + "bias_bitwidth" : 16, + "LAYERNAME_bias" : [15], + "LAYERNAME_weight" : [3,3,3], + "conv_coarse_shift" : [-4,-4,-4], + "conv_fine_shift" : [0,0,0], + "conv_total_shift" : [-4,-4,-4], + "cpu_mode" : false, + "delta_input_bitwidth" : [0], + "delta_output_bitwidth" : 8, + "flag_radix_bias_eq_output" : true, + "input_scale" : [[1.0,1.0,1.0]], + "output_scale" : [1.0, 1.0, 1.0], + "psum_bitwidth" : 16, + "weight_bitwidth" : 8, + "input_datapath_bitwidth" : [8], + "input_datapath_radix" : [7], + "working_input_bitwidth" : 8, + "working_input_radix" : [7], + "working_output_bitwidth" : 16, + "working_output_radix" : 15, + "output_datapath_bitwidth" : 8, + "output_datapath_radix" : 7 + } +} diff --git a/tools/deployment/optimizer_scripts/res/test_onnx_tester_on_difference.sh b/tools/deployment/optimizer_scripts/res/test_onnx_tester_on_difference.sh new file mode 100644 index 0000000..342b198 --- /dev/null +++ b/tools/deployment/optimizer_scripts/res/test_onnx_tester_on_difference.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +python onnx_tester.py /test_models/mobilenet_v2_224.onnx /test_models/mobilenet_v2_224.cut.onnx +if [ $? -eq 0 ]; then + echo "Those two model results should be different!" + exit 1 +fi + +exit 0 diff --git a/tools/deployment/optimizer_scripts/tensorflow2onnx.py b/tools/deployment/optimizer_scripts/tensorflow2onnx.py new file mode 100644 index 0000000..44b8667 --- /dev/null +++ b/tools/deployment/optimizer_scripts/tensorflow2onnx.py @@ -0,0 +1,180 @@ +import tensorflow as tf +import tf2onnx +import argparse +import logging +import sys +import onnx +import onnx.utils +from tensorflow.python.platform import gfile +from tools import combo, eliminating, replacing, other + + +def tf2onnx_flow(pb_path: str, test_mode=False) -> onnx.ModelProto: + """Convert frozen graph pb file into onnx + + Args: + pb_path (str): input pb file path + test_mode (bool, optional): test mode. Defaults to False. 
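+
+    Example (hypothetical path):
+        m = tf2onnx_flow("./frozen_graph.pb")
+        onnx.save(m, "./frozen_graph.onnx")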
+ + Raises: + Exception: invalid input file + + Returns: + onnx.ModelProto: converted onnx + """ + TF2ONNX_VERSION = int(tf2onnx.version.version.replace(".", "")) + + if 160 <= TF2ONNX_VERSION: + from tf2onnx import tf_loader + else: + from tf2onnx import loader as tf_loader + + if pb_path[-3:] == ".pb": + model_name = pb_path.split("/")[-1][:-3] + + # always reset tensorflow session at begin + tf.reset_default_graph() + + with tf.Session() as sess: + with gfile.FastGFile(pb_path, "rb") as f: + graph_def = tf.GraphDef() + graph_def.ParseFromString(f.read()) + sess.graph.as_default() + tf.import_graph_def(graph_def, name="") + + if 160 <= int(tf2onnx.version.version.replace(".", "")): + ( + onnx_nodes, + op_cnt, + attr_cnt, + output_shapes, + dtypes, + functions, + ) = tf2onnx.tf_utils.tflist_to_onnx(sess.graph, {}) + else: + ( + onnx_nodes, + op_cnt, + attr_cnt, + output_shapes, + dtypes, + ) = tf2onnx.tfonnx.tflist_to_onnx( + sess.graph.get_operations(), {} + ) + + for n in onnx_nodes: + if len(n.output) == 0: + onnx_nodes.remove(n) + + # find inputs and outputs of graph + nodes_inputs = set() + nodes_outputs = set() + + for n in onnx_nodes: + if n.op_type == "Placeholder": + continue + for input in n.input: + nodes_inputs.add(input) + for output in n.output: + nodes_outputs.add(output) + + graph_input_names = set() + for input_name in nodes_inputs: + if input_name not in nodes_outputs: + graph_input_names.add(input_name) + + graph_output_names = set() + for n in onnx_nodes: + if n.input and n.input[0] not in nodes_outputs: + continue + if len(n.output) == 0: + n.output.append(n.name + ":0") + graph_output_names.add(n.output[0]) + else: + output_name = n.output[0] + if (output_name not in nodes_inputs) and ( + 0 < len(n.input) + ): + graph_output_names.add(output_name) + + logging.info("Model Inputs: %s", str(list(graph_input_names))) + logging.info("Model Outputs: %s", str(list(graph_output_names))) + + graph_def, inputs, outputs = tf_loader.from_graphdef( + model_path=pb_path, + input_names=list(graph_input_names), + output_names=list(graph_output_names), + ) + + with tf.Graph().as_default() as tf_graph: + tf.import_graph_def(graph_def, name="") + + if 160 <= TF2ONNX_VERSION: + with tf_loader.tf_session(graph=tf_graph): + onnx_graph = tf2onnx.tfonnx.process_tf_graph( + tf_graph=tf_graph, + input_names=inputs, + output_names=outputs, + opset=11, + ) + else: + with tf.Session(graph=tf_graph): + onnx_graph = tf2onnx.tfonnx.process_tf_graph( + tf_graph=tf_graph, + input_names=inputs, + output_names=outputs, + opset=11, + ) + + # Optimize with tf2onnx.optimizer + onnx_graph = tf2onnx.optimizer.optimize_graph(onnx_graph) + model_proto = onnx_graph.make_model(model_name) + + # Make tf2onnx output compatible with the spec. of other.polish_model + replacing.replace_initializer_with_Constant(model_proto.graph) + model_proto = other.polish_model(model_proto) + + else: + raise Exception( + 'expect .pb file as input, but got "' + str(pb_path) + '"' + ) + + # rename + m = model_proto + + m = combo.preprocess(m) + m = combo.common_optimization(m) + m = combo.tensorflow_optimization(m) + m = combo.postprocess(m) + + if not test_mode: + g = m.graph + eliminating.eliminate_shape_changing_after_input(g) + + m = other.polish_model(m) + return m + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Convert tensorflow pb file to onnx file and optimized " + "onnx file. Or just optimize tensorflow onnx file." 
+    )
+    parser.add_argument("in_file", help="input file")
+    parser.add_argument("out_file", help="output optimized model file")
+    parser.add_argument(
+        "-t",
+        "--test_mode",
+        action="store_true",
+        help="test mode will not eliminate shape changes after the input",
+    )
+
+    args = parser.parse_args()
+    logging.basicConfig(
+        stream=sys.stdout,
+        format="[%(asctime)s] %(levelname)s: %(message)s",
+        level=logging.INFO,
+    )
+    m = tf2onnx_flow(args.in_file, args.test_mode)
+    onnx.save(m, args.out_file)
+    logging.info("Save Optimized ONNX: %s", args.out_file)
diff --git a/tools/deployment/optimizer_scripts/tflite_vs_onnx.py b/tools/deployment/optimizer_scripts/tflite_vs_onnx.py
new file mode 100644
index 0000000..e8405cf
--- /dev/null
+++ b/tools/deployment/optimizer_scripts/tflite_vs_onnx.py
@@ -0,0 +1,85 @@
+import argparse
+import numpy as np
+import tensorflow as tf
+import onnx
+import onnxruntime
+
+from tools import helper
+
+
+def compare_tflite_and_onnx(tflite_file, onnx_file, total_times=10):
+    # Setup onnx session and get meta data
+    onnx_session = onnxruntime.InferenceSession(onnx_file, None)
+    onnx_outputs = onnx_session.get_outputs()
+    assert len(onnx_outputs) == 1, "The onnx model has more than one output"
+    onnx_model = onnx.load(onnx_file)
+    onnx_graph = onnx_model.graph
+    onnx_inputs = onnx_graph.input
+    assert len(onnx_inputs) == 1, "The onnx model has more than one input"
+    _, onnx_input_shape = helper.find_size_shape_from_value(onnx_inputs[0])
+    # Setup TFLite session and get meta data
+    tflite_session = tf.lite.Interpreter(model_path=tflite_file)
+    tflite_session.allocate_tensors()
+    tflite_inputs = tflite_session.get_input_details()
+    tflite_outputs = tflite_session.get_output_details()
+    tflite_input_shape = tflite_inputs[0]["shape"]
+    # Compare input shape
+    assert len(onnx_input_shape) == len(
+        tflite_input_shape
+    ), "TFLite and ONNX input shapes do not match."
+    assert onnx_input_shape == [
+        tflite_input_shape[0],
+        tflite_input_shape[3],
+        tflite_input_shape[1],
+        tflite_input_shape[2],
+    ], "TFLite and ONNX input shapes do not match."
+    # Generate random numbers and run
+    tflite_results = []
+    onnx_results = []
+    for _ in range(total_times):
+        # Generate input
+        tflite_input_data = np.array(
+            np.random.random_sample(tflite_input_shape), dtype=np.float32
+        )
+        onnx_input_data = np.transpose(tflite_input_data, [0, 3, 1, 2])
+        # Run tflite
+        tflite_session.set_tensor(tflite_inputs[0]["index"], tflite_input_data)
+        tflite_session.invoke()
+        tflite_results.append(
+            tflite_session.get_tensor(tflite_outputs[0]["index"])
+        )
+        # Run onnx
+        onnx_input_dict = {onnx_inputs[0].name: onnx_input_data}
+        onnx_results.append(onnx_session.run([], onnx_input_dict)[0])
+
+    return tflite_results, onnx_results
+
+
+if __name__ == "__main__":
+    # Argument parser.
+    parser = argparse.ArgumentParser(
+        description="Compare a TFLite model and an ONNX model to check "
+        "if they have the same output."
+ ) + parser.add_argument("tflite_file", help="input tflite file") + parser.add_argument("onnx_file", help="input ONNX file") + + args = parser.parse_args() + + results_a, results_b = compare_tflite_and_onnx( + args.tflite_file, args.onnx_file, total_times=10 + ) + ra_flat = helper.flatten_with_depth(results_a, 0) + rb_flat = helper.flatten_with_depth(results_b, 0) + shape_a = [item[1] for item in ra_flat] + shape_b = [item[1] for item in rb_flat] + assert shape_a == shape_b, "two results data shape doesn't match" + ra_raw = [item[0] for item in ra_flat] + rb_raw = [item[0] for item in rb_flat] + + try: + np.testing.assert_almost_equal(ra_raw, rb_raw, 8) + print("Two models have the same behaviour.") + except Exception as mismatch: + print(mismatch) + exit(1) diff --git a/tools/deployment/optimizer_scripts/tools/__init__.py b/tools/deployment/optimizer_scripts/tools/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tools/deployment/optimizer_scripts/tools/combo.py b/tools/deployment/optimizer_scripts/tools/combo.py new file mode 100644 index 0000000..1a20ebb --- /dev/null +++ b/tools/deployment/optimizer_scripts/tools/combo.py @@ -0,0 +1,267 @@ +"""Combo functions that are usually called together. +""" + +import logging + +try: + from onnx import optimizer +except ImportError: + import onnxoptimizer as optimizer + +from . import helper +from . import other +from . import replacing +from . import eliminating +from . import fusing +from . import constant_folding +from . import removing_transpose +from .common_pattern import torch_pattern_match, tf_pattern_match +from .helper import logger + + +def preprocess( + model_proto, disable_fuse_bn=False, duplicate_shared_weights=True +): + """The most common used functions before other processing. + + Args: + model_proto: the original model input + duplicate_shared_weights(bool, optional): duplicate shared weights. + Defaults to True. 
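+        disable_fuse_bn(bool, optional): do not append the
+            "fuse_bn_into_conv" optimizer pass. Defaults to False.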
+ + Return: + the new model after preprocessing + + It includes: + + - inference shapes + - optimize model by ONNX library + - give names to the nodes + - replace initializer with Constant node + - replace -1 batch size with 1 + - eliminate dropout and identity + - eliminate no children inputs + - topological sort + + The optimizations provided by ONNX: + + - eliminate_identity + - eliminate_nop_dropout + - eliminate_nop_transpose + - eliminate_nop_pad + - eliminate_unused_initializer + - eliminate_deadend + - fuse_consecutive_squeezes + - fuse_consecutive_transposes + - fuse_add_bias_into_conv + - fuse_transpose_into_gemm + - fuse_matmul_add_bias_into_gemm + - fuse_bn_into_conv + - fuse_pad_into_conv + + """ + logger.info("Preprocessing the model...") + helper.setup_current_opset_version(model_proto) + eliminating.eliminate_empty_value_infos(model_proto.graph) + other.add_name_to_node(model_proto.graph) + other.rename_all_node_name(model_proto.graph) + replacing.replace_initializer_with_Constant(model_proto.graph) + other.topological_sort(model_proto.graph) + m = other.polish_model(model_proto) + passes = [ + "extract_constant_to_initializer", + "eliminate_nop_dropout", + "eliminate_deadend", + "fuse_matmul_add_bias_into_gemm", + "fuse_pad_into_conv", + ] + if not disable_fuse_bn: + passes.append("fuse_bn_into_conv") + m = optimizer.optimize(m, passes) + g = m.graph + # Add name again since onnx optimizer higher than 1.7 may remove node names + other.add_name_to_node(g) + if duplicate_shared_weights: + replacing.replace_initializer_with_Constant( + g, duplicate_shared_weights=True + ) + other.duplicate_param_shared_constant(g) + else: + replacing.replace_initializer_with_Constant( + g, duplicate_shared_weights=False + ) + other.topological_sort(g) + m = other.polish_model(m) + g = m.graph + eliminating.eliminate_consecutive_Cast(m.graph) + eliminating.eliminate_Cast_after_input(m.graph) + eliminating.eliminate_nop_pads(g) + eliminating.eliminate_nop_cast(g) + eliminating.eliminate_Identify_and_Dropout(g) + eliminating.eliminate_trivial_maxpool(g) + eliminating.eliminate_no_children_input(g) + other.format_value_info_shape(g) + other.topological_sort(g) + m = other.inference_shapes(m) + g = m.graph + replacing.replace_split_with_slices(g) + other.topological_sort(g) + + return m + + +def common_optimization(m): + """Common optimizations can be used in most cases. + + :param m: the original model input\\ + :return: the new model after preprocessing + + It includes: + + - transpose B in Gemm + - fuse BN into Gemm + - fuse consecutive Gemm + - replace AveragePool with GAP + - replace Squeeze/Unsqueeze with Reshape + - replace Reshape with Flatten + """ + logger.info("Doing nodes fusion and replacement... 
") + m = other.polish_model(m) + g = m.graph + other.transpose_B_in_Gemm(g) + fusing.fuse_BN_into_Gemm(g) + fusing.fuse_BN_with_Reshape_into_Gemm(g) + fusing.fuse_Gemm_into_Gemm(g) + fusing.fuse_consecutive_reducemean(g) + fusing.fuse_slice_nodes_into_conv(g) + fusing.fuse_relu_min_into_clip(g) + other.duplicate_shared_Flatten(g) + replacing.replace_average_pool_with_GAP(g) + + m = other.polish_model(m) + g = m.graph + + replacing.replace_Squeeze_with_Reshape(g) + replacing.replace_Unsqueeze_with_Reshape(g) + replacing.replace_Reshape_with_Flatten(g) + replacing.replace_ReduceMean_with_GlobalAveragePool(g) + replacing.replace_Sum_with_Adds(g) + replacing.replace_constant_input_concat_with_pad(g) + other.topological_sort(g) + return m + + +def pytorch_constant_folding(m): + """Constant folding needed by Pytorch exported models. It should be done + before using onnx optimizers since the dynamic shape structure may affect + the optimizations. + + :param m: the original model input\\ + :return: the new model after preprocessing + """ + logger.info("Working on constant folding.") + replacing.replace_shape_with_constant(m.graph) + replacing.replace_ConstantOfShape_with_constant(m.graph) + + # constant_folding + m = other.inference_shapes(m) + while constant_folding.constant_folding(m.graph): + logging.debug("After constant folding jobs.") + other.topological_sort(m.graph) + while len(m.graph.value_info) != 0: + m.graph.value_info.pop() + + m = other.inference_shapes(m) + replacing.replace_shape_with_constant(m.graph) + other.topological_sort(m.graph) + m = torch_pattern_match(m) + m = optimizer.optimize(m, ["eliminate_deadend"]) + return m + + +def tensorflow_optimization(m): + """Optimizations for tf models can be used in most cases. + + :param m: the original model input\\ + :return: the new model after preprocessing + + It includes: + + - eliminate shape change after input + - eliminate Reshape cast + - eliminate Squeeze before Reshape + - fuse Transpose into Constant + - replace Shape with Constant + """ + + fusing.fuse_Transpose_into_Constant(m.graph) + fusing.fuse_MatMul_and_Add_into_Gemm(m.graph) + other.topological_sort(m.graph) + + m = other.polish_model(m) + + # constant folding + replacing.replace_shape_with_constant(m.graph) + + # constant_folding + m = other.inference_shapes(m) + while constant_folding.constant_folding(m.graph): + logging.debug("After constant folding jobs.") + other.topological_sort(m.graph) + while len(m.graph.value_info) != 0: + m.graph.value_info.pop() + + m = other.inference_shapes(m) + replacing.replace_shape_with_constant(m.graph) + other.topological_sort(m.graph) + m = tf_pattern_match(m) + m = optimizer.optimize(m, ["eliminate_deadend"]) + + eliminating.eliminate_consecutive_reshape(m.graph) + eliminating.eliminate_Squeeze_before_Reshape(m.graph) + other.topological_sort(m.graph) + return m + + +def postprocess(m): + """Inference the shape and prepare for export. 
+ + :param m: the original model input\\ + :return: the new model after preprocessing + """ + logger.info("Postprocessing the model...") + while len(m.graph.value_info) > 0: + m.graph.value_info.pop() + m = other.polish_model(m) + eliminating.eliminate_single_input_Concat(m.graph) + eliminating.eliminate_nop_Maxpool_and_AveragePool(m.graph) + eliminating.eliminate_trivial_elementwise_calculation(m.graph) + m = other.polish_model(m) + + replacing.replace_depthwise_1x1_with_bn(m.graph) + m = other.polish_model(m) + + # removing transpose + m = removing_transpose.eliminate_transposes(m) + m = other.polish_model(m) + removing_transpose.remove_trivial_transpose(m.graph) + removing_transpose.fuse_Transpose_into_Gemm_weight(m.graph) + + # fuse some nodes + fusing.fuse_mul_and_add_into_bn(m.graph) + m = other.polish_model(m) + fusing.fuse_mul_and_add_into_gemm(m.graph) + m = other.polish_model(m) + fusing.fuse_conv_and_add_into_conv(m.graph) + m = other.polish_model(m) + replacing.replace_mul_to_bn(m.graph) + replacing.replace_div_to_bn(m.graph) + replacing.replace_add_to_bn(m.graph) + replacing.replace_sub_to_bn(m.graph) + replacing.replace_sub_with_bn_and_add(m.graph) + m = other.polish_model(m) + + other.add_output_to_value_info(m.graph) + m = optimizer.optimize(m, ["eliminate_deadend"]) + m.producer_name = "kneron_formatter" + return m diff --git a/tools/deployment/optimizer_scripts/tools/common_pattern.py b/tools/deployment/optimizer_scripts/tools/common_pattern.py new file mode 100644 index 0000000..19d4b35 --- /dev/null +++ b/tools/deployment/optimizer_scripts/tools/common_pattern.py @@ -0,0 +1,177 @@ +from collections import defaultdict +import numpy as np +import onnx.helper +import onnx.utils + +from . import helper +from . import other + + +def torch_pattern_match(m): + # Create a map from optype to the nodes. + optype2node = defaultdict(list) + for node in m.graph.node: + optype2node[node.op_type].append(node) + for matmul_node in optype2node["MatMul"]: + pattern_matmul_mul_add(m.graph, matmul_node) + for resize_node in optype2node["Resize"]: + # torch nn.UpsamplingBilinear2d will be given us 4 input: + # "X, roi, scales, sizes" + if len(resize_node.input) != 4: + continue + make_UpsamplingBilinear2d_value_info(m.graph, resize_node.name) + m = onnx.shape_inference.infer_shapes(m) + polish_RESIZE_input_param_node(m.graph, resize_node.name) + m = other.polish_model(m) + return m + + +def tf_pattern_match(m): + # Create a map from optype to the nodes. 
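+    # e.g. optype2node == {"MatMul": [node1, ...], "Resize": [node2, ...]},
+    # so each pattern pass below only visits the op types it handles.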
+ optype2node = defaultdict(list) + for node in m.graph.node: + optype2node[node.op_type].append(node) + for matmul_node in optype2node["MatMul"]: + pattern_matmul_mul_add(m.graph, matmul_node) + for resize_node in optype2node["Resize"]: + # In tensorflow2onnx, ReizeXXX will be given us 4 input: + # "X, roi, scales, sizes" + # and node output name will be given the "node name + :0" + if len(resize_node.input) != 4: + continue + make_UpsamplingBilinear2d_value_info(m.graph, resize_node.name) + m = onnx.shape_inference.infer_shapes(m) + polish_RESIZE_input_param_node(m.graph, resize_node.name) + m = other.polish_model(m) + return m + + +def pattern_matmul_mul_add(g, matmul_node): + # Check node match - Mul node + next_nodes = helper.find_nodes_by_input_name(g, matmul_node.output[0]) + if len(next_nodes) != 1: + return + if next_nodes[0].op_type != "Mul": + return + mul_node = next_nodes[0] + # Check node match - Add node + next_nodes = helper.find_nodes_by_input_name(g, mul_node.output[0]) + if len(next_nodes) != 1: + return + if next_nodes[0].op_type != "Add": + return + add_node = next_nodes[0] + # Check Mul weight + mul_weight_node = helper.find_node_by_output_name(g, mul_node.input[1]) + if mul_weight_node.op_type != "Constant": + return + weight_size, mul_weight = helper.constant_to_list(mul_weight_node) + for i in mul_weight: + if i != 1: + return + channel = weight_size[0] + # Check Add weight + add_weight_node = helper.find_node_by_output_name(g, add_node.input[1]) + if add_weight_node.op_type != "Constant": + return + # Check MatMul weight to see if it need weight broadcast + matmul_weight_node = helper.find_node_by_output_name( + g, matmul_node.input[1] + ) + matmul_weight = helper.constant_to_numpy(matmul_weight_node) + if matmul_weight.shape[1] == 1: + # Weight broadcast + new_matmul_weight = np.tile(matmul_weight, channel) + new_matmul_weight_node = helper.numpy_to_constant( + matmul_weight_node.name, new_matmul_weight + ) + g.node.remove(matmul_weight_node) + g.node.extend([new_matmul_weight_node]) + value = helper.find_value_by_name(g, matmul_weight_node.output[0]) + if value is not None: + g.value_info.remove(value) + # Remove Mul node + g.node.remove(mul_weight_node) + value = helper.find_value_by_name(g, mul_weight_node.output[0]) + if value is not None: + g.value_info.remove(value) + g.node.remove(mul_node) + value = helper.find_value_by_name(g, mul_node.output[0]) + if value is not None: + g.value_info.remove(value) + # Fuse Matmul and Add + gemm_node = onnx.helper.make_node( + "Gemm", + [matmul_node.input[0], matmul_node.input[1], add_node.input[1]], + [add_node.output[0]], + name=matmul_node.name, + alpha=1.0, + beta=1.0, + transA=0, + transB=0, + ) + g.node.extend([gemm_node]) + # Clean up + g.node.remove(matmul_node) + g.node.remove(add_node) + value = helper.find_value_by_name(g, matmul_node.output[0]) + if value is not None: + g.value_info.remove(value) + other.topological_sort(g) + + +def make_UpsamplingBilinear2d_value_info(g, resize_node_name): + resize_node = helper.find_node_by_node_name(g, resize_node_name) + + shape_data_node = helper.find_node_by_output_name(g, resize_node.input[3]) + shape_data = helper.constant_to_numpy(shape_data_node).astype(int) + l_shape_data = list(shape_data) + if l_shape_data[0] == 0: + l_shape_data[0] = 1 + l_shape_data[0] + shape_data = np.array(l_shape_data) + + new_output_value_info = onnx.helper.make_tensor_value_info( + resize_node.output[0], + onnx.helper.TensorProto.FLOAT, + shape_data.tolist(), + ) + + 
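+    # Register the recovered static output shape as a value info so that
+    # onnx.shape_inference.infer_shapes can propagate it further.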
g.value_info.extend([new_output_value_info]) + + +def polish_RESIZE_input_param_node(g, resize_node_name): + resize_node = helper.find_node_by_node_name(g, resize_node_name) + + shape_data_node = helper.find_node_by_output_name(g, resize_node.input[3]) + shape_data = helper.constant_to_numpy(shape_data_node).astype(int) + + # handle 0 batch size which is invalid + if shape_data[0] == 0: + shape_data[0] = 1 + + pre_node_output_value_info = helper.find_value_by_name( + g, resize_node.input[0] + ) + ori_shape = np.array( + [ + pre_node_output_value_info.type.tensor_type.shape.dim[0].dim_value, + pre_node_output_value_info.type.tensor_type.shape.dim[1].dim_value, + pre_node_output_value_info.type.tensor_type.shape.dim[2].dim_value, + pre_node_output_value_info.type.tensor_type.shape.dim[3].dim_value, + ] + ) + + resize_node.input.remove(resize_node.input[3]) + + resize_scales = np.array(shape_data / ori_shape).astype(float) + resize_scale_node = helper.list_to_constant( + "resize_scales_node_" + resize_node.name, + resize_scales.shape, + resize_scales, + data_type=onnx.helper.TensorProto.FLOAT, + ) + + resize_node.input[2] = resize_scale_node.name + g.node.extend([resize_scale_node]) + + other.topological_sort(g) diff --git a/tools/deployment/optimizer_scripts/tools/constant_folding.py b/tools/deployment/optimizer_scripts/tools/constant_folding.py new file mode 100644 index 0000000..45ef674 --- /dev/null +++ b/tools/deployment/optimizer_scripts/tools/constant_folding.py @@ -0,0 +1,973 @@ +import onnx.utils +import onnx +import numpy as np +import logging +import traceback + +from . import helper +from .other import topological_sort +from .helper import logger + + +def are_all_inputs_Constant_with_one_child(g, node): + for input_name in node.input: + input_node = helper.find_node_by_output_name(g, input_name) + if input_node is None or input_node.op_type != "Constant": + return False + relative_outputs = helper.find_nodes_by_input_name(g, input_name) + if len(relative_outputs) > 1: + return False + return True + + +def constant_folding(g): + """ Do constant folding until nothing more can be done. + + :param g: The onnx GraphProto\\ + :return: If any node is folded, return True. Otherwise, return False. + """ + keep_folding = True # Keep the while loop + folded = False # Return value + try: + # Before constant folding, duplicate the constant nodes. + duplicate_constant_node(g) + while keep_folding: + keep_folding = False + for node in g.node: + # Check if the node is foldable + if node.op_type not in constant_folding_nodes.keys(): + continue + # Check if parents of the node are all + # single follower constant node. + if not are_all_inputs_Constant_with_one_child(g, node): + continue + # Constant folding for the specific node + if constant_folding_nodes[node.op_type](g, node): + logging.debug( + "Constant nodes and %s %s are folded.", + node.op_type, + node.name, + ) + folded = True + keep_folding = True + else: + logging.debug( + "Constant nodes and %s %s are skipped.", + node.op_type, + node.name, + ) + except Exception: + logger.error("An exception is raised while constant folding.") + logger.error(traceback.format_exc()) + return folded + + +def duplicate_constant_node(g): + """ + Duplicate the constant node if its following nodes contain + constant folding nodes. Create and link the new constant nodes + to the constant folding nodes. 
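+
+    Folding removes a foldable node's constant inputs, so a constant that
+    is shared with any other consumer must be duplicated first to keep
+    those consumers valid.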
+ """ + for node in g.node: + # Find a valid constant node + if node.op_type != "Constant": + continue + output_val_info = helper.find_value_by_name(g, node.output[0]) + if output_val_info is None: + print( + "Cannot inference the shape of Const node output: " + + node.output[0] + ) + exit(1) + data_shape = helper.get_shape_from_value_info(output_val_info) + output_nodes = helper.find_nodes_by_input_name(g, node.output[0]) + + # For constant that has only one following node, no need to duplicate + if len(output_nodes) < 2: + continue + + # Check if its following nodes are foldable + foldable_output_nodes = list( + filter( + lambda n: n.op_type in constant_folding_nodes.keys(), + output_nodes, + ) + ) + if not foldable_output_nodes: + continue + + # Duplicate the node needed by foldable nodes + for i in range(len(foldable_output_nodes)): + logging.debug( + f"Found constant {node.name} and " + f"{foldable_output_nodes[i].op_type} " + f"{foldable_output_nodes[i].name} are availble for folding. " + "Duplicate constant.", + ) + output_name = node.output[0] + "_dup_" + str(i) + new_constant_node = onnx.helper.make_node( + "Constant", + [], + [output_name], + name=output_name, + value=node.attribute[0].t, + ) + new_val_info = onnx.helper.make_tensor_value_info( + output_name, node.attribute[0].t.data_type, data_shape + ) + input_ind = list(foldable_output_nodes[i].input).index( + node.output[0] + ) + foldable_output_nodes[i].input[input_ind] = output_name + + g.node.extend([new_constant_node]) + g.value_info.extend([new_val_info]) + + # If all following nodes are foldable node, delete the original node. + if len(foldable_output_nodes) == len(output_nodes): + g.node.remove(node) + g.value_info.remove(output_val_info) + + topological_sort(g) + + return + + +def slice_constant_folding(g, node): + op_version = helper.get_current_opset_version() + # only support opset 9 & 11 + if op_version == 11: + return slice_constant_folding_Opset_11(g, node) + elif op_version == 9: + return slice_constant_folding_Opset_9(g, node) + + +def slice_constant_folding_Opset_11(g, node): + """Fold constant and slice nodes to a single constant node.""" + pre_node = helper.find_node_by_output_name(g, node.input[0]) + pre_shape, data_list = helper.constant_to_list(pre_node) + + starts_node = helper.find_node_by_output_name(g, node.input[1]) + _, starts = helper.constant_to_list(starts_node) + + ends_node = helper.find_node_by_output_name(g, node.input[2]) + _, ends = helper.constant_to_list(ends_node) + + axes_node = ( + None + if len(node.input) <= 3 + else helper.find_node_by_output_name(g, node.input[3]) + ) + if not axes_node: + axes = list(range(len(helper.get_shape(data_list)))) + else: + _, axes = helper.constant_to_list(axes_node) + + steps_node = ( + None + if len(node.input) <= 4 + else helper.find_node_by_output_name(g, node.input[4]) + ) + if not steps_node: + steps = [1] * len(helper.get_shape(data_list)) + else: + _, steps = helper.constant_to_list(steps_node) + + data_list = list(map(int, data_list)) + starts = list(map(int, starts)) + ends = list(map(int, ends)) + axes = list(map(int, axes)) + steps = list(map(int, steps)) + + data_list = np.reshape(data_list, pre_shape) + + new_data = None + for idx, _ in enumerate(axes): + new_data = np.apply_along_axis( + lambda x: x[starts[idx]:ends[idx]:steps[idx]], idx, data_list + ) + + new_node = helper.list_to_constant( + node.output[0], + helper.get_shape(new_data), + helper.flatten_to_list(new_data), + ) + g.node.extend([new_node]) + value_info = 
helper.find_value_by_name(g, pre_node.output[0]) + if value_info is not None: + g.value_info.remove(value_info) + g.node.remove(node) + g.node.remove(pre_node) + + return True + + +def slice_constant_folding_Opset_9(g, node): + """Fold constant and slice nodes to a single constant node.""" + pre_node = helper.find_node_by_output_name(g, node.input[0]) + pre_shape, data_list = helper.constant_to_list(pre_node) + + data_list = np.reshape(data_list, pre_shape) + axes = helper.get_attribute_by_name(node, "axes") + ends = list(helper.get_attribute_by_name(node, "ends").ints) + starts = list(helper.get_attribute_by_name(node, "starts").ints) + + if not axes: + axes = list(range(len(helper.get_shape(data_list)))) + else: + axes = list(axes.ints) + + new_data = helper.slice_data(data_list, starts, ends, axes) + new_node = helper.list_to_constant( + node.output[0], + helper.get_shape(new_data), + helper.flatten_to_list(new_data), + ) + g.node.extend([new_node]) + value_info = helper.find_value_by_name(g, pre_node.output[0]) + if value_info is not None: + g.value_info.remove(value_info) + g.node.remove(node) + g.node.remove(pre_node) + + return True + + +def cast_constant_folding(g, node): + """Fold constant and cast node to a single constant node.""" + pre_node = helper.find_node_by_output_name(g, node.input[0]) + shape, data = helper.constant_to_list(pre_node) + data_type = node.attribute[0].i + if data_type in (6, 7): + data = list(map(int, data)) + elif data_type == onnx.helper.TensorProto.FLOAT: + data = list(map(float, data)) + else: + raise RuntimeError("data type not supported") + + if shape == 1: + tensor = onnx.helper.make_tensor( + name=pre_node.attribute[0].name, + data_type=data_type, + dims=[], + vals=data, + ) + else: + tensor = onnx.helper.make_tensor( + name=pre_node.attribute[0].name, + data_type=data_type, + dims=shape, + vals=helper.flatten_to_list(data), + ) + new_node = onnx.helper.make_node( + "Constant", [], [node.output[0]], name=node.output[0], value=tensor + ) + g.node.extend([new_node]) + + value_info = helper.find_value_by_name(g, pre_node.output[0]) + if value_info is not None: + g.value_info.remove(value_info) + value_info = helper.find_value_by_name(g, node.output[0]) + if value_info is not None: + g.value_info.remove(value_info) + g.node.remove(pre_node) + g.node.remove(node) + + return True + + +def reduceprod_constant_folding(g, node): + """Fold constant and reduceprod nodes to a single constant node.""" + pre_node = helper.find_node_by_output_name(g, node.input[0]) + shape, data_set = helper.constant_to_list(pre_node) + tensor = pre_node.attribute[0].t + + data_set = np.reshape(data_set, shape) + for att in node.attribute: + if att.name == "axes": + axes = list(att.ints) + else: + keepdims = int(att.i) + + new_data = np.prod(data_set, axis=tuple(axes), keepdims=keepdims == 1) + new_shape = helper.get_shape(new_data) + new_flat_data = helper.flatten_to_list(new_data) + new_tensor = onnx.helper.make_tensor( + name=node.output[0], + data_type=tensor.data_type, + dims=new_shape, + vals=new_flat_data, + ) + new_node = onnx.helper.make_node( + "Constant", [], [node.output[0]], name=node.output[0], value=new_tensor + ) + + g.node.extend([new_node]) + value_info = None + for item in g.value_info: + if item.name == pre_node.output[0]: + value_info = item + if value_info is not None: + g.value_info.remove(value_info) + g.node.remove(pre_node) + g.node.remove(node) + + return True + + +def reshape_constant_input_folding(g, node): + """Fold constant and reshape nodes to a 
single constant node.""" + pre_data_node = helper.find_node_by_output_name(g, node.input[0]) + pre_shape_node = helper.find_node_by_output_name(g, node.input[1]) + + data = helper.constant_to_numpy(pre_data_node) + _, shape = helper.constant_to_list(pre_shape_node) + new_data = np.reshape(data, shape) + + new_tensor = onnx.helper.make_tensor( + name=node.output[0], + data_type=pre_data_node.attribute[0].t.data_type, + dims=new_data.shape, + vals=helper.flatten_to_list(new_data), + ) + new_node = onnx.helper.make_node( + "Constant", [], [node.output[0]], name=node.output[0], value=new_tensor + ) + g.node.extend([new_node]) + + data_val_info = helper.find_value_by_name(g, pre_data_node.output[0]) + shape_val_info = helper.find_value_by_name(g, pre_shape_node.output[0]) + + g.value_info.remove(data_val_info) + g.value_info.remove(shape_val_info) + + g.node.remove(node) + g.node.remove(pre_data_node) + g.node.remove(pre_shape_node) + + return True + + +def concat_constant_folding(g, node): + """Fold constant and concat nodes to a single constant node.""" + node_to_del = [] + valid_inputs = True + for input_name in node.input: + input_node = helper.find_node_by_output_name(g, input_name) + input_node_output = helper.find_nodes_by_input_name(g, input_name) + if len(input_node_output) > 1: + valid_inputs = False + break + if input_node.op_type != "Constant": + valid_inputs = False + break + + if not valid_inputs: + return False + + input_data = [] + input_shapes = [] + for input_name in node.input: + input_node = helper.find_node_by_output_name(g, input_name) + s, d = helper.constant_to_list(input_node) + d = np.reshape(d, s) + input_data.append(d) + input_shapes.append(s) + node_to_del.append(input_node) + + concat_data = np.concatenate(input_data, axis=node.attribute[0].i) + node_data_type = input_node.attribute[0].t.data_type + if concat_data.dtype in [np.int32, np.int64]: + node_data_type = onnx.helper.TensorProto.INT64 + elif concat_data.dtype in [np.float32, np.float64]: + node_data_type = onnx.helper.TensorProto.FLOAT + + new_node = helper.list_to_constant( + node.output[0], + helper.get_shape(concat_data), + helper.flatten_to_list(concat_data), + data_type=node_data_type, + ) + g.node.extend([new_node]) + node_to_del.append(node) + + for input_name in node.input: + val_info = helper.find_value_by_name(g, input_name) + if val_info: + g.value_info.remove(val_info) + + while node_to_del: + node = node_to_del.pop() + g.node.remove(node) + + return True + + +def transpose_constant_folding(g, node): + """Fold constant and transpose nodes to a single constant node.""" + node_to_del = [] + pre_node = helper.find_node_by_output_name(g, node.input[0]) + shape, data = helper.constant_to_list(pre_node) + np_data = np.reshape(data, shape) + permutation = list(node.attribute[0].ints) + + new_data = np.transpose(np_data, permutation) + new_shape = new_data.shape + new_node = helper.list_to_constant( + node.output[0], + new_shape, + new_data.flatten().tolist(), + data_type=pre_node.attribute[0].t.data_type, + ) + + g.node.extend([new_node]) + node_to_del.extend([node, pre_node]) + + pre_val_info = helper.find_value_by_name(g, node.input[0]) + g.value_info.remove(pre_val_info) + + next_val_info = helper.find_value_by_name(g, node.output[0]) + g.value_info.remove(next_val_info) + + new_val_info = onnx.helper.make_tensor_value_info( + node.output[0], pre_node.attribute[0].t.data_type, new_shape + ) + g.value_info.extend([new_val_info]) + + while node_to_del: + node = node_to_del.pop() + g.node.remove(node) 
+ folded = True + + return folded + + +def unsqueeze_constant_folding(g, node): + """Fold constant and unsqueeze nodes to a single constant node.""" + node_to_del = [] + pre_node = helper.find_node_by_output_name(g, node.input[0]) + shape, data = helper.constant_to_list(pre_node) + if type(shape) == int: + np_data = data[0] + else: + np_data = np.reshape(data, shape) + axes = list(node.attribute[0].ints) + axes.sort() + + for dim in axes: + np_data = np.expand_dims(np_data, axis=dim) + new_shape = np_data.shape + new_node = helper.list_to_constant( + node.output[0], + new_shape, + np_data.flatten().tolist(), + data_type=pre_node.attribute[0].t.data_type, + ) + g.node.extend([new_node]) + node_to_del.extend([node, pre_node]) + + pre_val_info = helper.find_value_by_name(g, node.input[0]) + next_val_info = helper.find_value_by_name(g, node.output[0]) + if pre_val_info is not None: + g.value_info.remove(pre_val_info) + else: + print(node.name) + if next_val_info is not None: + g.value_info.remove(next_val_info) + + new_val_info = onnx.helper.make_tensor_value_info( + node.output[0], pre_node.attribute[0].t.data_type, new_shape + ) + g.value_info.extend([new_val_info]) + + while node_to_del: + node = node_to_del.pop() + g.node.remove(node) + + return True + + +def gather_constant_folding(g, node): + """Fold constant and gather nodes to a single constant node.""" + node_to_del = [] + + pre_data_node = helper.find_node_by_output_name(g, node.input[0]) + pre_indices_node = helper.find_node_by_output_name(g, node.input[1]) + + shape, data = helper.constant_to_list(pre_data_node) + indice_shape, indices = helper.constant_to_list(pre_indices_node) + if type(indice_shape) == int: + indices = indices[0] + + np_data = np.reshape(data, shape) + if len(node.attribute) < 1: + axis = 0 + else: + axis = node.attribute[0].i + + new_data = np.take(np_data, indices, axis=axis) + new_shape = new_data.shape + new_node = helper.list_to_constant( + node.output[0], + new_shape, + new_data.flatten().tolist(), + data_type=pre_data_node.attribute[0].t.data_type, + ) + + node_to_del.extend([node, pre_data_node, pre_indices_node]) + g.node.extend([new_node]) + + val_info_1 = helper.find_value_by_name(g, node.input[0]) + val_info_2 = helper.find_value_by_name(g, node.input[1]) + val_info_3 = helper.find_value_by_name(g, node.output[0]) + new_val_info = onnx.helper.make_tensor_value_info( + new_node.output[0], pre_data_node.attribute[0].t.data_type, new_shape + ) + + if val_info_1 is not None: + g.value_info.remove(val_info_1) + if val_info_2 is not None: + g.value_info.remove(val_info_2) + if val_info_3 is not None: + g.value_info.remove(val_info_3) + g.value_info.extend([new_val_info]) + + while node_to_del: + node = node_to_del.pop() + g.node.remove(node) + + return True + + +def add_constant_folding(g, node): + """Fold constant and add nodes to a single constant node.""" + node_to_del = [] + pre_node_1 = helper.find_node_by_output_name(g, node.input[0]) + pre_node_2 = helper.find_node_by_output_name(g, node.input[1]) + if not pre_node_1 or not pre_node_2: + return False + + shape1, data1 = helper.constant_to_list(pre_node_1) + shape2, data2 = helper.constant_to_list(pre_node_2) + np_data1 = np.reshape(data1, shape1) + np_data2 = np.reshape(data2, shape2) + try: + new_data = np.add(np_data1, np_data2) + except Exception: + raise RuntimeError("can't broadcast and add two data sets") + + new_node = helper.list_to_constant( + node.output[0], + new_data.shape, + new_data.flatten().tolist(), + 
data_type=pre_node_1.attribute[0].t.data_type, + ) + + g.node.extend([new_node]) + node_to_del.extend([node, pre_node_1, pre_node_2]) + g.value_info.remove(helper.find_value_by_name(g, pre_node_1.output[0])) + g.value_info.remove(helper.find_value_by_name(g, pre_node_2.output[0])) + folded = True + + while node_to_del: + node = node_to_del.pop() + g.node.remove(node) + + return folded + + +def sqrt_constant_folding(g, node): + """Fold constant and sqrt nodes to a single node.""" + node_to_del = [] + pre_node = helper.find_node_by_output_name(g, node.input[0]) + shape, data = helper.constant_to_list(pre_node) + np_data = np.sqrt(np.reshape(data, shape)) + output_val_info = helper.find_value_by_name(g, node.output[0]) + input_val_info = helper.find_value_by_name(g, node.input[0]) + data_type = output_val_info.type.tensor_type.elem_type + + new_tensor = onnx.helper.make_tensor( + name=node.output[0] + "_data", + data_type=data_type, + dims=shape, + vals=np_data.flatten().tolist(), + ) + new_node = onnx.helper.make_node( + "Constant", [], [node.output[0]], name=node.output[0], value=new_tensor + ) + + g.value_info.remove(input_val_info) + node_to_del.extend([pre_node, node]) + g.node.extend([new_node]) + + while node_to_del: + node = node_to_del.pop() + g.node.remove(node) + + return True + + +def reciprocal_constant_folding(g, node): + """Fold constant and reciprocal nodes to a single constant node.""" + node_to_del = [] + + pre_node = helper.find_node_by_output_name(g, node.input[0]) + shape, data = helper.constant_to_list(pre_node) + data = list(map(lambda x: x if abs(x) > 1.0e-8 else 1.0e-8, data)) + np_data = np.reshape(data, shape) + np_data = np.reciprocal(np_data) + + input_val_info = helper.find_value_by_name(g, node.input[0]) + output_val_info = helper.find_value_by_name(g, node.output[0]) + data_type = output_val_info.type.tensor_type.elem_type + + new_tensor = onnx.helper.make_tensor( + name=node.output[0] + "_data", + data_type=data_type, + dims=shape, + vals=np_data.flatten().tolist(), + ) + new_node = onnx.helper.make_node( + "Constant", [], [node.output[0]], name=node.output[0], value=new_tensor + ) + + node_to_del.extend([node, pre_node]) + g.node.extend([new_node]) + + g.value_info.remove(input_val_info) + + while node_to_del: + node = node_to_del.pop() + g.node.remove(node) + + return True + + +def mul_constant_folding(g, node): + """Fold constant and mul nodes to a single constant node.""" + node_to_del = [] + pre_node_1 = helper.find_node_by_output_name(g, node.input[0]) + pre_node_2 = helper.find_node_by_output_name(g, node.input[1]) + + pre_value_info1 = helper.find_value_by_name(g, node.input[0]) + pre_value_info2 = helper.find_value_by_name(g, node.input[1]) + if pre_value_info1 is None or pre_value_info2 is None: + return False + + shape1, data1 = helper.constant_to_list(pre_node_1) + shape2, data2 = helper.constant_to_list(pre_node_2) + np_data1 = np.reshape(data1, shape1) + np_data2 = np.reshape(data2, shape2) + + try: + new_data = np.multiply(np_data1, np_data2) + except Exception: + raise RuntimeError("can not broadcast and multiply two data sets") + + # Special shape for single element. 
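+    # helper.constant_to_list appears to return a plain int shape of 1 for
+    # a scalar constant, so dims=[] is emitted below to keep the folded
+    # result a 0-d tensor rather than a tensor of shape [1].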
+ if shape1 == 1 and shape2 == 1: + new_shape = [] + else: + new_shape = new_data.shape + + new_tensor = onnx.helper.make_tensor( + name=node.output[0] + "_data", + data_type=pre_node_1.attribute[0].t.data_type, + dims=new_shape, + vals=new_data.flatten().tolist(), + ) + new_node = onnx.helper.make_node( + "Constant", [], [node.output[0]], name=node.output[0], value=new_tensor + ) + + node_to_del.extend([node, pre_node_1, pre_node_2]) + g.node.extend([new_node]) + + g.value_info.remove(pre_value_info1) + g.value_info.remove(pre_value_info2) + + while node_to_del: + node = node_to_del.pop() + g.node.remove(node) + + return True + + +def div_constant_folding(g, node): + """Fold constant and div nodes to a single constant node.""" + node_to_del = [] + pre_node_1 = helper.find_node_by_output_name(g, node.input[0]) + pre_node_2 = helper.find_node_by_output_name(g, node.input[1]) + + pre_value_info1 = helper.find_value_by_name(g, node.input[0]) + pre_value_info2 = helper.find_value_by_name(g, node.input[1]) + if pre_value_info1 is None or pre_value_info2 is None: + return False + + shape1, data1 = helper.constant_to_list(pre_node_1) + shape2, data2 = helper.constant_to_list(pre_node_2) + np_data1 = np.reshape(data1, shape1) + np_data2 = np.reshape(data2, shape2) + + try: + new_data = np.divide(np_data1, np_data2) + except Exception: + raise RuntimeError("cannot broadcast and divide two data sets") + + # Special shape for single element. + if shape1 == 1 and shape2 == 1: + new_shape = [] + else: + new_shape = new_data.shape + + # Keep integer results as int64 when the inputs are int64 (data_type 7). + if pre_node_1.attribute[0].t.data_type == 7: + new_data = new_data.astype("int64") + + new_tensor = onnx.helper.make_tensor( + name=node.output[0] + "_data", + data_type=pre_node_1.attribute[0].t.data_type, + dims=new_shape, + vals=new_data.flatten().tolist(), + ) + new_node = onnx.helper.make_node( + "Constant", [], [node.output[0]], name=node.output[0], value=new_tensor + ) + + node_to_del.extend([node, pre_node_1, pre_node_2]) + g.node.extend([new_node]) + + g.value_info.remove(pre_value_info1) + g.value_info.remove(pre_value_info2) + + while node_to_del: + node = node_to_del.pop() + g.node.remove(node) + + return True + + +def sub_constant_folding(g, node): + """Fold constant and sub nodes to a single constant node.""" + node_to_del = [] + pre_node_1 = helper.find_node_by_output_name(g, node.input[0]) + pre_node_2 = helper.find_node_by_output_name(g, node.input[1]) + pre_val_info_1 = helper.find_value_by_name(g, node.input[0]) + pre_val_info_2 = helper.find_value_by_name(g, node.input[1]) + + shape1, data1 = helper.constant_to_list(pre_node_1) + shape2, data2 = helper.constant_to_list(pre_node_2) + + new_data = np.subtract(data1, data2) + # Special shape for single element.
+ if shape1 == 1 and shape2 == 1: + new_shape = [] + else: + new_shape = new_data.shape + + new_tensor = onnx.helper.make_tensor( + name=node.output[0] + "_data", + data_type=pre_node_1.attribute[0].t.data_type, + dims=new_shape, + vals=helper.flatten_to_list(new_data), + ) + new_node = onnx.helper.make_node( + "Constant", [], [node.output[0]], name=node.output[0], value=new_tensor + ) + + g.node.extend([new_node]) + node_to_del.extend([node, pre_node_1, pre_node_2]) + + g.value_info.remove(pre_val_info_1) + g.value_info.remove(pre_val_info_2) + + while node_to_del: + node = node_to_del.pop() + g.node.remove(node) + + return True + + +def neg_constant_folding(g, node): + """Fold constant and neg nodes to a single constant node.""" + node_to_del = [] + pre_node = helper.find_node_by_output_name(g, node.input[0]) + + shape, data_list = helper.constant_to_list(pre_node) + new_data_list = [-num for num in data_list] + + new_tensor = onnx.helper.make_tensor( + name=pre_node.name + "_neg_tensor", + data_type=pre_node.attribute[0].t.data_type, + dims=shape, + vals=new_data_list, + ) + new_node = onnx.helper.make_node( + "Constant", [], [node.output[0]], name=node.output[0], value=new_tensor + ) + + g.node.extend([new_node]) + node_to_del.extend([pre_node, node]) + g.value_info.remove(helper.find_value_by_name(g, node.input[0])) + + while node_to_del: + g.node.remove(node_to_del.pop()) + + return True + + +def floor_constant_folding(g, node): + """Fold constant and floor nodes to a single constant node.""" + node_to_del = [] + pre_node = helper.find_node_by_output_name(g, node.input[0]) + + shape, data = helper.constant_to_list(pre_node) + new_data = np.floor(data).flatten().tolist() + + if shape == 1: + new_shape = [] + else: + new_shape = shape + + new_tensor = onnx.helper.make_tensor( + name=node.output[0] + "_data", + data_type=pre_node.attribute[0].t.data_type, + dims=new_shape, + vals=helper.flatten_to_list(new_data), + ) + new_node = onnx.helper.make_node( + "Constant", [], [node.output[0]], name=node.output[0], value=new_tensor + ) + + g.node.extend([new_node]) + node_to_del.extend([pre_node, node]) + old_value = helper.find_value_by_name(g, node.input[0]) + if old_value is not None: + g.value_info.remove(old_value) + + while node_to_del: + g.node.remove(node_to_del.pop()) + + return True + + +def bn_constant_folding(g, node): + """Fold a BatchNormalization with all-constant inputs into a single constant node.""" + # Prepare data + node_to_del = [] + input_node = helper.find_node_by_output_name(g, node.input[0]) + scale_node = helper.find_node_by_output_name(g, node.input[1]) + bias_node = helper.find_node_by_output_name(g, node.input[2]) + mean_node = helper.find_node_by_output_name(g, node.input[3]) + var_node = helper.find_node_by_output_name(g, node.input[4]) + + input_value_info = [] + for i in range(5): + input_value_info.append(helper.find_value_by_name(g, node.input[i])) + + if input_value_info[0] is None: + return False + + input_data = helper.constant_to_numpy(input_node) + scale_data = helper.constant_to_numpy(scale_node) + bias_data = helper.constant_to_numpy(bias_node) + mean_data = helper.constant_to_numpy(mean_node) + var_data = helper.constant_to_numpy(var_node) + + epsilon = helper.get_var_attribute_by_name(node, "epsilon", "float") + if epsilon is None: + epsilon = 0.00001 + + # Calculate new node + new_data = ( + scale_data * (input_data - mean_data) / np.sqrt(var_data + epsilon) + + bias_data + ) + + new_node = helper.numpy_to_constant(node.output[0], new_data) + + # Reconnect the graph + node_to_del.extend( + [node, input_node, scale_node, bias_node, mean_node, var_node] + ) + g.node.extend([new_node]) + + for value in
input_value_info: + if value is not None: + g.value_info.remove(value) + + while node_to_del: + node = node_to_del.pop() + g.node.remove(node) + + return True + + +def DequantizeLinear_constant_folding(g, node): + """Fold a DequantizeLinear with constant inputs into a single constant node.""" + # Prepare data + node_to_del = [] + x_node = helper.find_node_by_output_name(g, node.input[0]) + x_scale_node = helper.find_node_by_output_name(g, node.input[1]) + if len(node.input) > 2: + x_zero_point_node = helper.find_node_by_output_name(g, node.input[2]) + else: + x_zero_point_node = None + + input_value_info = [] + for i in range(len(node.input)): + input_value_info.append(helper.find_value_by_name(g, node.input[i])) + + if input_value_info[0] is None: + return False + + x_data = helper.constant_to_numpy(x_node) + x_scale_data = helper.constant_to_numpy(x_scale_node) + if x_zero_point_node is not None: + x_zero_point_data = helper.constant_to_numpy(x_zero_point_node) + else: + x_zero_point_data = np.array([0.0]) + + # Calculate new node + new_data = ( + x_data.astype(np.float32) - x_zero_point_data.astype(np.float32) + ) * x_scale_data + + new_node = helper.numpy_to_constant(node.output[0], new_data) + + # Reconnect the graph + node_to_del.extend([node, x_node, x_scale_node]) + if x_zero_point_node is not None: + node_to_del.append(x_zero_point_node) + g.node.extend([new_node]) + + for value in input_value_info: + if value is not None: + g.value_info.remove(value) + + while node_to_del: + node = node_to_del.pop() + g.node.remove(node) + + return True + + +# Available constant folding names to function map. +constant_folding_nodes = { + "Add": add_constant_folding, + "BatchNormalization": bn_constant_folding, + "Cast": cast_constant_folding, + "Concat": concat_constant_folding, + "DequantizeLinear": DequantizeLinear_constant_folding, + "Div": div_constant_folding, + "Floor": floor_constant_folding, + "Gather": gather_constant_folding, + "Mul": mul_constant_folding, + "Neg": neg_constant_folding, + "Reciprocal": reciprocal_constant_folding, + "ReduceProd": reduceprod_constant_folding, + "Reshape": reshape_constant_input_folding, + "Slice": slice_constant_folding, + "Sqrt": sqrt_constant_folding, + "Sub": sub_constant_folding, + "Transpose": transpose_constant_folding, + "Unsqueeze": unsqueeze_constant_folding, +} diff --git a/tools/deployment/optimizer_scripts/tools/eliminating.py b/tools/deployment/optimizer_scripts/tools/eliminating.py new file mode 100644 index 0000000..7871665 --- /dev/null +++ b/tools/deployment/optimizer_scripts/tools/eliminating.py @@ -0,0 +1,751 @@ +import collections +import struct +import onnx +import numpy as np +from . import other +from . import helper +from . 
import modhelper + from .general_graph import Graph + + +def eliminate_Identify_and_Dropout(g): + """ + Eliminate Identity and Dropout layers + + :param g: the onnx graph + """ + node_to_remove = [] + for node in g.node: + if node.op_type != "Identity" and node.op_type != "Dropout": + continue + # If this node is the last, leave it to `remove_useless_last_nodes` + if helper.find_output_by_name(g, node.output[0]) is not None: + continue + # Replace the parents in all the following nodes + following_nodes = helper.find_following_nodes_by_input_value_name( + g, node.output[0] + ) + for following_node in following_nodes: + modhelper.replace_node_input( + following_node, node.output[0], node.input[0] + ) + # Delete value info + value_between = helper.find_value_by_name(g, node.output[0]) + try: + g.value_info.remove(value_between) + except Exception: + print("No value info to delete while eliminating identity layers.") + # Node is waiting for elimination + node_to_remove.append(node) + for node in node_to_remove: + g.node.remove(node) + + +# Remove last useless nodes +def remove_useless_last_nodes(g): + """Remove useless nodes from the tail of the graph""" + USELESS = [ + "Reshape", + "Identity", + "Transpose", + "Flatten", + "Dropout", + "Mystery", + "Constant", + "Squeeze", + "Unsqueeze", + "Softmax", + ] + graph = Graph(g) + todo = collections.deque() + for node in graph.output_nodes: + if len(node.children) == 0: + todo.append(node) + node_to_remove = [] + while todo: + # BFS find nodes to remove + cur_node = todo.popleft() + if cur_node.proto is None: + continue + if cur_node.proto.op_type not in USELESS: + continue + # Find the output + cur_node_output = helper.find_output_by_name( + g, cur_node.proto.output[0] + ) + for cur_input in cur_node.parents: + cur_input.children.remove(cur_node) + if len(cur_input.children) == 0: + todo.append(cur_input) + if cur_node_output is not None: + cur_input_output = helper.find_value_by_name( + g, cur_input.proto.output[0] + ) + cur_input_output_in_output = helper.find_output_by_name( + g, cur_input.proto.output[0] + ) + if ( + cur_input_output is not None + and cur_input_output_in_output is None + ): + g.output.extend([cur_input_output]) + node_to_remove.append(cur_node.proto) + try: + g.value_info.remove( + helper.find_value_by_name(g, cur_node.proto.output[0]) + ) + except ValueError: + pass + if cur_node_output is not None: + g.output.remove(cur_node_output) + cur_node.proto = None + cur_node.parents.clear() + for node in node_to_remove: + g.node.remove(node) + + +###################################### +# TF only optimization passes # +###################################### + + +def eliminate_shape_changing_after_input(g): + """ + Eliminate shape-changing nodes (Reshape, Transpose, Flatten, etc.) right after the input and fold the change into the input shape instead + + :param g: the onnx graph + """ + node_to_remove = [] + REMOVE_LIST = [ + "Reshape", + "Transpose", + "Flatten", + "Dropout", + "Squeeze", + "Unsqueeze", + ] + for node in g.node: + # Find an input and the shape node + if node.op_type not in REMOVE_LIST: + continue + old_input = helper.find_input_by_name(g, node.input[0]) + if old_input is None: + continue + # If the input is used by multiple nodes, skip. + counter = 0 + for tnode in g.node: + if old_input.name in tnode.input: + counter += 1 + if counter > 1: + continue + # Remove Weight if any.
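+ # Note (added comment): each supported op type below bakes its shape change + # into a replacement graph input, then deletes itself and its shape weights.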
+ output_val_info = helper.find_value_by_name(g, node.output[0]) + + if node.op_type == "Reshape": + shape_node = helper.find_node_by_output_name(g, node.input[1]) + if shape_node.op_type != "Constant": + continue + + # manuelly set the input shape + shape_info = helper.find_value_by_name(g, shape_node.output[0]) + old_size, old_shape = helper.find_size_shape_from_value(shape_info) + + _, new_shape = helper.constant_to_list(shape_node) + for i in range(len(new_shape)): + if new_shape[i] == -1: + dim = int(old_size // np.prod(new_shape) * (-1)) + new_shape[i] = dim + new_input = onnx.helper.make_tensor_value_info( + output_val_info.name, + output_val_info.type.tensor_type.elem_type, + new_shape, + ) + + node_to_remove.append(node) + + shape_outputs = helper.find_nodes_by_input_name( + g, shape_node.output[0] + ) + if len(shape_outputs) == 1: + node_to_remove.append(shape_node) + g.value_info.remove( + helper.find_value_by_name(g, shape_node.output[0]) + ) + + g.input.remove(old_input) + g.input.extend([new_input]) + g.value_info.remove(output_val_info) + elif node.op_type == "Transpose": + permutation = list(node.attribute[0].ints) + pre_shape = helper.get_shape_from_value_info(old_input) + new_shape = [pre_shape[i] for i in permutation] + + new_input = onnx.helper.make_tensor_value_info( + output_val_info.name, + output_val_info.type.tensor_type.elem_type, + new_shape, + ) + + node_to_remove.append(node) + + g.input.remove(old_input) + g.input.extend([new_input]) + g.value_info.remove(output_val_info) + elif node.op_type == "Flatten": + axis = node.attribute[0].int + pre_shape = helper.get_shape_from_value_info(old_input) + dim_1, dim_2 = 1, 1 + if axis == 0: + dim_1 = 1 + dim_2 = np.prod(pre_shape) + else: + dim_1 = np.prod(pre_shape[:axis]).astype(int) + dim_2 = np.prod(pre_shape[axis:]).astype(int) + new_shape = [dim_1, dim_2] + + new_input = onnx.helper.make_tensor_value_info( + output_val_info.name, + output_val_info.type.tensor_type.elem_type, + new_shape, + ) + + node_to_remove.append(node) + + g.input.remove(old_input) + g.input.extend([new_input]) + g.value_info.remove(output_val_info) + elif node.op_type == "Dropout": + g.input.remove(old_input) + g.input.extend([output_val_info]) + g.value_info.remove(output_val_info) + + node_to_remove.append(node) + elif node.op_type == "Squeeze": + axis = list(node.attribute[0].ints) + pre_shape = helper.get_shape_from_value_info(old_input) + for pos in sorted(axis)[::-1]: + if pre_shape[pos] != 1: + raise RuntimeError("invalid axis for squeeze") + else: + pre_shape.pop(pos) + new_shape = pre_shape + + new_input = onnx.helper.make_tensor_value_info( + output_val_info.name, + output_val_info.type.tensor_type.elem_type, + new_shape, + ) + + node_to_remove.append(node) + + g.input.remove(old_input) + g.input.extend([new_input]) + g.value_info.remove(output_val_info) + elif node.op_type == "Unsqueeze": + axis = list(node.attribute[0].ints) + pre_shape = helper.get_shape_from_value_info(old_input) + new_shape = pre_shape + for pos in axis: + new_shape.insert(pos, 1) + new_input = onnx.helper.make_tensor_value_info( + output_val_info.name, + output_val_info.type.tensor_type.elem_type, + new_shape, + ) + node_to_remove.append(node) + + g.input.remove(old_input) + g.input.extend([new_input]) + g.value_info.remove(output_val_info) + else: + pass + + for node in node_to_remove: + g.node.remove(node) + + other.topological_sort(g) + + +def eliminate_Reshape_Cast(g): + """Eliminate the cast layer for shape of Reshape layer + + :param g: the onnx graph + 
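The shape constant is converted to int64 in place so that the Cast node can be removed. +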
""" + # Find all reshape layers + for node in g.node: + if node.op_type != "Reshape": + continue + prev_node = helper.find_node_by_output_name(g, node.input[1]) + if prev_node.op_type != "Cast": + continue + reshape_node = node + cast_node = prev_node + weight_node = helper.find_node_by_output_name(g, cast_node.input[0]) + if weight_node is None: + raise RuntimeError("Unexpected None before Cast-Reshape.") + weight_node.attribute[0].t.data_type = 7 + if weight_node.attribute[0].t.raw_data: + raw_data = weight_node.attribute[0].t.raw_data + int_data = [i[0] for i in struct.iter_unpack("i", raw_data)] + raw_data = struct.pack("q" * len(int_data), *int_data) + elif ( + len(weight_node.attribute[0].t.int64_data) > 0 + or len(weight_node.attribute[0].t.int32_data) > 0 + ): + # It's already int. Do nothing + pass + else: + raise NotImplementedError() + # Change Value info + origin_weight_out = helper.find_value_by_name(g, weight_node.output[0]) + weight_node.output.pop() + weight_node.output.extend([reshape_node.input[1]]) + # Delete + g.value_info.remove(origin_weight_out) + g.node.remove(cast_node) + + +def eliminate_Cast_after_input(g): + """Eliminate the cast layer right after the input + + :param g: the onnx graph + """ + node_to_remove = [] + for node in g.node: + if node.op_type != "Cast": + continue + old_input = helper.find_input_by_name(g, node.input[0]) + if old_input is None: + continue + next_val_info = helper.find_value_by_name(g, node.output[0]) + shape = helper.get_shape_from_value_info(next_val_info) + new_val_info = onnx.helper.make_tensor_value_info( + next_val_info.name, node.attribute[0].i, shape + ) + # Delete old value_info + g.input.remove(old_input) + g.value_info.remove(next_val_info) + # Append nodes to node_to_remove + node_to_remove.append(node) + # Add new input + g.input.extend([new_val_info]) + for node in node_to_remove: + g.node.remove(node) + + +def eliminate_consecutive_Cast(g): + """If two cast is next to each other, remove the first cast + + :param g: the onnx graph + """ + node_to_remove = [] + for node in g.node: + if node.op_type != "Cast": + continue + first_node = helper.find_node_by_output_name(g, node.input[0]) + if first_node is None or first_node.op_type != "Cast": + continue + # Here we have two consecutive Cast Node + # Reset the input of the later node + node.input[0] = first_node.input[0] + # Remove the first node and its output value info + node_to_remove.append(first_node) + first_output = helper.find_value_by_name(g, first_node.output[0]) + g.value_info.remove(first_output) + for node in node_to_remove: + g.node.remove(node) + + +def eliminate_Squeeze_before_Reshape(g): + """If Squeeze and Reshape is next to each other, remove the first node + + :param g: the onnx graph + """ + node_to_remove = [] + for node in g.node: + if node.op_type != "Reshape": + continue + first_node = helper.find_node_by_output_name(g, node.input[0]) + if not first_node: + continue + if first_node.op_type != "Squeeze": + continue + # Here we have two consecutive Cast Node + # Reset the input of the later node + node.input[0] = first_node.input[0] + # Remove the first node and its output value info + node_to_remove.append(first_node) + first_output = helper.find_value_by_name(g, first_node.output[0]) + g.value_info.remove(first_output) + for node in node_to_remove: + g.node.remove(node) + + +def eliminate_no_children_input(g): + """Eliminate inputs with no children at all.""" + # Create a set of input names + input_names = set([i.name for i in g.input]) + # If a name 
is used in any node, remove this name from the set. + for n in g.node: + for i in n.input: + input_names.discard(i) + # Remove the inputs with the left names. + for i in input_names: + info = helper.find_input_by_name(g, i) + g.input.remove(info) + + +def eliminate_consecutive_reshape(g): + """Replace consecutive reshape nodes by a single node.""" + node_to_del = [] + for node in g.node: + if node.op_type != "Reshape": + continue + pre_data_node = helper.find_node_by_output_name(g, node.input[0]) + pre_shape_node = helper.find_node_by_output_name(g, node.input[1]) + if not pre_data_node or not pre_shape_node: + continue + if pre_shape_node.op_type != "Constant": + continue + if pre_data_node.op_type != "Reshape": + continue + + pre_pre_shape_node = helper.find_node_by_output_name( + g, pre_data_node.input[1] + ) + if pre_pre_shape_node.op_type != "Constant": + continue + + new_reshape_node = onnx.helper.make_node( + "Reshape", + [pre_data_node.input[0], node.input[1]], + [node.output[0]], + name=node.output[0], + ) + + g.node.extend([new_reshape_node]) + node_to_del.append(node) + node_to_del.append(pre_data_node) + node_to_del.append(pre_pre_shape_node) + + val_info_to_del1 = helper.find_value_by_name(g, node.input[0]) + val_info_to_del2 = helper.find_value_by_name(g, pre_data_node.input[1]) + g.value_info.remove(val_info_to_del1) + g.value_info.remove(val_info_to_del2) + + while node_to_del: + node = node_to_del.pop() + g.node.remove(node) + + +def eliminate_single_input_Concat(g): + """ + Eliminate single input Concat layers + + :param g: the onnx graph + """ + node_to_remove = [] + for node in g.node: + if node.op_type != "Concat": + continue + # If this node has more than 1 input, continue. + if len(node.input) > 1: + continue + # If this node is output node, set its previous node as output nodes. + if helper.find_output_by_name(g, node.output[0]) is not None: + todel_output = helper.find_output_by_name(g, node.output[0]) + the_input_value = helper.find_value_by_name(g, node.input[0]) + g.output.remove(todel_output) + g.output.extend([the_input_value]) + node_to_remove.append(node) + continue + # Replace the parents in all the following nodes + following_nodes = helper.find_following_nodes_by_input_value_name( + g, node.output[0] + ) + for following_node in following_nodes: + modhelper.replace_node_input( + following_node, node.output[0], node.input[0] + ) + # Delete value info + value_between = helper.find_value_by_name(g, node.output[0]) + try: + g.value_info.remove(value_between) + except Exception: + print("No value info to delete while eliminating identity layers.") + # Node is waiting for elimination + node_to_remove.append(node) + for node in node_to_remove: + g.node.remove(node) + + +def eliminate_nop_Maxpool_and_AveragePool(g): + """ + Eliminate do nothing MaxPool and AveragePool layers. + Those layers have valid padding, 1x1 kernel and [1,1] strides. + + :param g: the onnx graph + """ + node_to_remove = [] + for node in g.node: + if node.op_type != "MaxPool" and node.op_type != "AveragePool": + continue + # If this node is actually working, continue. + kernel = helper.get_list_attribute_by_name(node, "kernel_shape", "int") + pads = helper.get_list_attribute_by_name(node, "pads", "int") + strides = helper.get_list_attribute_by_name(node, "strides", "int") + if kernel != [1, 1] or pads != [0, 0, 0, 0] or strides != [1, 1]: + continue + # If this node is the output, set its previous node as output nodes. 
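+ # Note (added comment): a pool with a 1x1 kernel, zero pads and unit strides + # copies its input, so the graph then exposes the pool's input value instead.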
+ if helper.find_output_by_name(g, node.output[0]) is not None: + todel_output = helper.find_output_by_name(g, node.output[0]) + the_input_value = helper.find_value_by_name(g, node.input[0]) + g.output.remove(todel_output) + g.output.extend([the_input_value]) + node_to_remove.append(node) + continue + # Replace the parents in all the following nodes + following_nodes = helper.find_following_nodes_by_input_value_name( + g, node.output[0] + ) + for following_node in following_nodes: + modhelper.replace_node_input( + following_node, node.output[0], node.input[0] + ) + # Delete value info + value_between = helper.find_value_by_name(g, node.output[0]) + try: + g.value_info.remove(value_between) + except Exception: + print("No value info to delete while eliminating identity layers.") + # Node is waiting for elimination + node_to_remove.append(node) + for node in node_to_remove: + g.node.remove(node) + + +def eliminate_trivial_maxpool(g): + node_to_del = [] + for node in g.node: + if node.op_type != "MaxPool": + continue + pads = None + strides = None + dilation = None + kernel_shape = None + for att in node.attribute: + if att.name == "pads": + pads = list(att.ints) + elif att.name == "strides": + strides = list(att.ints) + elif att.name == "kernel_shape": + kernel_shape = list(att.ints) + elif att.name == "dilation": + dilation = list(att.ints) + else: + pass + if pads and any([pad != 0 for pad in pads]): + continue + if strides and any([stride != 1 for stride in strides]): + continue + if dilation and any([dila != 1 for dila in dilation]): + continue + if any([dim != 1 for dim in kernel_shape]): + continue + + node_to_del.append(node) + + next_nodes = helper.find_nodes_by_input_name(g, node.output[0]) + + if next_nodes[0] is None: + output_value = helper.find_output_by_name(g, node.output[0]) + if not output_value: + continue + else: + pre_val_info = helper.find_value_by_name(g, node.input[0]) + g.output.extend([pre_val_info]) + g.output.remove(output_value) + + for next_node in next_nodes: + modhelper.replace_node_input( + next_node, node.output[0], node.input[0] + ) + + next_val_info = helper.find_value_by_name(g, node.output[0]) + g.value_info.remove(next_val_info) + + while node_to_del: + g.node.remove(node_to_del.pop()) + + other.topological_sort(g) + + +def eliminate_empty_value_infos(g): + to_remove = [] + for value_info in g.value_info: + if len(value_info.type.tensor_type.shape.dim) == 0: + to_remove.append(value_info) + for value_info in to_remove: + g.value_info.remove(value_info) + + +def eliminate_nop_pads(g): + node_to_remove = [] + for node in g.node: + if node.op_type != "Pad": + continue + # Check if the Pad is empty or not + pads_node = helper.find_node_by_output_name(g, node.input[1]) + pads_np = helper.constant_to_numpy(pads_node) + all_zero = True + for value in pads_np: + if value != 0: + all_zero = False + if not all_zero: + continue + # If this node is the output, set its previous node as output nodes. 
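+ # Note (added comment): same rewiring as the pooling case above, but also + # check that the Pad input is not already a graph output before re-exposing it.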
+ if helper.find_output_by_name(g, node.output[0]) is not None: + todel_output = helper.find_output_by_name(g, node.output[0]) + g.output.remove(todel_output) + if helper.find_output_by_name(g, node.input[0]) is None: + the_input_value = helper.find_value_by_name(g, node.input[0]) + if the_input_value is not None: + g.output.extend([the_input_value]) + # Replace the parents in all the following nodes + following_nodes = helper.find_following_nodes_by_input_value_name( + g, node.output[0] + ) + for following_node in following_nodes: + modhelper.replace_node_input( + following_node, node.output[0], node.input[0] + ) + # Delete value info + value_between = helper.find_value_by_name(g, node.output[0]) + try: + g.value_info.remove(value_between) + except Exception: + helper.logger.info( + "No value info to delete while eliminating nop Pad nodes." + ) + # Node is waiting for elimination + node_to_remove.append(node) + for node in node_to_remove: + g.node.remove(node) + + +def eliminate_trivial_elementwise_calculation(g): + """Eliminate Add, Sub, Mul, Div nodes which do nothing.""" + node_to_remove = [] + for node in g.node: + weight_node = None + if node.op_type == "Add" or node.op_type == "Sub": + # For Add and Sub, check if the weights are 0s. + weight_node = helper.find_node_by_output_name(g, node.input[1]) + if weight_node is None or weight_node.op_type != "Constant": + continue + weight_np = helper.constant_to_numpy(weight_node) + if np.any(weight_np): + continue + elif node.op_type == "Mul" or node.op_type == "Div": + # For Mul and Div, check if the weights are 1s. + weight_node = helper.find_node_by_output_name(g, node.input[1]) + if weight_node is None or weight_node.op_type != "Constant": + continue + weight_np = helper.constant_to_numpy(weight_node) + weight_np = weight_np - 1 + if np.any(weight_np): + continue + else: + # For other nodes, just skip + continue + # Remove the node + node_to_remove.append(node) + output_value_info = helper.find_value_by_name(g, node.output[0]) + if output_value_info is not None: + g.value_info.remove(output_value_info) + # Replace next node input if any. + following_nodes = helper.find_following_nodes_by_input_value_name( + g, node.output[0] + ) + for following_node in following_nodes: + modhelper.replace_node_input( + following_node, node.output[0], node.input[0] + ) + todel_output = helper.find_output_by_name(g, node.output[0]) + if todel_output is not None: + g.output.remove(todel_output) + previous_output = helper.find_output_by_name(g, node.input[0]) + if previous_output is None: + the_input_value = helper.find_value_by_name(g, node.input[0]) + g.output.extend([the_input_value]) + # Delete the constant node if it is not used by other nodes + constant_following_nodes = ( + helper.find_following_nodes_by_input_value_name( + g, weight_node.output[0] + ) + ) + if len(constant_following_nodes) == 1: + node_to_remove.append(weight_node) + output_value_info = helper.find_value_by_name( + g, weight_node.output[0] + ) + if output_value_info is not None: + g.value_info.remove(output_value_info) + for node in node_to_remove: + g.node.remove(node) + + +def eliminate_nop_cast(g): + """Eliminate do nothing Cast nodes.""" + node_to_remove = [] + for node in g.node: + if node.op_type != "Cast": + continue + # Get input value_info + input_value = helper.find_value_by_name(g, node.input[0]) + if input_value is None: + helper.logger.debug( + f"Cannot find the input value_info for Cast node {node.name}. " + "Skip elimination check." + )
+ continue + # Get output value_info + output_value = helper.find_value_by_name(g, node.output[0]) + if output_value is None: + output_value = helper.find_output_by_name(g, node.output[0]) + if output_value is None: + helper.logger.debug( + f"Cannot find the output value_info for Cast node {node.name}." + " Skip elimination check." + ) + continue + # Compare the type. + if ( + input_value.type.tensor_type.elem_type + != output_value.type.tensor_type.elem_type + ): + continue + # If this node is the output, set its previous node as output nodes. + if helper.find_output_by_name(g, node.output[0]) is not None: + todel_output = helper.find_output_by_name(g, node.output[0]) + g.output.remove(todel_output) + if helper.find_output_by_name(g, node.input[0]) is None: + the_input_value = helper.find_value_by_name(g, node.input[0]) + if the_input_value is not None: + g.output.extend([the_input_value]) + # Replace the parents in all the following nodes + following_nodes = helper.find_following_nodes_by_input_value_name( + g, node.output[0] + ) + for following_node in following_nodes: + modhelper.replace_node_input( + following_node, node.output[0], node.input[0] + ) + # Delete value info + value_between = helper.find_value_by_name(g, node.output[0]) + if value_between is not None: + g.value_info.remove(value_between) + # Node is waiting for elimination + node_to_remove.append(node) + for node in node_to_remove: + g.node.remove(node) diff --git a/tools/deployment/optimizer_scripts/tools/fusing.py b/tools/deployment/optimizer_scripts/tools/fusing.py new file mode 100644 index 0000000..e19ca94 --- /dev/null +++ b/tools/deployment/optimizer_scripts/tools/fusing.py @@ -0,0 +1,1201 @@ +import onnx.helper +import numpy as np +from . import helper +from .other import topological_sort +from .modhelper import delete_value_with_name_if_exists, replace_node_input + + +def fuse_Transpose_into_Constant(g): + """ + Fuse Transpose layers into the Constant layers before them + + :param g: the onnx graph + """ + node_to_remove = [] + for node in g.node: + if node.op_type != "Transpose": + continue + prev_node = helper.find_node_by_output_name(g, node.input[0]) + if prev_node is None or prev_node.op_type != "Constant": + continue + + pre_shape, data_list = helper.constant_to_list(prev_node) + w = np.reshape(data_list, pre_shape) + w = w.transpose(node.attribute[0].ints) + new_shape = w.shape + w = w.flatten() + + new_tensor = onnx.helper.make_tensor( + name=prev_node.name + "_data", + data_type=prev_node.attribute[0].t.data_type, + dims=new_shape, + vals=w.tolist(), + ) + new_node = onnx.helper.make_node( + "Constant", + [], + [node.output[0]], + name=node.output[0], + value=new_tensor, + ) + + value_between = helper.find_value_by_name(g, prev_node.output[0]) + value_type = value_between.type.tensor_type.elem_type + g.value_info.remove(value_between) + + g.node.extend([new_node]) + node_to_remove.append(node) + node_to_remove.append(prev_node) + + if new_node.output[0] not in [i.name for i in g.value_info]: + new_value = onnx.helper.make_tensor_value_info( + name=new_node.output[0], elem_type=value_type, shape=new_shape + ) + g.value_info.extend([new_value]) + if new_node.output[0]: + val_info_to_del = helper.find_value_by_name( + g, new_node.output[0] + ) + g.value_info.remove(val_info_to_del) + + for node in node_to_remove: + g.node.remove(node) + + topological_sort(g) + + +def fuse_Add_into_Conv(g): + """ + Fuse Add layers into the Conv layers before them + + :param g: the onnx graph + """ + node_to_remove = [] +
for node in g.node: + if node.op_type != "Add": + continue + conv_node = helper.find_node_by_output_name(g, node.input[0]) + cons_node = helper.find_node_by_output_name(g, node.input[1]) + if conv_node is None or cons_node is None: + continue + if conv_node.op_type != "Conv" or cons_node.op_type != "Constant": + continue + if len(conv_node.input) > 2: + continue + # This layer should be fused. Connect constant node into convolution. + add_node = node + conv_node.input.extend([cons_node.output[0]]) + old_value = helper.find_value_by_name(g, conv_node.output[0]) + conv_node.output[0] = add_node.output[0] + # Remove origin conv_node_output + g.value_info.remove(old_value) + # Remove current node + node_to_remove.append(add_node) + # Apply changes to the model + for node in node_to_remove: + g.node.remove(node) + + +def fuse_BN_into_Gemm(g): + """Fuse the following BN into the previous Gemm. + + :param g: the graph + """ + node_to_remove = [] + for node in g.node: + # Check for BN and Gemm + if node.op_type != "BatchNormalization": + continue + gemm_node = helper.find_node_by_output_name(g, node.input[0]) + if gemm_node is None: + continue + if gemm_node.op_type != "Gemm": + continue + if ( + len( + helper.find_following_nodes_by_input_value_name( + g, gemm_node.output[0] + ) + ) + > 1 + ): + continue + bn_node = node + # Get original weights + gemm_b_node = helper.find_node_by_output_name(g, gemm_node.input[1]) + gemm_b = helper.constant_to_numpy(gemm_b_node) + gemm_c_node = helper.find_node_by_output_name(g, gemm_node.input[2]) + gemm_c = helper.constant_to_numpy(gemm_c_node) + bn_scale_node = helper.find_node_by_output_name(g, bn_node.input[1]) + bn_scale = helper.constant_to_numpy(bn_scale_node) + bn_bias_node = helper.find_node_by_output_name(g, bn_node.input[2]) + bn_bias = helper.constant_to_numpy(bn_bias_node) + bn_mean_node = helper.find_node_by_output_name(g, bn_node.input[3]) + bn_mean = helper.constant_to_numpy(bn_mean_node) + bn_var_node = helper.find_node_by_output_name(g, bn_node.input[4]) + bn_var = helper.constant_to_numpy(bn_var_node) + # Apply attributes + # epsilon + epsilon = helper.get_attribute_by_name(bn_node, "epsilon") + if epsilon is None: + epsilon = 0.00001 + else: + epsilon = epsilon.f + bn_var = bn_var + epsilon + # alpha + alpha = helper.get_attribute_by_name(gemm_node, "alpha") + if alpha is None: + alpha = 1 + else: + alpha = alpha.f + gemm_b = gemm_b * alpha + # beta + beta = helper.get_attribute_by_name(gemm_node, "beta") + if beta is None: + beta = 1 + else: + beta = beta.f + gemm_c = gemm_c * beta + # transA + transA = helper.get_attribute_by_name(gemm_node, "transA") + if transA is not None and transA.i == 1: + raise RuntimeError("Do not support transA") + # transB + transB = helper.get_attribute_by_name(gemm_node, "transB") + if transB is not None and transB.i == 1: + gemm_b = gemm_b.transpose() + # Calculate new weights + new_gemm_b = gemm_b * bn_scale / np.sqrt(bn_var) + new_gemm_c = (gemm_c - bn_mean) * bn_scale / np.sqrt(bn_var) + bn_bias + # Replace original weights + new_gemm_b_node = helper.numpy_to_constant( + gemm_b_node.name + "_fused", new_gemm_b + ) + new_gemm_c_node = helper.numpy_to_constant( + gemm_c_node.name + "_fused", new_gemm_c + ) + g.node.extend([new_gemm_b_node, new_gemm_c_node]) + node_to_remove.extend( + [ + gemm_b_node, + gemm_c_node, + bn_node, + bn_scale_node, + bn_bias_node, + bn_mean_node, + bn_var_node, + ] + ) + # Modify attributes + # alpha + alpha = helper.get_attribute_by_name(gemm_node, "alpha") + if alpha is not None: 
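+ # Note (added comment): alpha has already been folded into the fused + # weights above, so reset it to the identity value to avoid applying it twice.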
+ alpha.f = 1.0 + # beta + beta = helper.get_attribute_by_name(gemm_node, "beta") + if beta is not None: + beta.f = 1.0 + # transB + transB = helper.get_attribute_by_name(gemm_node, "transB") + if transB is not None: + transB.i = 0 + # Connect the new graph + gemm_node.input[1] = new_gemm_b_node.output[0] + gemm_node.input[2] = new_gemm_c_node.output[0] + gemm_b_value = helper.find_value_by_name(g, gemm_b_node.output[0]) + gemm_c_value = helper.find_value_by_name(g, gemm_c_node.output[0]) + gemm_b_value.name = new_gemm_b_node.output[0] + gemm_c_value.name = new_gemm_c_node.output[0] + gemm_value = helper.find_value_by_name(g, gemm_node.output[0]) + g.value_info.remove(gemm_value) + gemm_node.output[0] = bn_node.output[0] + for i in range(1, 5): + value = helper.find_value_by_name(g, bn_node.input[i]) + g.value_info.remove(value) + # Remove useless nodes + for node in node_to_remove: + g.node.remove(node) + topological_sort(g) + + +def fuse_BN_with_Reshape_into_Gemm(g): + """Fuse the following BN into the previous Gemm, even with Reshape or \\ + Squeeze and Unsqueeze surrounding. + + :param g: the graph + """ + node_to_remove = [] + for node in g.node: + # Check for BN and Gemm pattern: Gemm A BN B + # Find BatchNorm Node + if node.op_type != "BatchNormalization": + continue + bn_node = node + # Find A Node + a_node = helper.find_node_by_output_name(g, node.input[0]) + if a_node is None or len(a_node.input) == 0: + continue + # Find Gemm Node + gemm_node = helper.find_node_by_output_name(g, a_node.input[0]) + if gemm_node is None or gemm_node.op_type != "Gemm": + continue + # Find B Node + b_node_list = helper.find_following_nodes_by_input_value_name( + g, bn_node.output[0] + ) + if len(b_node_list) == 0: + the_output = helper.find_output_by_name(g, bn_node.output[0]) + if the_output is None: + continue + b_node = None + elif len(b_node_list) > 1: + continue + else: + b_node = b_node_list[0] + # Check for branches + if ( + len( + helper.find_following_nodes_by_input_value_name( + g, gemm_node.output[0] + ) + ) + > 1 + ): + continue + if ( + len( + helper.find_following_nodes_by_input_value_name( + g, a_node.output[0] + ) + ) + > 1 + ): + continue + # Check type of A + if a_node.op_type == "Unsqueeze": + axes = helper.get_attribute_by_name(a_node, "axes") + if axes.ints != [2]: + continue + elif a_node.op_type == "Reshape": + a = helper.constant_to_list( + helper.find_node_by_output_name(g, a_node.input[1]) + )[1] + if len(a) != 3 or a[2] != 1: + continue + else: + continue + # Check type of B + if b_node is None: + pass + elif b_node.op_type == "Flatten": + pass + elif b_node.op_type == "Squeeze": + axes = helper.get_attribute_by_name(a_node, "axes") + if axes.ints != [2]: + continue + elif b_node.op_type == "Reshape": + a = helper.constant_to_list( + helper.find_node_by_output_name(g, b_node.input[1]) + )[1] + if len(a) != 2: + continue + else: + continue + # Construct new Nodes + # Get original weights + gemm_b_node = helper.find_node_by_output_name(g, gemm_node.input[1]) + gemm_b = helper.constant_to_numpy(gemm_b_node) + gemm_c_node = helper.find_node_by_output_name(g, gemm_node.input[2]) + gemm_c = helper.constant_to_numpy(gemm_c_node) + bn_scale_node = helper.find_node_by_output_name(g, bn_node.input[1]) + bn_scale = helper.constant_to_numpy(bn_scale_node) + bn_bias_node = helper.find_node_by_output_name(g, bn_node.input[2]) + bn_bias = helper.constant_to_numpy(bn_bias_node) + bn_mean_node = helper.find_node_by_output_name(g, bn_node.input[3]) + bn_mean = 
helper.constant_to_numpy(bn_mean_node) + bn_var_node = helper.find_node_by_output_name(g, bn_node.input[4]) + bn_var = helper.constant_to_numpy(bn_var_node) + # Apply attributes + # epsilon + epsilon = helper.get_attribute_by_name(bn_node, "epsilon") + if epsilon is None: + epsilon = 0.00001 + else: + epsilon = epsilon.f + bn_var = bn_var + epsilon + # alpha + alpha = helper.get_attribute_by_name(gemm_node, "alpha") + if alpha is None: + alpha = 1 + else: + alpha = alpha.f + gemm_b = gemm_b * alpha + # beta + beta = helper.get_attribute_by_name(gemm_node, "beta") + if beta is None: + beta = 1 + else: + beta = beta.f + gemm_c = gemm_c * beta + # transA + transA = helper.get_attribute_by_name(gemm_node, "transA") + if transA is not None and transA.i == 1: + raise RuntimeError("Do not support transA") + # transB + transB = helper.get_attribute_by_name(gemm_node, "transB") + if transB is not None and transB.i == 1: + gemm_b = gemm_b.transpose() + # Calculate new weights + new_gemm_b = gemm_b * bn_scale / np.sqrt(bn_var) + new_gemm_c = (gemm_c - bn_mean) * bn_scale / np.sqrt(bn_var) + bn_bias + # Replace original weights + new_gemm_b_node = helper.numpy_to_constant( + gemm_b_node.name + "_fused", new_gemm_b + ) + new_gemm_c_node = helper.numpy_to_constant( + gemm_c_node.name + "_fused", new_gemm_c + ) + g.node.extend([new_gemm_b_node, new_gemm_c_node]) + # Modify attributes + # alpha + alpha = helper.get_attribute_by_name(gemm_node, "alpha") + if alpha is not None: + alpha.f = 1.0 + # beta + beta = helper.get_attribute_by_name(gemm_node, "beta") + if beta is not None: + beta.f = 1.0 + # transB + transB = helper.get_attribute_by_name(gemm_node, "transB") + if transB is not None: + transB.i = 0 + # Remove useless nodes + node_to_remove.extend( + [ + gemm_b_node, + gemm_c_node, + bn_node, + bn_scale_node, + bn_bias_node, + bn_mean_node, + bn_var_node, + a_node, + ] + ) + if a_node.op_type == "Reshape": + node_to_remove.append( + helper.find_node_by_output_name(g, a_node.input[1]) + ) + if b_node is not None: + node_to_remove.append(b_node) + if b_node.op_type == "Reshape": + node_to_remove.append( + helper.find_node_by_output_name(g, b_node.input[1]) + ) + # Delete useless value infos + value = helper.find_value_by_name(g, a_node.output[0]) + g.value_info.remove(value) + if a_node.op_type == "Reshape": + value = helper.find_value_by_name(g, a_node.input[1]) + g.value_info.remove(value) + for i in range(1, 5): + value = helper.find_value_by_name(g, bn_node.input[i]) + g.value_info.remove(value) + value = helper.find_value_by_name(g, bn_node.output[0]) + if value is not None: + g.value_info.remove(value) + if b_node is not None: + value = helper.find_value_by_name(g, gemm_node.output[0]) + g.value_info.remove(value) + if b_node.op_type == "Reshape": + value = helper.find_value_by_name(g, b_node.input[1]) + g.value_info.remove(value) + # Connect the new graph + # Connect Gemm new weights + gemm_node.input[1] = new_gemm_b_node.output[0] + gemm_node.input[2] = new_gemm_c_node.output[0] + gemm_b_value = helper.find_value_by_name(g, gemm_b_node.output[0]) + gemm_c_value = helper.find_value_by_name(g, gemm_c_node.output[0]) + gemm_b_value.name = new_gemm_b_node.output[0] + gemm_b_value.type.tensor_type.shape.dim[ + 0 + ].dim_value = new_gemm_b.shape[0] + gemm_b_value.type.tensor_type.shape.dim[ + 1 + ].dim_value = new_gemm_b.shape[1] + gemm_c_value.name = new_gemm_c_node.output[0] + if b_node is None: + # If b node is None, set the Gemm output as the graph output + output_value = 
helper.find_output_by_name(g, bn_node.output[0]) + g.output.remove(output_value) + g.output.extend( + [helper.find_value_by_name(g, gemm_node.output[0])] + ) + else: + # Else, set node B output as gemm output + gemm_node.output[0] = b_node.output[0] + # Remove useless nodes + for node in node_to_remove: + g.node.remove(node) + topological_sort(g) + + +def fuse_Gemm_into_Gemm(g): + """Fuse the previous Gemm into the following Gemm. + + :param g: the graph + """ + node_to_remove = [] + for node in g.node: + # Check for Gemm and Gemm + if node.op_type != "Gemm": + continue + prev_node = helper.find_node_by_output_name(g, node.input[0]) + if prev_node is None: + continue + if prev_node.op_type != "Gemm": + continue + # Get original weights + prev_b_node = helper.find_node_by_output_name(g, prev_node.input[1]) + prev_b = helper.constant_to_numpy(prev_b_node) + prev_c_node = helper.find_node_by_output_name(g, prev_node.input[2]) + prev_c = helper.constant_to_numpy(prev_c_node) + b_node = helper.find_node_by_output_name(g, node.input[1]) + b = helper.constant_to_numpy(b_node) + c_node = helper.find_node_by_output_name(g, node.input[2]) + c = helper.constant_to_numpy(c_node) + # Apply attributes + # alpha + alpha = helper.get_attribute_by_name(node, "alpha") + if alpha is None: + alpha = 1 + else: + alpha = alpha.f + b = b * alpha + alpha = helper.get_attribute_by_name(prev_node, "alpha") + if alpha is None: + alpha = 1 + else: + alpha = alpha.f + prev_b = prev_b * alpha + # beta + beta = helper.get_attribute_by_name(node, "beta") + if beta is None: + beta = 1 + else: + beta = beta.f + c = c * beta + beta = helper.get_attribute_by_name(prev_node, "beta") + if beta is None: + beta = 1 + else: + beta = beta.f + prev_c = prev_c * beta + # transA + transA = helper.get_attribute_by_name(node, "transA") + if transA is not None and transA.i == 1: + raise RuntimeError("Do not support transA") + transA = helper.get_attribute_by_name(prev_node, "transA") + if transA is not None and transA.i == 1: + raise RuntimeError("Do not support transA") + # transB + transB = helper.get_attribute_by_name(node, "transB") + if transB is not None and transB.i == 1: + b = b.transpose() + transB = helper.get_attribute_by_name(prev_node, "transB") + if transB is not None and transB.i == 1: + prev_b = prev_b.transpose() + # Calculate new weights + new_b = prev_b.dot(b) + new_c = prev_c.dot(b) + c + # Replace original weights + new_b_node = helper.numpy_to_constant(b_node.name + "_fused", new_b) + new_c_node = helper.numpy_to_constant(c_node.name + "_fused", new_c) + g.node.extend([new_b_node, new_c_node]) + node_to_remove.extend( + [b_node, c_node, prev_b_node, prev_c_node, prev_node] + ) + # Modify attributes + # alpha + alpha = helper.get_attribute_by_name(node, "alpha") + if alpha is not None: + alpha.f = 1.0 + # beta + beta = helper.get_attribute_by_name(node, "beta") + if beta is not None: + beta.f = 1.0 + # transB + transB = helper.get_attribute_by_name(node, "transB") + if transB is not None: + transB.i = 0 + # Connect the new graph + node.input[0] = prev_node.input[0] + delete_value_with_name_if_exists(g, prev_node.output[0]) + for i in range(1, 3): + delete_value_with_name_if_exists(g, prev_node.input[i]) + delete_value_with_name_if_exists(g, node.input[i]) + node.input[1] = new_b_node.output[0] + node.input[2] = new_c_node.output[0] + # Remove useless nodes + for node in node_to_remove: + g.node.remove(node) + topological_sort(g) + + +def fuse_MatMul_and_Add_into_Gemm(g): + """ + Fuse MatMul and Add layers into a new 
Gemm layers. + + :param g: the onnx graph + :raises ValueError: MatMul must be followed by an Add node + """ + node_to_remove = [] + node_to_add = [] + for node in g.node: + if node.op_type != "MatMul": + continue + add_node = None + for i in g.node: + if not i.input: + continue + if i.input[0] == node.output[0]: + add_node = i + break + value_to_remove = helper.find_value_by_name(g, node.output[0]) + if ( + add_node is None + or value_to_remove is None + or add_node.op_type != "Add" + ): + continue + input_list = node.input + input_list.append(add_node.input[1]), + new_node = onnx.helper.make_node( + "Gemm", + input_list, + add_node.output, + name=node.name, + alpha=1.0, + beta=1.0, + transA=0, + transB=0, + ) + node_to_add.append(new_node) + node_to_remove.append(node) + node_to_remove.append(add_node) + g.value_info.remove(value_to_remove) + for node in node_to_remove: + g.node.remove(node) + g.node.extend(node_to_add) + + +def fuse_consecutive_transposes(g): + node_to_del = [] + for node in g.node: + if node.op_type != "Transpose": + continue + pre_node = helper.find_node_by_output_name(g, node.input[0]) + if pre_node.op_type != "Transpose": + continue + + pre_permutation = list(pre_node.attribute[0].ints) + cur_permutation = list(node.attribute[0].ints) + if len(pre_permutation) != len(cur_permutation): + continue + + new_permutation = [] + for ind in cur_permutation: + new_permutation.append(pre_permutation[ind]) + + new_trans_node = onnx.helper.make_node( + "Transpose", + [pre_node.input[0]], + [node.output[0]], + name=node.name, + perm=new_permutation, + ) + + g.node.extend([new_trans_node]) + node_to_del.extend([pre_node, node]) + + mid_val_info = helper.find_value_by_name(g, node.input[0]) + if mid_val_info: + g.value_info.remove(mid_val_info) + + while node_to_del: + node = node_to_del.pop() + g.node.remove(node) + + topological_sort(g) + + +def fuse_mul_and_add_into_bn(g): + node_to_del = [] + for node in g.node: + if node.op_type != "Add": + continue + add_node = node + input_nodes_add = [ + helper.find_node_by_output_name(g, input_name) + for input_name in add_node.input + ] + if any([n is None for n in input_nodes_add]): + continue + mul_node, const_add = None, None + for input_node_add in input_nodes_add: + if input_node_add.op_type == "Mul": + mul_node = input_node_add + elif input_node_add.op_type == "Constant": + const_add = input_node_add + else: + pass + if not mul_node or not const_add: + continue + data_input_name, const_mul = None, None + for input_name in mul_node.input: + input_node = helper.find_node_by_output_name(g, input_name) + if not input_node: + data_input_name = input_name + elif input_node.op_type == "Constant": + if not const_mul: + const_mul = input_node + else: + data_input_name = input_name + else: + data_input_name = input_name + + if not const_mul: + continue + + scale_shape, scale_data = helper.constant_to_list(const_mul) + bias_shape, __ = helper.constant_to_list(const_add) + c_dim = len(scale_data) + if scale_shape != bias_shape: + continue + + data_input_value = helper.find_value_by_name(g, data_input_name) + if data_input_value is None: + data_input_value = helper.find_input_by_name(g, data_input_name) + _, previous_node_output_shape = helper.find_size_shape_from_value( + data_input_value + ) + # only allow 4 dim data input due to the hardware limitation + if ( + previous_node_output_shape is None + or len(previous_node_output_shape) != 4 + ): + continue + + # check if mul's dim and input channel dimension are matched + if 
previous_node_output_shape[1] != c_dim: + continue + + if scale_shape == [1, c_dim, 1, 1]: + # remove all '1' + for _ in range(3): + const_add.attribute[0].t.dims.remove(1) + const_mul.attribute[0].t.dims.remove(1) + elif scale_shape == [1, c_dim]: + # remove all '1' + const_add.attribute[0].t.dims.remove(1) + const_mul.attribute[0].t.dims.remove(1) + elif scale_shape == 1 and c_dim == 1: + # Single value weight + const_add.attribute[0].t.dims.append(1) + const_mul.attribute[0].t.dims.append(1) + else: + continue + + bn_name = add_node.output[0] + const_mean = helper.list_to_constant( + bn_name + "_mean", [c_dim], [0.0 for _ in range(c_dim)] + ) + const_var = helper.list_to_constant( + bn_name + "_var", [c_dim], [1.0 for _ in range(c_dim)] + ) + + bn_node = onnx.helper.make_node( + "BatchNormalization", + [ + data_input_name, + const_mul.output[0], + const_add.output[0], + const_mean.output[0], + const_var.output[0], + ], + [add_node.output[0]], + name=bn_name, + epsilon=0.00000001, + ) + + mid_val_info = helper.find_value_by_name(g, mul_node.output[0]) + scale_val_info = helper.find_value_by_name(g, const_mul.output[0]) + bais_val_info = helper.find_value_by_name(g, const_add.output[0]) + g.value_info.remove(mid_val_info) + g.value_info.remove(scale_val_info) + g.value_info.remove(bais_val_info) + + new_scale_val_info = onnx.helper.make_tensor_value_info( + const_mul.output[0], const_mul.attribute[0].t.data_type, [c_dim] + ) + new_bais_val_info = onnx.helper.make_tensor_value_info( + const_add.output[0], const_add.attribute[0].t.data_type, [c_dim] + ) + mean_val_info = onnx.helper.make_tensor_value_info( + const_mean.output[0], const_mean.attribute[0].t.data_type, [c_dim] + ) + var_val_info = onnx.helper.make_tensor_value_info( + const_var.output[0], const_var.attribute[0].t.data_type, [c_dim] + ) + + g.value_info.extend([new_scale_val_info]) + g.value_info.extend([new_bais_val_info]) + g.value_info.extend([mean_val_info]) + g.value_info.extend([var_val_info]) + g.node.extend([bn_node]) + g.node.extend([const_mean]) + g.node.extend([const_var]) + node_to_del.extend([mul_node, add_node]) + + while node_to_del: + g.node.remove(node_to_del.pop()) + + topological_sort(g) + + +def fuse_mul_and_add_into_gemm(g): + node_to_del = [] + for node in g.node: + if node.op_type != "Add": + continue + add_node = node + mul_node = helper.find_node_by_output_name(g, add_node.input[0]) + if not mul_node or mul_node.op_type != "Mul": + continue + mul_const = helper.find_node_by_output_name(g, mul_node.input[1]) + if not mul_const or mul_const.op_type != "Constant": + continue + add_const = helper.find_node_by_output_name(g, add_node.input[1]) + if not add_const or add_const.op_type != "Constant": + continue + + input_val = helper.find_value_by_name(g, mul_node.input[0]) + if not input_val: + input_val = helper.find_input_by_name(g, mul_node.input[0]) + if not input_val: + continue + + _, input_shape = helper.find_size_shape_from_value(input_val) + if not input_shape: + continue + + dim = int(np.prod(input_shape)) + if input_shape != [1, dim]: + continue + + mul_const_shape, mul_const_data = helper.constant_to_list(mul_const) + add_const_shape, __ = helper.constant_to_list(add_const) + + if len(mul_const_shape) != 1 or mul_const_shape[0] != dim: + continue + if len(add_const_shape) != 1 or add_const_shape[0] != dim: + continue + + b_data = np.zeros([dim, dim]) + for i in range(dim): + b_data[i][i] = mul_const_data[i] + b_data = b_data.flatten().tolist() + b_tensor = onnx.helper.make_tensor( + 
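# Note (added comment): the [dim, dim] diagonal matrix built above turns the elementwise Mul into an equivalent Gemm weight. +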
name=mul_const.name + "_tensor", + data_type=mul_const.attribute[0].t.data_type, + dims=[dim, dim], + vals=b_data, + ) + b_const_node = onnx.helper.make_node( + "Constant", + [], + [mul_const.output[0]], + value=b_tensor, + name=mul_const.output[0], + ) + + add_const.attribute[0].t.dims.insert(0, 1) + + gemm_node = onnx.helper.make_node( + "Gemm", + [mul_node.input[0], b_const_node.output[0], add_const.output[0]], + [add_node.output[0]], + name=add_node.output[0], + ) + + g.node.extend([gemm_node, b_const_node]) + node_to_del.extend([mul_const, mul_node, add_node]) + + val_info_mid = helper.find_value_by_name(g, mul_node.output[0]) + val_info_mul_const = helper.find_value_by_name(g, mul_const.output[0]) + val_info_add_const = helper.find_value_by_name(g, add_const.output[0]) + if val_info_mid: + g.value_info.remove(val_info_mid) + if val_info_mul_const: + g.value_info.remove(val_info_mul_const) + if val_info_add_const: + g.value_info.remove(val_info_add_const) + + while node_to_del: + g.node.remove(node_to_del.pop()) + + topological_sort(g) + + +def fuse_conv_and_add_into_conv(g): + node_to_del = [] + for node in g.node: + # Check if two nodes can be fused + if node.op_type != "Add": + continue + add_node = node + add_const = helper.find_node_by_output_name(g, add_node.input[1]) + if not add_const or add_const.op_type != "Constant": + continue + + conv_node = helper.find_node_by_output_name(g, add_node.input[0]) + if not conv_node or conv_node.op_type != "Conv": + continue + weight_node = helper.find_node_by_output_name(g, conv_node.input[1]) + if not weight_node or weight_node.op_type != "Constant": + continue + + m_dim = weight_node.attribute[0].t.dims[0] + if add_const.attribute[0].t.dims != [1, m_dim, 1, 1]: + continue + for _ in range(3): + add_const.attribute[0].t.dims.remove(1) + + # Link the add weight to constant. + conv_node.input.extend([add_const.output[0]]) + + # Remove the node + node_to_del.append(node) + output_value_info = helper.find_value_by_name(g, add_node.output[0]) + if output_value_info is not None: + g.value_info.remove(output_value_info) + add_weight_value_info = helper.find_value_by_name( + g, add_const.output[0] + ) + if add_weight_value_info is not None: + g.value_info.remove(add_weight_value_info) + # Replace next node input if any. 
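+ # Note (added comment): consumers of the Add output are rewired to read the + # Conv output directly, since the Conv now carries the fused bias input.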
+ following_nodes = helper.find_following_nodes_by_input_value_name( + g, add_node.output[0] + ) + for following_node in following_nodes: + replace_node_input( + following_node, add_node.output[0], add_node.input[0] + ) + # Replace output if any + todel_output = helper.find_output_by_name(g, add_node.output[0]) + if todel_output is not None: + g.output.remove(todel_output) + previous_output = helper.find_output_by_name(g, add_node.input[0]) + if previous_output is None: + the_input_value = helper.find_value_by_name( + g, add_node.input[0] + ) + g.output.extend([the_input_value]) + + while node_to_del: + g.node.remove(node_to_del.pop()) + + topological_sort(g) + + +def fuse_consecutive_reducemean(g): + node_to_del = [] + for node in g.node: + # Find consecutive ReduceMean + if node.op_type != "ReduceMean": + continue + pre_node = helper.find_node_by_output_name(g, node.input[0]) + if pre_node is None or pre_node.op_type != "ReduceMean": + continue + # Check attributes + pre_keepdims = helper.get_var_attribute_by_name( + pre_node, "keepdims", "int" + ) + pre_axes = helper.get_list_attribute_by_name(pre_node, "axes", "int") + cur_keepdims = helper.get_var_attribute_by_name( + node, "keepdims", "int" + ) + cur_axes = helper.get_list_attribute_by_name(node, "axes", "int") + if pre_keepdims != 0 or cur_keepdims != 0: + continue + axes = sorted(pre_axes + cur_axes) + if axes != [2, 3]: + continue + # Merge two ReduceMean into GlobalAveragePool. + new_gap_node = onnx.helper.make_node( + "GlobalAveragePool", + [pre_node.input[0]], + [node.output[0] + "_intermedia"], + name=node.name + "_gap", + ) + new_flatten_node = onnx.helper.make_node( + "Flatten", + [node.output[0] + "_intermedia"], + [node.output[0]], + name=node.name + "_flatten", + axis=1, + ) + + # Clean up + g.node.extend([new_gap_node, new_flatten_node]) + node_to_del.extend([pre_node, node]) + mid_val_info = helper.find_value_by_name(g, node.input[0]) + if mid_val_info: + g.value_info.remove(mid_val_info) + + while node_to_del: + node = node_to_del.pop() + g.node.remove(node) + + topological_sort(g) + + +def fuse_slice_nodes_into_conv(g): + # define pattern checker + def check_is_slice(node): + if node.op_type == "Concat": + return True + if node.op_type != "Slice": + return False + following_nodes = helper.find_following_nodes_by_input_value_name( + g, node.output[0] + ) + if len(following_nodes) != 1: + return False + # also check attributes + if len(node.input) != 5: + return False + # starts should be 0 or 1 + starts_node = helper.find_node_by_output_name(g, node.input[1]) + if starts_node.op_type != "Constant": + return False + _, starts_list = helper.constant_to_list(starts_node) + for num in starts_list: + if num != 0 and num != 1: + return False + # ends + ends_node = helper.find_node_by_output_name(g, node.input[2]) + if ends_node.op_type != "Constant": + return False + # axes should be 2 or 3 + axes_node = helper.find_node_by_output_name(g, node.input[3]) + if axes_node.op_type != "Constant": + return False + _, axes_list = helper.constant_to_list(axes_node) + for num in axes_list: + if num != 2 and num != 3: + return False + # Steps can only be 2 + steps_node = helper.find_node_by_output_name(g, node.input[4]) + if steps_node.op_type != "Constant": + return False + _, steps_list = helper.constant_to_list(steps_node) + for num in steps_list: + if num != 2: + return False + # Recursion + return check_is_slice(following_nodes[0]) + + # defind concat finder + def find_concat_node(node): + while node.op_type != "Concat": + node = 
helper.find_following_nodes_by_input_value_name( + g, node.output[0] + )[0] + return node + + # define remove node function. + def remove_nodes(input_name): + following_nodes = helper.find_following_nodes_by_input_value_name( + g, input_name + ) + # Remove concat directly + if ( + len(following_nodes) == 1 + and following_nodes[0].op_type == "Concat" + ): + g.node.remove(following_nodes[0]) + return + for following_node in following_nodes: + # Recursion first + remove_nodes(following_node.output[0]) + # Remove weights + for i in range(1, len(following_node.input)): + if ( + len( + helper.find_following_nodes_by_input_value_name( + g, following_node.input[i] + ) + ) + > 1 + ): + # More than one following nodes. Skip. + continue + input_weight = helper.find_node_by_output_name( + g, following_node.input[i] + ) + g.node.remove(input_weight) + # Remove Slice nodes + g.node.remove(following_node) + + # define remove value_info function + def remove_value_infos(input_name): + following_nodes = helper.find_following_nodes_by_input_value_name( + g, input_name + ) + if following_nodes[0].op_type == "Concat": + return + for following_node in following_nodes: + output_value = helper.find_value_by_name( + g, following_node.output[0] + ) + # Remove output values + if output_value is not None: + g.value_info.remove(output_value) + # Remove weight values + for i in range(1, len(following_node.input)): + input_value = helper.find_value_by_name( + g, following_node.input[i] + ) + if input_value is not None: + g.value_info.remove(input_value) + # Recursion + remove_value_infos(following_node.output[0]) + + # define get slice position + def get_slice_position(final_slice_output): + slice_position = [0, 0] + prev_node = helper.find_node_by_output_name(g, final_slice_output) + while prev_node is not None: + starts_np = helper.constant_to_numpy( + helper.find_node_by_output_name(g, prev_node.input[1]) + ) + axes_np = helper.constant_to_numpy( + helper.find_node_by_output_name(g, prev_node.input[3]) + ) + for i in range(len(axes_np)): + if axes_np[i] == 2: + slice_position[0] = starts_np[i] + elif axes_np[i] == 3: + slice_position[1] = starts_np[i] + prev_node = helper.find_node_by_output_name(g, prev_node.input[0]) + return slice_position + + # Check pattern from each input + for input_value in g.input: + nodes_after_input = helper.find_following_nodes_by_input_value_name( + g, input_value.name + ) + pattern_matched = True + for following_node in nodes_after_input: + if following_node.op_type != "Slice": + pattern_matched = False + break + else: + pattern_matched = check_is_slice(following_node) + if not pattern_matched: + continue + # Pattern found. 
Check limitation + # Currently only support 2D + if len(nodes_after_input) != 4: + continue + # Get the concat node + concat_node = find_concat_node(nodes_after_input[0]) + # Get basic information + input_shape = helper.get_shape_from_value_info(input_value) + channel_num = input_shape[1] + # Construct weight + weight_np = np.zeros( + (input_shape[1] * 4, input_shape[1], 3, 3), dtype=np.float32 + ) + for i in range(4): + # Check each branch + slice_position = get_slice_position(concat_node.input[i]) + for j in range(channel_num): + weight_np[ + i * channel_num + j, + j, + slice_position[0], + slice_position[1], + ] = 1 + weight_node = helper.numpy_to_constant( + concat_node.name + "_weight", weight_np + ) + # Construct Conv node + new_conv = onnx.helper.make_node( + "Conv", + [input_value.name, concat_node.name + "_weight"], + [concat_node.output[0]], + name=concat_node.name + "_fused", + dilations=[1, 1], + group=1, + kernel_shape=[3, 3], + strides=[2, 2], + pads=[0, 0, 2, 2], + ) + # Delete old nodes, weights and value_infos + remove_value_infos(input_value.name) + remove_nodes(input_value.name) + # Replace node + g.node.append(weight_node) + g.node.append(new_conv) + + +def fuse_relu_min_into_clip(g): + node_to_del = [] + for node in g.node: + # Check Min node + if node.op_type != "Min": + continue + min_node = node + # Check Constant node + min_const = helper.find_node_by_output_name(g, min_node.input[1]) + if not min_const or min_const.op_type != "Constant": + continue + min_shape, min_value = helper.constant_to_list(min_const) + if min_shape != 1: + continue + # Check Relu node + relu_node = helper.find_node_by_output_name(g, min_node.input[0]) + if not relu_node or relu_node.op_type != "Relu": + continue + + # Create Clip node + relu_min_const_node = helper.list_to_constant( + relu_node.name + "_min_value", [], [0.0] + ) + clip_node = onnx.helper.make_node( + "Clip", + [ + relu_node.input[0], + relu_min_const_node.output[0], + min_const.output[0], + ], + [min_node.output[0]], + name=min_node.name, + ) + + node_to_del.extend([relu_node, min_node]) + + old_relu_const_val_info = helper.find_value_by_name( + g, min_node.input[0] + ) + if old_relu_const_val_info: + g.value_info.remove(old_relu_const_val_info) + g.node.extend([relu_min_const_node, clip_node]) + + while node_to_del: + g.node.remove(node_to_del.pop()) + + topological_sort(g) diff --git a/tools/deployment/optimizer_scripts/tools/general_graph.py b/tools/deployment/optimizer_scripts/tools/general_graph.py new file mode 100644 index 0000000..f9904f2 --- /dev/null +++ b/tools/deployment/optimizer_scripts/tools/general_graph.py @@ -0,0 +1,85 @@ +from collections import deque + + +class Node: + """A Node which maps a node proto. It has pointers to its parents and + children. + """ + + def __init__(self, onnx_node): + """Initialize a node. This initialization only set up the mapping to + node proto. The pointers should be set up by outside. 
+ """ + self.name = None + self.parents = [] + self.children = [] + self.proto = None + self.output_value = None + if onnx_node is not None: + self.name = onnx_node.name + self.proto = onnx_node + + +class Graph: + """A graph which is constructed from the onnx proto.""" + + def __init__(self, onnx_graph): + """Construct the graph from onnx.""" + self.input_nodes = [] + self.output_nodes = [] + self.name2node = {} + self.output2node = {} + self.proto = onnx_graph + # Add input nodes + for value in onnx_graph.input: + input_node = Node(None) + input_node.name = "Input_" + value.name + input_node.output_value = value + self.name2node[input_node.name] = input_node + self.output2node[value.name] = input_node + self.input_nodes.append(input_node) + output_value_names = [value.name for value in onnx_graph.output] + # Add regular nodes + for onnx_node in onnx_graph.node: + node = Node(onnx_node) + self.name2node[node.name] = node + self.output2node[onnx_node.output[0]] = node + for value_name in onnx_node.input: + node.parents.append(self.output2node[value_name]) + self.output2node[value_name].children.append(node) + if onnx_node.output[0] in output_value_names: + self.output_nodes.append(node) + # Add value infos + for value in onnx_graph.value_info: + node = self.output2node[value.name] + node.output_value = value + + def get_sorted_node_list(self): + """Return a node list in topological order.""" + visited = set() + todo = deque() + result = [] + for node in self.input_nodes: + todo.append(node) + visited.add(node) + for onnx_node in self.proto.node: + if onnx_node.op_type == "Constant": + node = self.name2node[onnx_node.name] + todo.append(node) + visited.add(node) + while todo: + node = todo.popleft() + result.append(node) + for child in node.children: + if child in visited: + continue + ready = True + for child_parent in child.parents: + if child_parent in visited: + continue + ready = False + break + if ready: + todo.append(child) + visited.add(child) + return result diff --git a/tools/deployment/optimizer_scripts/tools/helper.py b/tools/deployment/optimizer_scripts/tools/helper.py new file mode 100644 index 0000000..02da09d --- /dev/null +++ b/tools/deployment/optimizer_scripts/tools/helper.py @@ -0,0 +1,642 @@ +"""This module contains helper functions that do not modify the graph. 
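+
+Typical usage (an illustrative sketch; the value and attribute names are
+assumptions, not part of any real model):
+
+    node = find_node_by_output_name(g, "conv1_out")
+    pads = get_list_attribute_by_name(node, "pads", "int")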
+"""
+import onnx
+import onnx.helper
+import struct
+import numpy as np
+import logging
+
+__ONNX_VERSION__ = -1
+
+logger = logging.getLogger("optimizer_scripts")
+
+
+def setup_current_opset_version(m):
+    global __ONNX_VERSION__
+    __ONNX_VERSION__ = m.opset_import[0].version
+    if __ONNX_VERSION__ not in [11]:
+        raise RuntimeError(
+            "Only opset 11 is supported, but got " + str(__ONNX_VERSION__)
+        )
+
+
+def get_current_opset_version():
+    if __ONNX_VERSION__ == -1:
+        raise RuntimeError("Please call setup_current_opset_version first.")
+    return __ONNX_VERSION__
+
+
+def find_nodes_by_input_name(g, name):
+    nodes = []
+    for node in g.node:
+        if name in node.input:
+            nodes.append(node)
+    return nodes
+
+
+def find_node_by_output_name(g, name):
+    """
+    Find a node in the graph by its output name.
+
+    :param g: the onnx graph\\
+    :param name: the target node output name\\
+    :returns: the node found by name, or None
+    """
+    for i in g.node:
+        if name in i.output:
+            return i
+    return None
+
+
+def find_node_by_node_name(g, name):
+    """
+    Find a node in the graph by its node name.
+
+    :param g: the onnx graph\\
+    :param name: the target node name\\
+    :returns: the node found by name, or None
+    """
+    for i in g.node:
+        if i.name == name:
+            return i
+    return None
+
+
+def find_following_nodes_by_input_value_name(g, name):
+    """Find the following nodes of a specific value.
+
+    :param g: the onnx graph. \\
+    :param name: the value name. \\
+    :return: a list of following nodes.
+    """
+    return find_nodes_by_input_name(g, name)
+
+
+def find_value_by_name(g, name):
+    """
+    Find a value_info in the graph by name.
+
+    :param g: the onnx graph\\
+    :param name: the target value_info name\\
+    :returns: the value_info found by name, or None
+    """
+    for i in g.value_info:
+        if i.name == name:
+            return i
+    return None
+
+
+def find_output_by_name(g, name):
+    """
+    Find an output value_info in the graph by name.
+
+    :param g: the onnx graph\\
+    :param name: the target output name\\
+    :returns: the output value_info found by name, or None
+    """
+    for i in g.output:
+        if i.name == name:
+            return i
+    return None
+
+
+def find_input_by_name(g, name):
+    """
+    Find an input in the graph by name.
+
+    :param g: the onnx graph\\
+    :param name: the target input name\\
+    :returns: the input found by name, or None
+    """
+    for i in g.input:
+        if i.name == name:
+            return i
+    return None
+
+
+def list_to_constant(name, shape, data, data_type=None):
+    """Generate a constant node using the given information.
+
+    :name: the node name and the output value name\\
+    :shape: the data shape\\
+    :data: the data itself\\
+    :returns: the generated onnx constant node
+    """
+    if not data_type:
+        if isinstance(data, int):
+            data_type = onnx.helper.TensorProto.INT64
+        elif isinstance(data, float):
+            data_type = onnx.helper.TensorProto.FLOAT
+        elif len(data) > 0 and isinstance(data[0], int):
+            data_type = onnx.helper.TensorProto.INT64
+        else:
+            data_type = onnx.helper.TensorProto.FLOAT
+    tensor = onnx.helper.make_tensor(name, data_type, shape, data)
+    new_w_node = onnx.helper.make_node(
+        "Constant", [], [name], name=name, value=tensor
+    )
+    return new_w_node
+
+
+def scaler_to_constant(name, data, data_type=None):
+    """Generate a scalar constant node using the given information.
+
+    :name: the node name and the output value name\\
+    :data: the scalar data itself\\
+    :returns: the generated onnx constant node
+    """
+    if not data_type:
+        if isinstance(data, int):
+            data_type = onnx.helper.TensorProto.INT64
+        elif isinstance(data, float):
+            data_type = onnx.helper.TensorProto.FLOAT
+        else:
+            logger.error("Cannot create a scalar constant from a list.")
+            exit(1)
+    tensor = onnx.helper.make_tensor(name, data_type, None, [data])
+    new_w_node = onnx.helper.make_node(
+        "Constant", [], [name], name=name, value=tensor
+    )
+    return new_w_node
+
+
+def numpy_to_constant(name, np_array):
+    return list_to_constant(name, np_array.shape, np_array.flatten().tolist())
+
+
+def constant_to_list(node):
+    """Generate a list from the constant node.
+
+    :node: the Constant node\\
+    :returns: the shape of the constant node, the data of the constant node
+    """
+    tensor = node.attribute[0].t
+    # 1. check data type
+    # 2. get data from raw or data
+    # 3. get shape from dim
+    if tensor.data_type == onnx.helper.TensorProto.INT32:
+        if len(tensor.int32_data) != 0:
+            data = list(tensor.int32_data)
+        else:
+            data = [i[0] for i in struct.iter_unpack("i", tensor.raw_data)]
+    elif tensor.data_type == onnx.helper.TensorProto.INT64:
+        if len(tensor.int64_data) != 0:
+            data = list(tensor.int64_data)
+        else:
+            data = [i[0] for i in struct.iter_unpack("q", tensor.raw_data)]
+    elif tensor.data_type == onnx.helper.TensorProto.INT8:
+        if len(tensor.int32_data) != 0:
+            data = list(tensor.int32_data)
+        else:
+            data = [i[0] for i in struct.iter_unpack("b", tensor.raw_data)]
+    elif tensor.data_type == onnx.helper.TensorProto.FLOAT:
+        if len(tensor.float_data) != 0:
+            data = list(tensor.float_data)
+        else:
+            data = [i[0] for i in struct.iter_unpack("f", tensor.raw_data)]
+    elif tensor.data_type == onnx.helper.TensorProto.DOUBLE:
+        if len(tensor.double_data) != 0:
+            data = list(tensor.double_data)
+        else:
+            data = [i[0] for i in struct.iter_unpack("d", tensor.raw_data)]
+    else:
+        print("Unsupported data type {}".format(tensor.data_type))
+        raise RuntimeError
+    if len(tensor.dims) == 0:
+        shape = len(data)
+    else:
+        shape = list(tensor.dims)
+    return shape, data
+
+
+def constant_to_numpy(node):
+    """Generate a numpy array from the constant node.
+
+    :node: the Constant node\\
+    :returns: the numpy array
+    """
+    shape, data = constant_to_list(node)
+    return np.array(data).reshape(shape)
+
+
+def all_constant_input(node):
+    """Check the inputs of the given node. If the inputs of this node are all\\
+    constant nodes, return True. Otherwise, return False.
+
+    :param node: the input node which has a Node structure\\
+    :return: whether the inputs of this node are all Constant nodes
+    """
+    if node.proto is None:
+        return False
+    isConstant = True
+    for parent in node.parents:
+        if parent.proto is None or parent.proto.op_type != "Constant":
+            isConstant = False
+            break
+    return isConstant
+
+
+def get_padding(size, kernel_size, strides):
+    """Calculate the padding array for same padding in the Tensorflow fashion.\\
+    See https://www.tensorflow.org/api_guides/python/nn#Convolution for more.
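+
+    Worked example (assumed values): size=[7, 7], kernel_size=[3, 3],
+    strides=[2, 2] -> since 7 % 2 != 0, pad_h = max(3 - (7 % 2), 0) = 2,
+    likewise pad_w = 2, and the returned array is [1, 1, 1, 1].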
+    """
+    if size[0] % strides[0] == 0:
+        pad_h = max(kernel_size[0] - strides[0], 0)
+    else:
+        pad_h = max(kernel_size[0] - (size[0] % strides[0]), 0)
+    if size[1] % strides[1] == 0:
+        pad_w = max(kernel_size[1] - strides[1], 0)
+    else:
+        pad_w = max(kernel_size[1] - (size[1] % strides[1]), 0)
+    return [pad_h // 2, pad_w // 2, pad_h - pad_h // 2, pad_w - pad_w // 2]
+
+
+def get_shape_from_value_info(value):
+    """Get shape from a value info.
+
+    :param value: the value_info proto\\
+    :return: list of the shape
+    """
+    return [d.dim_value for d in value.type.tensor_type.shape.dim]
+
+
+def find_size_shape_from_value(value):
+    """
+    Find the size of data within the value_info object.
+    :param value: value_info
+    :return: int size and list shape of the data in the value_info
+    """
+    if not value:
+        return None, None
+    if not value.type.tensor_type.shape.dim:
+        return 0, []
+    size = 1
+    shape = []
+    for i in range(len(value.type.tensor_type.shape.dim)):
+        size *= max(1, value.type.tensor_type.shape.dim[i].dim_value)
+        shape.append(max(1, value.type.tensor_type.shape.dim[i].dim_value))
+
+    return size, shape
+
+
+def get_attribute_by_name(node, attr_name):
+    """Get attribute proto with specific name in the given node proto.
+
+    :param node: the node proto.\\
+    :param attr_name: a str for the name of the target.\\
+    :return: if found, return the attribute_proto. Else, return None.
+    """
+    for attr in node.attribute:
+        if attr.name == attr_name:
+            return attr
+    return None
+
+
+def get_list_attribute_by_name(node, attr_name: str, attr_type: str):
+    """Get list attribute with specific name in the given node proto.
+
+    :param node: the node proto.\\
+    :param attr_name: a str for the name of the target.\\
+    :param attr_type: a str which should be "float" or "int".\\
+    :return: if found, return the list. Else, return None.
+    """
+    attr_proto = get_attribute_by_name(node, attr_name)
+    if attr_proto is None:
+        return None
+    if attr_type == "int":
+        if len(attr_proto.ints) == 0:
+            return None
+        else:
+            return list(attr_proto.ints)
+    elif attr_type == "float":
+        # Check the floats field here; checking ints for a float attribute
+        # would always look empty and wrongly return None.
+        if len(attr_proto.floats) == 0:
+            return None
+        else:
+            return list(attr_proto.floats)
+    else:
+        print("Warning: undefined type for list attribute extraction")
+        return None
+
+
+def get_var_attribute_by_name(node, attr_name: str, attr_type: str):
+    """Get variable attribute with specific name in the given node proto.
+
+    :param node: the node proto.
+    :param attr_name: str for the name of the target.
+    :param attr_type: str which should be "float", "int", "string" or "tensor".
+    :return: if found, return the variable. Else, return None.
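+
+    Example (illustrative, assuming the node carries an int "axis"
+    attribute):
+        axis = get_var_attribute_by_name(node, "axis", "int")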
+ """ + attr_proto = get_attribute_by_name(node, attr_name) + if attr_proto is None: + return None + if attr_type == "int": + return attr_proto.i + elif attr_type == "float": + return attr_proto.f + elif attr_type == "string": + if isinstance(attr_proto.s, bytes): + return attr_proto.s.decode("utf-8") + else: + return attr_proto.s + elif attr_type == "tensor": + return attr_proto.t + else: + print("Warning: undefined type for variable attribute extraction") + return None + + +def flatten_with_depth(data, depth): + output = [] + if type(data) not in [type(np.array([1])), type([1])]: + return [[data, 0]] + for item in data: + if type(item) not in [type(np.array([1])), type([1])]: + output.append([item, depth + 1]) + else: + output += flatten_with_depth(item, depth + 1) + return output + + +def flatten_to_list(data): + flatten_depth = flatten_with_depth(data, 0) + flat_data = [item[0] for item in flatten_depth] + return flat_data + + +def get_shape(data): + shape = [] + if type(data) not in [type(np.array([1])), type([1])]: + return [] + sub_data = data[0] + shape.append(len(data)) + while type(sub_data) in [type(np.array([1])), type([1])]: + shape.append(len(sub_data)) + sub_data = sub_data[0] + return shape + + +def slice_data(data, starts, ends, axes): + flat_data = [item[0] for item in flatten_with_depth(data, 0)] + shape = get_shape(data) + + starts_updated = [] + ends_updated = [] + for i in range(len(starts)): + start_updated = min(starts[i], shape[i] - 1) % shape[i] + starts_updated.append(start_updated) + for j in range(len(starts)): + if ends[j] >= shape[j]: + end_updated = shape[j] + else: + end_updated = min(ends[j], shape[j]) % shape[j] + ends_updated.append(end_updated) + + index_slices = [] + for i in range(len(shape)): + if i not in axes: + index_slices.append(list(range(shape[i]))) + else: + axe_ind = axes.index(i) + index_slices.append( + list(range(starts_updated[axe_ind], ends_updated[axe_ind])) + ) + + indices = [1] + for i in range(len(shape) - 1, -1, -1): + step = np.prod(shape[i + 1:]) + temp_pos = indices + new_indices = [] + for n in index_slices[i]: + for pos in temp_pos: + new_indices.append(int(n * step + pos)) + indices = new_indices + + sliced_data = [flat_data[k - 1] for k in indices] + + # reshape to correct shape. 
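+    # Kept axes retain their original extent while sliced axes shrink to
+    # (end - start): e.g. slicing axis 2 of a [1, 3, 8, 8] tensor with
+    # start=0, end=4 yields shape [1, 3, 4, 8] (illustrative values).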
+ new_shape = [] + for i in range(len(shape)): + if i not in axes: + new_shape.append(shape[i]) + else: + axe_ind = axes.index(i) + new_shape.append(ends_updated[axe_ind] - starts_updated[axe_ind]) + if any([dim < 1 for dim in new_shape]): + raise RuntimeError("Invalid starts ends.") + + sliced_data = np.reshape(sliced_data, new_shape) + + return sliced_data + + +def concatenate(data_sets, axis): + # check shapes + shapes = [] + shapes_ = [] + for data_set in data_sets: + shape = get_shape(data_set) + shapes.append(list(shape)) + shape.pop(axis) + shapes_.append(shape) + if not all([s == shapes_[0] for s in shapes_]): + raise RuntimeError("data sets shapes do not match") + + new_dim = sum([s[axis] for s in shapes]) + new_shape = list(shapes[0]) + new_shape[axis] = new_dim + + flat_data_sets = [] + for data_set in data_sets: + flat_data_sets.append(flatten_to_list(data_set)) + + sub_block_size = 1 + for i in range(axis + 1, len(shapes[0])): + sub_block_size *= shapes[0][i] + + split_num = 1 + for i in range(axis): + split_num *= shapes[0][i] + + total_flat_data = [] + for i in range(split_num): + for j in range(len(shapes)): + block_size = sub_block_size * shapes[j][axis] + total_flat_data.extend( + flat_data_sets[j][i * block_size:(i + 1) * block_size] + ) + + new_data = np.reshape(total_flat_data, new_shape) + + return new_data + + +def broadcast_data_sets(data_set_1, data_set_2): + shape1 = get_shape(data_set_1) + shape2 = get_shape(data_set_2) + + # compare shapes and get broadcasted shape + list_a, list_b = ( + (shape1, shape2) if len(shape1) > len(shape2) else (shape2, shape1) + ) + while len(list_a) > len(list_b): + list_b.insert(0, 0) + broadcasted_shape = [] + for i in range(len(list_a)): + if list_b[i] == 0: + broadcasted_shape.append(list_a[i]) + elif list_b[i] == 1: + broadcasted_shape.append(list_a[i]) + elif list_a[i] == 1: + broadcasted_shape.append(list_b[i]) + elif list_a[i] == list_b[i]: + broadcasted_shape.append(list_a[i]) + else: + raise RuntimeError("Can not broadcast two data sets") + + # prepare data for broadcasting. 
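+    # Zeros in the aligned shapes mark leading dimensions padded in above
+    # and are treated as size 1 here. E.g. broadcasting [3, 4] with [4]
+    # aligns the latter to [0, 4], reshapes it to [1, 4], then tiles it 3
+    # times along axis 0 via concatenate (illustrative values).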
+ shape1 = list(map(lambda x: x if x != 0 else 1, shape1)) + shape2 = list(map(lambda x: x if x != 0 else 1, shape2)) + data_1 = np.reshape(data_set_1, shape1) + data_2 = np.reshape(data_set_2, shape2) + + for i in range(len(shape1)): + if shape1[i] != broadcasted_shape[i]: + new_data_total = [ + list(data_1) for _ in range(broadcasted_shape[i]) + ] + data_1 = concatenate(new_data_total, axis=i) + for i in range(len(shape2)): + if shape2[i] != broadcasted_shape[i]: + new_data_total = [ + list(data_2) for _ in range(broadcasted_shape[i]) + ] + data_2 = concatenate(new_data_total, axis=i) + + return data_1, data_2 + + +def add(data_set_1, data_set_2): + broadcasted_data_1, broadcasted_data_2 = broadcast_data_sets( + data_set_1, data_set_2 + ) + + flat_data_1 = flatten_to_list(broadcasted_data_1) + flat_data_2 = flatten_to_list(broadcasted_data_2) + shape = get_shape(broadcasted_data_1) + res = [] + for i in range(len(flat_data_1)): + res.append(flat_data_1[i] + flat_data_2[i]) + + res = np.reshape(res, shape) + + return res + + +def reduceprod(data_set, axis, keepdims=1): + flat_data = flatten_to_list(data_set) + old_shape = get_shape(data_set) + + temp_shape = old_shape + temp_flat_data = flat_data + for ax in axis: + split_num = 1 + step = 1 + for i in range(ax): + split_num *= temp_shape[i] + for i in range(ax + 1, len(temp_shape)): + step *= temp_shape[i] + + block_size = len(temp_flat_data) // split_num + new_flat_data = [] + for j in range(split_num): + block_data = temp_flat_data[j * block_size:(j + 1) * block_size] + reduced_block_data = [] + for k in range(step): + val = block_data[k] + for li in range(1, block_size // step): + val *= block_data[k + li * step] + reduced_block_data.append(val) + new_flat_data.extend(reduced_block_data) + temp_flat_data = new_flat_data + temp_shape[ax] = 1 + + new_flat_data = temp_flat_data + new_shape = temp_shape + if not keepdims: + axis = sorted(list(axis)) + for pos in axis[::-1]: + new_shape.pop(pos) + + return np.reshape(new_flat_data, new_shape) + + +def transpose(data_set, permutation): + # find series of local swaps + data_set = list(data_set) + perm = list(permutation) + shape = get_shape(data_set) + flat_data = flatten_to_list(data_set) + assert set(perm) == set(range(len(shape))), "invalid permutation" + + new_shape = [shape[i] for i in perm] + swaps = [] + bubbled = True + while bubbled: + bubbled = False + for i in range(len(new_shape) - 1): + if perm[i] > perm[i + 1]: + swaps.append([i, i + 1]) + p_1, p_2 = perm[i], perm[i + 1] + perm[i], perm[i + 1] = p_2, p_1 + bubbled = True + + # apply local swaps + current_shape = list(shape) + temp_flat_data = flat_data + + for swap in swaps[::-1]: + ind_1, ind_2 = swap[0], swap[1] + dim_1 = current_shape[ind_1] + dim_2 = current_shape[ind_2] + split_num = 1 + block_size = 1 + + for i in range(ind_1): + split_num *= current_shape[i] + for i in range(ind_2 + 1, len(current_shape)): + block_size *= current_shape[i] + + data_blocks = np.reshape(temp_flat_data, [-1, block_size]) + flat_data_1 = [] + for k in range(split_num): + block = [] + for m in range(dim_2): + for n in range(dim_1): + block_pos = k * dim_1 * dim_2 + n * dim_2 + m + block.extend(data_blocks[block_pos]) + flat_data_1.extend(block) + + temp_flat_data = flat_data_1 + current_shape[ind_1] = dim_2 + current_shape[ind_2] = dim_1 + + return np.reshape(temp_flat_data, current_shape) + + +def subtract(data_set_1, data_set_2): + broadcasted_data_1, broadcasted_data_2 = broadcast_data_sets( + data_set_1, data_set_2 + ) + + shape = 
get_shape(broadcasted_data_1)
+    flat_data_1 = flatten_to_list(broadcasted_data_1)
+    flat_data_2 = flatten_to_list(broadcasted_data_2)
+
+    subtracted_data = [
+        flat_data_1[i] - flat_data_2[i] for i in range(len(flat_data_1))
+    ]
+
+    new_data = np.reshape(subtracted_data, shape)
+
+    return new_data
diff --git a/tools/deployment/optimizer_scripts/tools/modhelper.py b/tools/deployment/optimizer_scripts/tools/modhelper.py
new file mode 100644
index 0000000..ca5e040
--- /dev/null
+++ b/tools/deployment/optimizer_scripts/tools/modhelper.py
@@ -0,0 +1,96 @@
+"""
+This module contains helper functions that do graph modifications.
+"""
+
+from . import helper
+
+
+def replace_node_input(node, old_input, new_input):
+    for i, input_name in enumerate(node.input):
+        if input_name == old_input:
+            node.input[i] = new_input
+
+
+def delete_nodes(g, node_list):
+    node_to_delete = []
+    # Find target nodes
+    for node in g.node:
+        if node.name in node_list:
+            node_to_delete.append(node)
+    if len(node_list) != len(node_to_delete):
+        print("Some nodes do not exist in the graph. Skipping them.")
+    for node in node_to_delete:
+        # Check whether this node can be deleted safely
+        if len(node.input) == 0:
+            print(
+                "Deleting a Constant node. "
+                "Please make sure you also delete all of its following nodes."
+            )
+        elif len(node.input) > 1:
+            print(
+                f"Warning: Node {node.name} has more than one input. "
+                "This script cannot delete merge nodes."
+            )
+        # Connect the nodes around the target node.
+        # Set the following node input as the previous node output.
+        following_nodes = helper.find_following_nodes_by_input_value_name(
+            g, node.output[0]
+        )
+        if len(node.input) == 0:
+            for following_node in following_nodes:
+                following_node.input.remove(node.output[0])
+        elif (
+            len(following_nodes) > 0
+            and len(node.input) == 1
+            and helper.find_input_by_name(g, node.input[0]) is not None
+        ):
+            # The node input is a graph input
+            new_input = helper.find_value_by_name(g, node.output[0])
+            g.input.append(new_input)
+            g.input.remove(helper.find_input_by_name(g, node.input[0]))
+            g.value_info.remove(new_input)
+        elif len(following_nodes) > 0:
+            for following_node in following_nodes:
+                replace_node_input(
+                    following_node, node.output[0], node.input[0]
+                )
+        else:
+            # If the node is the output, replace it with its input.
+            value = helper.find_value_by_name(g, node.input[0])
+            output_values = []
+            while len(g.output):
+                output_values.append(g.output.pop())
+            while output_values:
+                output_value = output_values.pop()
+                if output_value.name == node.output[0]:
+                    g.output.extend([value])
+                else:
+                    g.output.extend([output_value])
+        # Remove the node and value info.
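+        # (Only the node proto is removed here; a dangling value_info, if
+        # any, is expected to be cleaned up separately, e.g. with
+        # delete_value_with_name_if_exists.)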
+ g.node.remove(node) + + +def delete_input(g, target_list): + for name in target_list: + input_value = helper.find_input_by_name(g, name) + if input_value is None: + print("Cannot find input {}".format(name)) + continue + g.input.remove(input_value) + + +def delete_output(g, target_list): + for name in target_list: + output_value = helper.find_output_by_name(g, name) + if output_value is None: + print("Cannot find output {}".format(name)) + continue + g.output.remove(output_value) + + +def delete_value_with_name_if_exists(g, name): + value = helper.find_value_by_name(g, name) + if value is not None: + g.value_info.remove(value) diff --git a/tools/deployment/optimizer_scripts/tools/other.py b/tools/deployment/optimizer_scripts/tools/other.py new file mode 100644 index 0000000..b003fbb --- /dev/null +++ b/tools/deployment/optimizer_scripts/tools/other.py @@ -0,0 +1,1451 @@ +""" +Optimization functions that are not fusing, eliminating or replacing. +In most cases, these are the modifications on the original nodes. +""" +import struct +import collections +import numpy as np +import onnx.helper +import onnxoptimizer as optimizer +import math +import logging +from . import helper +from .modhelper import replace_node_input +import copy +from .helper import logger + + +def polish_model(model): + """ + This function combines several useful utility functions together. + """ + onnx.checker.check_model(model) + onnx.helper.strip_doc_string(model) + model = onnx.shape_inference.infer_shapes(model) + model = optimizer.optimize(model) + onnx.checker.check_model(model) + return model + + +def format_value_info_shape(g): + """ + Replace -1 and 0 batch size in value info + + :param g: the onnx graph + """ + for value in g.input: + if len(value.type.tensor_type.shape.dim) > 0 and ( + value.type.tensor_type.shape.dim[0].dim_value <= 0 + or not isinstance( + value.type.tensor_type.shape.dim[0].dim_value, int + ) + ): + value.type.tensor_type.shape.dim[0].dim_value = 1 + for value in g.output: + if len(value.type.tensor_type.shape.dim) > 0 and ( + value.type.tensor_type.shape.dim[0].dim_value <= 0 + or not isinstance( + value.type.tensor_type.shape.dim[0].dim_value, int + ) + ): + value.type.tensor_type.shape.dim[0].dim_value = 1 + for value in g.value_info: + if len(value.type.tensor_type.shape.dim) > 0 and ( + value.type.tensor_type.shape.dim[0].dim_value < 0 + or not isinstance( + value.type.tensor_type.shape.dim[0].dim_value, int + ) + ): + value.type.tensor_type.shape.dim[0].dim_value = 1 + + +def add_name_to_node(g): + """ + If no name presents, give a name based on output name. + + :param g: the onnx graph + """ + for node in g.node: + if len(node.name) == 0: + node.name = node.output[0] + + +def rename_all_node_name(g): + """ + rename all nodes if the node name is a number: + + new_name = old_name + "_kn" + + :param g: the onnx graph + """ + + for node in g.node: + if not node.name.isdigit(): + # Skip not number names + continue + new_node_name = node.name + "_kn" + new_node_output0_name = node.output[0] + "_kn" + + # in order to keep same output node name, skip if it is output node. 
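+        # E.g. a node named "123" with output "123" becomes "123_kn" with
+        # output "123_kn" (illustrative), unless "123" is a graph output.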
+ output_value_info = helper.find_output_by_name(g, node.output[0]) + if output_value_info is not None: + continue + + # rename the input of all the following nodes + following_nodes = helper.find_following_nodes_by_input_value_name( + g, node.output[0] + ) + for following_node in following_nodes: + replace_node_input( + following_node, node.output[0], new_node_output0_name + ) + + # rename value info + value_info = helper.find_value_by_name(g, node.output[0]) + if value_info is not None: + value_info.name = new_node_output0_name + + # rename node + node.output[0] = new_node_output0_name + node.name = new_node_name + + +def add_output_to_value_info(g): + """ + If output does not present in value_info, copy one + + :param g: the onnx graph + """ + for output in g.output: + if helper.find_value_by_name(g, output.name) is None: + g.value_info.extend([output]) + + +def find_first_sequential_output(g, node): + for value_name in node.output: + value = helper.find_output_by_name(g, value_name) + if value is not None: + return value + next_nodes = helper.find_nodes_by_input_name(g, node.output[0]) + if len(next_nodes) == 0: + # No following nodes + return None + return find_first_sequential_output(g, next_nodes[0]) + + +def remove_nodes(g, cut_nodes=[], cut_types=[]): + node_to_delete = [] + # Find target nodes + for node in g.node: + if node.name not in cut_nodes and node.op_type not in cut_types: + continue + else: + node_to_delete.append(node) + # Mapping originnal outputs to new outputs. + # This mapping is to keep the output order. + output_mapping = {} + new_output = set() + for node in node_to_delete: + original_output = find_first_sequential_output(g, node) + if original_output.name not in output_mapping: + output_mapping[original_output.name] = [] + for input_name in node.input: + value = helper.find_value_by_name(g, input_name) + if ( + value is not None + and helper.find_output_by_name(g, input_name) is None + and value.name not in new_output + ): + output_mapping[original_output.name].append(value) + new_output.add(value.name) + # Remove them + while node_to_delete: + g.node.remove(node_to_delete.pop()) + # Remove unreachable nodes + visited_values = set() + unused_constant_map = {} + for input_value in g.input: + visited_values.add(input_value.name) + for node in g.node: + if node.op_type == "Constant": + visited_values.add(node.output[0]) + unused_constant_map[node.output[0]] = node + continue + can_reach = True + for input_name in node.input: + if input_name not in visited_values: + can_reach = False + break + if can_reach: + for output_name in node.output: + visited_values.add(output_name) + else: + node_to_delete.append(node) + # Mapping outputs again + for node in node_to_delete: + original_output = find_first_sequential_output(g, node) + if original_output is None: + continue + if original_output.name not in output_mapping: + output_mapping[original_output.name] = [] + for input_name in node.input: + value = helper.find_value_by_name(g, input_name) + if ( + value is not None + and helper.find_output_by_name(g, input_name) is None + and value.name not in new_output + ): + output_mapping[original_output.name].append(value) + new_output.add(value.name) + # Remove them + while node_to_delete: + g.node.remove(node_to_delete.pop()) + # Remove unused constants + for node in g.node: + for input_name in node.input: + if input_name in unused_constant_map: + del unused_constant_map[input_name] + for node in unused_constant_map.values(): + g.node.remove(node) + # Remove unreachable value 
infos + reachable_values = set() + for input_value in g.input: + reachable_values.add(input_value.name) + for node in g.node: + for input_name in node.input: + reachable_values.add(input_name) + for output_name in node.output: + reachable_values.add(output_name) + value_to_remove = [] + for value_info in g.value_info: + if value_info.name not in reachable_values: + value_to_remove.append(value_info) + while value_to_remove: + value_info = value_to_remove.pop() + g.value_info.remove(value_info) + # Reorder output + output_values = [] + while len(g.output): + output_values.append(g.output.pop()) + while output_values: + output_value = output_values.pop() + if output_value.name in reachable_values: + logger.info("Keep output {}".format(output_value.name)) + g.output.extend([output_value]) + elif output_value.name in output_mapping: + real_outputs = [ + i + for i in output_mapping[output_value.name] + if i.name in reachable_values + ] + logger.info( + "Replace output {} with {}".format( + output_value.name, [i.name for i in real_outputs] + ) + ) + g.output.extend(real_outputs) + else: + logger.info("Abandon output {}".format(output_value.name)) + continue + + +def transpose_B_in_Gemm(g): + """ + If transB is set in Gemm, transpose it + + :param g: the onnx graph + """ + for node in g.node: + if node.op_type != "Gemm": + continue + do_it = False + for attr in node.attribute: + if attr.name == "transB": + if attr.i == 1: + attr.i = 0 + do_it = True + break + if not do_it: + continue + # Transpose the weight and its output value + w_node = helper.find_node_by_output_name(g, node.input[1]) + w_output = helper.find_value_by_name(g, node.input[1]) + dim_0 = w_output.type.tensor_type.shape.dim[0].dim_value + dim_1 = w_output.type.tensor_type.shape.dim[1].dim_value + w_output.type.tensor_type.shape.dim[0].dim_value = dim_1 + w_output.type.tensor_type.shape.dim[1].dim_value = dim_0 + w_node.attribute[0].t.dims[0] = dim_1 + w_node.attribute[0].t.dims[1] = dim_0 + if w_node.attribute[0].t.raw_data: + raw_data = w_node.attribute[0].t.raw_data + fl_data = [i[0] for i in struct.iter_unpack("f", raw_data)] + else: + fl_data = w_node.attribute[0].t.float_data + w = np.reshape(fl_data, (dim_0, dim_1)) + w = w.transpose((1, 0)).flatten() + if w_node.attribute[0].t.raw_data: + buf = struct.pack("%sf" % len(w), *w) + w_node.attribute[0].t.raw_data = buf + else: + for i in range(len(fl_data)): + w_node.attribute[0].t.float_data[i] = w[i] + + +def topological_sort(g): + """ + Topological sort all the layers. + Assume a node do not take the same value as more than one inputs. 
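+    This is essentially Kahn's algorithm: nodes whose inputs are all
+    resolved enter a ready queue, and popping a node decrements the
+    in-degree of each of its consumers.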
+ + :param g: the onnx graph + """ + # TODO: Topological sort on the same branch + # Map from node name to its input degree + in_degree = {} + # Map from value info name to the nodes using it as input + output_nodes = collections.defaultdict(list) + # Map from node name to node object + node_map = {} + to_add = collections.deque() + # init + length = len(g.node) + for _ in range(length): + node = g.node.pop() + node_map[node.name] = node + if len([i for i in node.input if i != ""]) == 0: + to_add.append(node.name) + else: + in_degree[node.name] = len([i for i in node.input if i != ""]) + for input_name in node.input: + if input_name == "": + continue + output_nodes[input_name].append(node.name) + # sort + # deal with input first + for value_info in g.input: + input_name = value_info.name + for node_name in output_nodes[input_name]: + in_degree[node_name] -= 1 + if in_degree[node_name] == 0: + to_add.append(node_name) + del in_degree[node_name] + # main sort loop + sorted_nodes = [] + while to_add: + node_name = to_add.pop() + node = node_map[node_name] + del node_map[node_name] + sorted_nodes.append(node) + # Expect only one output name for each node + next_node_names = [] + for output_name in node.output: + next_node_names.extend(output_nodes[output_name]) + for next_node_name in next_node_names: + in_degree[next_node_name] -= 1 + if in_degree[next_node_name] == 0: + to_add.append(next_node_name) + del in_degree[next_node_name] + g.node.extend(sorted_nodes) + if in_degree: + raise RuntimeError( + "Unreachable nodes exist: {}".format(in_degree.keys()) + ) + if node_map: + raise RuntimeError("Unused nodes exist: {}".format(node_map.keys())) + + +def remove_zero_value_info(g): + value_info_list = list(g.value_info) + for vi in value_info_list: + if not vi.type.tensor_type.shape.dim: + g.value_info.remove(vi) + + for dim in vi.type.tensor_type.shape.dim: + if dim.dim_value == 0: + g.value_info.remove(vi) + break + + +def inference_shapes(m): + while len(m.graph.value_info) > 0: + m.graph.value_info.pop() + g = m.graph + inferencing_shapes = True + while inferencing_shapes: + inferencing_shapes = False + if inference_cov_shape(g): + inferencing_shapes = True + if inference_upsample_shape(g): + inferencing_shapes = True + if inference_resize_shape(g): + inferencing_shapes = True + if inference_split_shape(g): + inferencing_shapes = True + if inferencing_shapes: + topological_sort(g) + m = polish_model(m) + g = m.graph + remove_zero_value_info(g) + m = polish_model(m) + return m + + +def inference_resize_shape(g): + for node in g.node: + if node.op_type != "Resize": + continue + + output_value = helper.find_value_by_name(g, node.output[0]) + output_value = ( + helper.find_output_by_name(g, node.output[0]) + if output_value is None + else output_value + ) + if output_value is not None: + continue + + if len(node.input) == 4: # input: X, roi, scales, sizes + shape_node = helper.find_node_by_output_name(g, node.input[3]) + if shape_node.op_type != "Constant": + continue + + _, shape_value = helper.constant_to_list(shape_node) + output_value = onnx.helper.make_tensor_value_info( + node.output[0], + onnx.TensorProto.FLOAT, + [int(v) for v in shape_value], + ) + g.value_info.extend([output_value]) + return True + else: + # If output shape is not given, inference from scales + # Get the input shape + input_value = helper.find_value_by_name(g, node.input[0]) + if input_value is None: + continue + shape_value = helper.get_shape_from_value_info(input_value) + scales_node = 
helper.find_node_by_output_name(g, node.input[2]) + if scales_node.op_type != "Constant": + continue + _, scales_value = helper.constant_to_list(scales_node) + for i in range(len(shape_value)): + shape_value[i] *= scales_value[i] + output_value = onnx.helper.make_tensor_value_info( + node.output[0], + onnx.TensorProto.FLOAT, + [int(v) for v in shape_value], + ) + g.value_info.extend([output_value]) + return True + return False + + +def inference_upsample_shape(g): + """For onnx v1.4.1+, onnx cannot inference upsample output shape. Let's\\ + do it ourselves. This function only inference the next upsample without\\ + output shape each time. + + :param g: the graph\\ + :return: True if any Upsample shape is generated. Otherwise, False. + """ + for node in g.node: + if node.op_type != "Upsample": + continue + output_value = helper.find_value_by_name(g, node.output[0]) + if output_value is None: + output_value = helper.find_output_by_name(g, node.output[0]) + if output_value and helper.get_shape_from_value_info(output_value): + continue + # Get input shape + input_value = helper.find_value_by_name(g, node.input[0]) + if input_value is None: + continue + if not helper.get_shape_from_value_info(input_value): + continue + input_shape = helper.get_shape_from_value_info(input_value) + # Get upsample weight + weight_node = helper.find_node_by_output_name(g, node.input[1]) + weight_shape, weight = helper.constant_to_list(weight_node) + if len(input_shape) != weight_shape[0]: + raise RuntimeError( + "Unmatch input shape and weight shape: {} vs {}".format( + input_shape, weight_shape + ) + ) + # Calculate shape + output_shape = list(input_shape) + for i in range(len(output_shape)): + output_shape[i] = int(input_shape[i] * weight[i]) + output_value = onnx.helper.make_tensor_value_info( + node.output[0], + input_value.type.tensor_type.elem_type, + output_shape, + ) + g.value_info.extend([output_value]) + return True + return False + + +def inference_cov_shape(g): + processed = False + for node in g.node: + # Check for Conv output shape need to be inferrenced. + if node.op_type != "Conv": + continue + # Input shape is not ready yet. Skip. + input_value_info = helper.find_value_by_name(g, node.input[0]) + if not input_value_info: + input_value_info = helper.find_input_by_name(g, node.input[0]) + if not input_value_info: + continue + _, input_shape = helper.find_size_shape_from_value(input_value_info) + if not input_shape: + continue + # Output shape is already there. Skip. + output_value_info = helper.find_value_by_name(g, node.output[0]) + if not output_value_info: + output_value_info = helper.find_output_by_name(g, node.output[0]) + if output_value_info and helper.get_shape_from_value_info( + output_value_info + ): + continue + + # Now start the inference. + # Check kernel shape + kernel_value_info = helper.find_value_by_name(g, node.input[1]) + _, kernel_shape = helper.find_size_shape_from_value(kernel_value_info) + if not kernel_shape: + continue + # If auto_pad is set, use the auto_pad. 
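+        # The spatial output size computed below follows the standard
+        # convolution formula:
+        #   out = floor((in + pad_begin + pad_end
+        #                - dilation * (kernel - 1) - 1) / stride + 1)
+        # while the SAME_UPPER/SAME_LOWER branch reuses the input spatial
+        # size as-is.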
+ auto_pad = helper.get_var_attribute_by_name(node, "auto_pad", "string") + pads = None + if auto_pad is not None and auto_pad != "NOTSET": + if auto_pad == "SAME_LOWER" or auto_pad == "SAME_UPPER": + new_output_value_info = onnx.helper.make_tensor_value_info( + node.output[0], + input_value_info.type.tensor_type.elem_type, + [ + input_shape[0], + kernel_shape[0], + input_shape[2], + input_shape[3], + ], + ) + if output_value_info: + g.value_info.remove(output_value_info) + g.value_info.extend([new_output_value_info]) + processed = True + continue + elif auto_pad == "VALID": + pads = [0, 0, 0, 0] + else: + logger.error("Unrecognized auto_pad value: " + str(auto_pad)) + exit(1) + + strides = helper.get_attribute_by_name(node, "strides").ints + if not pads: + pads = helper.get_attribute_by_name(node, "pads").ints + dilation = helper.get_attribute_by_name(node, "dilations").ints + + # Pytorch model has the case where strides only have one number + if len(strides) == 1: + strides.append(strides[0]) + if len(dilation) == 1: + dilation.append(dilation[0]) + + H = math.floor( + ( + input_shape[2] + + pads[0] + + pads[2] + - dilation[0] * (kernel_shape[2] - 1) + - 1 + ) + / strides[0] + + 1 + ) + W = math.floor( + ( + input_shape[3] + + pads[1] + + pads[3] + - dilation[1] * (kernel_shape[3] - 1) + - 1 + ) + / strides[1] + + 1 + ) + output_shape = [input_shape[0], kernel_shape[0], H, W] + + new_output_value_info = onnx.helper.make_tensor_value_info( + node.output[0], + input_value_info.type.tensor_type.elem_type, + output_shape, + ) + + processed = True + + if output_value_info: + g.value_info.remove(output_value_info) + g.value_info.extend([new_output_value_info]) + + return processed + + +def inference_split_shape(g): + processed = False + for node in g.node: + if node.op_type != "Split": + continue + + input_val_info = helper.find_value_by_name(g, node.input[0]) + if not input_val_info: + input_val_info = helper.find_input_by_name(g, node.input[0]) + if not input_val_info: + continue + + _, input_shape = helper.find_size_shape_from_value(input_val_info) + if not input_shape: + continue + + output_val_names = list(node.output) + output_vals = [ + helper.find_value_by_name(g, val_name) + for val_name in output_val_names + ] + + output_shapes = [ + helper.find_size_shape_from_value(output_val)[1] + for output_val in output_vals + ] + if not any([len(s) == 0 for s in output_shapes]): + continue + + for att in node.attribute: + if att.name == "axis": + axis = att.i + else: + split = list(att.ints) + + new_output_vals = [] + for i in range(len(output_val_names)): + new_shape = list(input_shape) + new_shape[axis] = split[i] + new_output_val = onnx.helper.make_tensor_value_info( + output_val_names[i], + input_val_info.type.tensor_type.elem_type, + new_shape, + ) + new_output_vals.append(new_output_val) + + for val in output_vals: + if val is not None: + g.value_info.remove(val) + g.value_info.extend(new_output_vals) + + processed = True + + return processed + + +def parse_shape_change_input(s: str): + """The input should be like 'input 1 1 224 224'.""" + s_list = s.split(" ") + if len(s_list) < 2: + print("Cannot parse the shape change input: {}".format(s)) + return None + shape = [] + for i in range(1, len(s_list)): + shape.append(int(s_list[i])) + return s_list[0], shape + + +def change_input_shape(g, target_list): + for target in target_list: + try: + name, shape = parse_shape_change_input(target) + input_value = helper.find_input_by_name(g, name) + if input_value is None: + print("Cannot find input 
{}".format(name)) + continue + if len(shape) != len(input_value.type.tensor_type.shape.dim): + print("The dimension doesn't match for input {}".format(name)) + continue + for i in range(len(shape)): + input_value.type.tensor_type.shape.dim[i].dim_value = shape[i] + except TypeError: + # This happens when the parser function returns None. + continue + except ValueError: + # This happens when the input cannot be converter into int + print("Cannot parse {} into name and int".format(target)) + continue + + +def change_output_shape(g, target_list): + for target in target_list: + try: + name, shape = parse_shape_change_input(target) + output_value = helper.find_output_by_name(g, name) + if output_value is None: + print("Cannot find output {}".format(name)) + continue + if len(shape) != len(output_value.type.tensor_type.shape.dim): + print("The dimension doesn't match for output {}".format(name)) + continue + for i in range(len(shape)): + output_value.type.tensor_type.shape.dim[i].dim_value = shape[i] + except TypeError: + # This happens when the parser function returns None. + continue + except ValueError: + # This happens when the input cannot be converter into int + print("Cannot parse {} into name and int".format(target)) + continue + + +def add_nop_conv_after(g, value_names): + """Add do-nothing depthwise Conv nodes after the given value info. It will\\ + take the given names as the inputs of the new node and replace the inputs\\ + of the following nodes. + + :param g: the graph\\ + :param value_names: a list of string which are the names of value_info. + """ + for value_name in value_names: + # Find the value first + value = helper.find_value_by_name(g, value_name) + if value is None: + value = helper.find_input_by_name(g, value_name) + if value is None: + value = helper.find_output_by_name(g, value_name) + if value is None: + print("Cannot find an value_info named {}".format(value_name)) + continue + # Get the channel number from value info + shape = helper.get_shape_from_value_info(value) + channel = shape[1] + # Construct 4 weights + node_name = value_name + "_nop_conv" + ones = [1.0] * channel + weight_node = helper.list_to_constant( + node_name + "_weight", [channel, 1, 1, 1], ones + ) + # Construct BN node + conv_node = onnx.helper.make_node( + "Conv", + [value_name, weight_node.output[0]], + [node_name], + name=node_name, + dilations=[1, 1], + group=channel, + kernel_shape=[1, 1], + pads=[0, 0, 0, 0], + strides=[1, 1], + ) + # Reconnect the graph + following_nodes = helper.find_following_nodes_by_input_value_name( + g, value_name + ) + if len(following_nodes) > 0: + for following_node in following_nodes: + replace_node_input(following_node, value_name, node_name) + else: + new_value = onnx.helper.make_tensor_value_info( + node_name, value.type.tensor_type.elem_type, shape + ) + output_values = [] + while len(g.output): + output_values.append(g.output.pop()) + while output_values: + output_value = output_values.pop() + if output_value.name == value_name: + g.output.extend([new_value]) + else: + g.output.extend([output_value]) + # Add node to the graph + g.node.extend([conv_node, weight_node]) + topological_sort(g) + + +def add_nop_bn_after(g, value_names): + """Add do-nothing BatchNormalization nodes after the given value info. + It will take the given names as the inputs of the new node and replace + the inputs of the following nodes. + + :param g: the graph + :param value_names: a list of string which are the names of value_info. 
+ """ + for value_name in value_names: + # Find the value first + value = helper.find_value_by_name(g, value_name) + if value is None: + value = helper.find_input_by_name(g, value_name) + if value is None: + value = helper.find_output_by_name(g, value_name) + if value is None: + print("Cannot find an value_info named {}".format(value_name)) + continue + # Get the channel number from value info + shape = helper.get_shape_from_value_info(value) + channel = shape[1] + # Construct 4 weights + node_name = value_name + "_nop_bn" + ones = [1.0] * channel + zeros = [0.0] * channel + scale_node = helper.list_to_constant( + node_name + "_scale", [channel], ones + ) + bias_node = helper.list_to_constant( + node_name + "_bias", [channel], zeros + ) + mean_node = helper.list_to_constant( + node_name + "_mean", [channel], zeros + ) + var_node = helper.list_to_constant(node_name + "_var", [channel], ones) + # Construct BN node + bn_node = onnx.helper.make_node( + "BatchNormalization", + [ + value_name, + scale_node.output[0], + bias_node.output[0], + mean_node.output[0], + var_node.output[0], + ], + [node_name], + name=node_name, + ) + # Reconnect the graph + following_nodes = helper.find_following_nodes_by_input_value_name( + g, value_name + ) + if len(following_nodes) > 0: + for following_node in following_nodes: + replace_node_input(following_node, value_name, node_name) + else: + new_value = onnx.helper.make_tensor_value_info( + node_name, value.type.tensor_type.elem_type, shape + ) + output_values = [] + while len(g.output): + output_values.append(g.output.pop()) + while output_values: + output_value = output_values.pop() + if output_value.name == value_name: + g.output.extend([new_value]) + else: + g.output.extend([output_value]) + # Add node to the graph + g.node.extend([bn_node, scale_node, bias_node, mean_node, var_node]) + topological_sort(g) + + +def add_bias_scale_bn_after(g, value_name, channel_bias, channel_scale): + """ + Add do-nothing BatchNormalization nodes after the given value info. + It will take the given names as the inputs of the new node and replace + the inputs of the following nodes. + + :param g: the graph + :param value_name: a list of string which are the name of value_info. 
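+
+    With mean=0 and var=1, the inserted BN computes roughly
+    channel_scale * x + channel_bias per channel. Example (illustrative):
+        add_bias_scale_bn_after(g, "input_img", [-1.0] * 3, [2.0 / 255] * 3)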
+ """ + # Find the value first + value = helper.find_value_by_name(g, value_name) + if value is None: + value = helper.find_input_by_name(g, value_name) + if value is None: + value = helper.find_output_by_name(g, value_name) + if value is None: + print("Cannot find an value_info named {}".format(value_name)) + return + # Get the channel number from value info + shape = helper.get_shape_from_value_info(value) + channel = shape[1] + # Construct 4 weights + node_name = value_name + "_scale_shift_bn" + ones = [1.0] * channel + zeros = [0.0] * channel + scale_node = helper.list_to_constant( + node_name + "_scale", [len(channel_scale)], channel_scale + ) + bias_node = helper.list_to_constant( + node_name + "_bias", [len(channel_bias)], channel_bias + ) + mean_node = helper.list_to_constant(node_name + "_mean", [channel], zeros) + var_node = helper.list_to_constant(node_name + "_var", [channel], ones) + # Construct BN node + bn_node = onnx.helper.make_node( + "BatchNormalization", + [ + value_name, + scale_node.output[0], + bias_node.output[0], + mean_node.output[0], + var_node.output[0], + ], + [node_name], + name=node_name, + ) + # Reconnect the graph + following_nodes = helper.find_following_nodes_by_input_value_name( + g, value_name + ) + if len(following_nodes) > 0: + for following_node in following_nodes: + replace_node_input(following_node, value_name, node_name) + else: + new_value = onnx.helper.make_tensor_value_info( + node_name, value.type.tensor_type.elem_type, shape + ) + output_values = [] + while len(g.output): + output_values.append(g.output.pop()) + while output_values: + output_value = output_values.pop() + if output_value.name == value_name: + g.output.extend([new_value]) + else: + g.output.extend([output_value]) + # Add node to the graph + g.node.extend([bn_node, scale_node, bias_node, mean_node, var_node]) + topological_sort(g) + + +def duplicate_shared_Flatten(g): + """To feed our compiler, bind Flatten with Gemm. If the output of one\\ + Flatten goes to two Gemm nodes, duplicate the Flatten. + + :param g: the graph + """ + for node in g.node: + # Find a Flatten node + if node.op_type != "Flatten": + continue + # Check Flatten outputs. Get following Gemm + output_nodes = helper.find_following_nodes_by_input_value_name( + g, node.output[0] + ) + if len(output_nodes) < 2: + continue + gemm_nodes = [] + for output_node in output_nodes: + if output_node.op_type == "Gemm": + gemm_nodes.append(output_node) + if len(gemm_nodes) < 2: + continue + # Process all the Gemm nodes except for the first one. + for i in range(1, len(gemm_nodes)): + # Duplicate + new_flatten_name = node.name + "_copy" + str(i) + new_flatten_node = onnx.helper.make_node( + "Flatten", + node.input, + [new_flatten_name], + name=new_flatten_name, + axis=1, + ) + # Connect new graph + replace_node_input(gemm_nodes[i], node.output[0], new_flatten_name) + g.node.extend([new_flatten_node]) + topological_sort(g) + + +def deconv_to_conv_info_extraction(input_size, node_proto): + """Extract the information needed for deconv split. + + :param input_size: input shape of the deconv node.\\ + :param node_proto: the deconv node proto.\\ + :return: a dictionary of extracted params. 
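+
+    The returned dict holds the original attributes ("strides",
+    "kernel_shape", "group", "pads", ...) plus the derived "output_padding"
+    and "conv_pads" that split_ConvTranspose consumes below.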
+ """ + attr = dict() + # Get attributes from Deconv node + attr["auto_pad"] = helper.get_var_attribute_by_name( + node_proto, "auto_pad", "string" + ) + attr["dilations"] = helper.get_list_attribute_by_name( + node_proto, "dilations", "int" + ) + attr["group"] = helper.get_var_attribute_by_name( + node_proto, "group", "int" + ) + attr["kernel_shape"] = helper.get_list_attribute_by_name( + node_proto, "kernel_shape", "int" + ) + attr["output_padding"] = helper.get_list_attribute_by_name( + node_proto, "output_padding", "int" + ) + attr["pads"] = helper.get_list_attribute_by_name(node_proto, "pads", "int") + attr["strides"] = helper.get_list_attribute_by_name( + node_proto, "strides", "int" + ) + # Get output_padding + if attr["output_padding"] is None: + if ( + attr["auto_pad"] == "SAME_LOWER" + or attr["auto_pad"] == "SAME_UPPER" + ): + attr["output_padding"] = [ + attr["strides"][0] - 1, + attr["strides"][1], + ] + else: + attr["output_padding"] = [ + max(attr["strides"][0] - attr["kernel_shape"][0], 0), + max(attr["strides"][1] - attr["kernel_shape"][1], 0), + ] + # Calculate conv_padding + if attr["auto_pad"] == "SAME_LOWER" or attr["auto_pad"] == "SAME_UPPER": + pad1_h = ( + attr["kernel_shape"][0] - (attr["kernel_shape"][0] - 1) // 2 - 1 + ) + pad1_w = ( + attr["kernel_shape"][1] - (attr["kernel_shape"][1] - 1) // 2 - 1 + ) + head_h = min( + attr["kernel_shape"][0] // 2, (attr["output_padding"][0] + 1) // 2 + ) + head_w = min( + attr["kernel_shape"][1] // 2, (attr["output_padding"][1] + 1) // 2 + ) + tail_h = attr["output_padding"][0] - head_h + tail_w = attr["output_padding"][1] - head_w + attr["conv_pads"] = [ + pad1_h + head_h, + pad1_w + head_w, + pad1_h + tail_h, + pad1_w + tail_w, + ] + elif attr["pads"] is not None: + sum_of_pads = sum(attr["pads"]) + if sum_of_pads == 0: + # Valid padding + pad1_h = attr["kernel_shape"][0] - 0 - 1 + pad1_w = attr["kernel_shape"][1] - 0 - 1 + head_h = 0 + head_w = 0 + tail_h = attr["output_padding"][0] - head_h + tail_w = attr["output_padding"][1] - head_w + attr["conv_pads"] = [ + pad1_h + head_h, + pad1_w + head_w, + pad1_h + tail_h, + pad1_w + tail_w, + ] + else: + # Calculate output shape + tmp_output_shape = [0, 0] + tmp_output_shape[0] = ( + attr["strides"][0] * (input_size[2] - 1) + + attr["output_padding"][0] + + attr["kernel_shape"][0] + - attr["pads"][0] + - attr["pads"][2] + ) + tmp_output_shape[1] = ( + attr["strides"][1] * (input_size[3] - 1) + + attr["output_padding"][1] + + attr["kernel_shape"][1] + - attr["pads"][1] + - attr["pads"][3] + ) + # Calculate real conv output shape + tmp_center_shape = [0, 0] + tmp_center_shape[0] = (input_size[2] - 1) * attr["strides"][0] + 1 + tmp_center_shape[1] = (input_size[3] - 1) * attr["strides"][1] + 1 + # Calculate padding + total_padding = [0, 0] + total_padding[0] = ( + tmp_output_shape[0] + - tmp_center_shape[0] + + attr["kernel_shape"][0] + - 1 + ) + total_padding[1] = ( + tmp_output_shape[1] + - tmp_center_shape[1] + + attr["kernel_shape"][1] + - 1 + ) + if total_padding[0] < 0 or total_padding[1] < 0: + raise RuntimeError( + node_proto.name + " cannot infer conv padding." 
+                )
+            conv_pads_ = [0] * 4
+            conv_pads_[0] = total_padding[0] // 2
+            conv_pads_[1] = total_padding[1] // 2
+            conv_pads_[2] = total_padding[0] - total_padding[0] // 2
+            conv_pads_[3] = total_padding[1] - total_padding[1] // 2
+            attr["conv_pads"] = conv_pads_
+    else:
+        pad1_h = attr["kernel_shape"][0] - 0 - 1
+        pad1_w = attr["kernel_shape"][1] - 0 - 1
+        head_h = 0
+        head_w = 0
+        tail_h = attr["output_padding"][0] - head_h
+        tail_w = attr["output_padding"][1] - head_w
+        attr["conv_pads"] = [
+            pad1_h + head_h,
+            pad1_w + head_w,
+            pad1_h + tail_h,
+            pad1_w + tail_w,
+        ]
+    return attr
+
+
+def split_ConvTranspose(model):
+    """To feed our compiler, split ConvTranspose into Upsample and Conv.
+
+    :param model: the model
+    """
+    node_to_delete = []
+    # Change model properties for upsample.
+    if model.ir_version < 3:
+        print("Warning: Current model IR version is not fully supported.")
+    model.ir_version = 4
+    model.opset_import[0].version = 9
+    g = model.graph
+    # Find ConvTranspose layers
+    for node in g.node:
+        # Find a ConvTranspose node
+        if node.op_type != "ConvTranspose":
+            continue
+        # Check auto_pad
+        auto_pad_proto = helper.get_attribute_by_name(node, "auto_pad")
+        if auto_pad_proto is not None:
+            print("Cannot split ConvTranspose with auto_pad yet.")
+            continue
+        # Check output_shape
+        output_shape_proto = helper.get_attribute_by_name(node, "output_shape")
+        if output_shape_proto is not None:
+            print("Cannot split ConvTranspose with output_shape yet.")
+            continue
+        # Get input shape
+        input_value = helper.find_value_by_name(g, node.input[0])
+        if input_value is None:
+            input_value = helper.find_input_by_name(g, node.input[0])
+        if input_value is None:
+            print("Cannot get value info named {}.".format(node.input[0]))
+            exit(1)
+        input_shape = helper.get_shape_from_value_info(input_value)
+        # Get attributes
+        attr = deconv_to_conv_info_extraction(input_shape, node)
+        # Generate Upsample scales
+        upsample_output_shape = list(input_shape)
+        upsample_output_shape[2] = (input_shape[2] - 1) * attr["strides"][0] + 1
+        upsample_output_shape[3] = (input_shape[3] - 1) * attr["strides"][1] + 1
+        upsample_node_name = node.name + "_inner_upsample"
+        upsample_scale_name = upsample_node_name + "_scales"
+        scales_np = np.ones([4]).astype("float32")
+        scales_np[2] = float(upsample_output_shape[2]) / input_shape[2]
+        scales_np[3] = float(upsample_output_shape[3]) / input_shape[3]
+        scales_node = helper.numpy_to_constant(upsample_scale_name, scales_np)
+        # Generate an Upsample layer and an internal value info
+        upsample_node = onnx.helper.make_node(
+            "Upsample",
+            [node.input[0], upsample_scale_name],
+            [upsample_node_name],
+            name=upsample_node_name,
+            mode="zeros",
+        )
+        upsample_value_info = onnx.helper.make_tensor_value_info(
+            upsample_node_name,
+            input_value.type.tensor_type.elem_type,
+            upsample_output_shape,
+        )
+        # Check the weight layer, it may need a transpose
+        if attr["group"] != input_shape[1]:
+            weight_node = helper.find_node_by_output_name(g, node.input[1])
+            weight_np = helper.constant_to_numpy(weight_node)
+            new_weight_np = np.transpose(weight_np, [1, 0, 2, 3])
+            new_weight_node = helper.numpy_to_constant(
+                node.input[1], new_weight_np
+            )
+            node_to_delete.append(weight_node)
+            g.node.extend([new_weight_node])
+            value = helper.find_value_by_name(g, node.input[1])
+            g.value_info.remove(value)
+        # Generate a Conv layer
+        conv_node_name = node.name + "_inner_conv"
+        conv_node_input = [upsample_node_name]
+        conv_node_input.extend(node.input[1:])
+        conv_node = onnx.helper.make_node(
+            "Conv",
+
conv_node_input, + [node.output[0]], + name=conv_node_name, + pads=[int(i) for i in attr["conv_pads"]], + dilations=[int(i) for i in attr["dilations"]], + group=int(attr["group"]), + kernel_shape=[int(i) for i in attr["kernel_shape"]], + strides=[int(1), int(1)], + ) + # Reconnect the graph + g.node.extend([scales_node, upsample_node, conv_node]) + g.value_info.extend([upsample_value_info]) + node_to_delete.append(node) + # Delete useless nodes + for node in node_to_delete: + g.node.remove(node) + topological_sort(g) + + +def add_bn_on_skip_branch(g): + for n in g.node: + # Find merge node (Add) + if n.op_type != "Add": + continue + if len(n.input) != 2: + continue + # TODO: Still need to consider more cases + # Check if skip branch exist + input_node_a = helper.find_node_by_output_name(g, n.input[0]) + output_of_input_node_a = helper.find_nodes_by_input_name( + g, input_node_a.output[0] + ) + input_node_b = helper.find_node_by_output_name(g, n.input[1]) + output_of_input_node_b = helper.find_nodes_by_input_name( + g, input_node_b.output[0] + ) + if ( + len(output_of_input_node_a) == 1 + and len(output_of_input_node_b) == 1 + ): + continue + if len(output_of_input_node_a) == 2: + split_node = input_node_a + elif len(output_of_input_node_b) == 2: + split_node = input_node_b + else: + continue + # Get the channel number from value info + value_name = split_node.output[0] + value = helper.find_value_by_name(g, value_name) + shape = helper.get_shape_from_value_info(value) + channel = shape[1] + # Construct 4 weights + node_name = value_name + "_nop_bn" + ones = [1.0] * channel + zeros = [0.0] * channel + scale_node = helper.list_to_constant( + node_name + "_scale", [channel], ones + ) + bias_node = helper.list_to_constant( + node_name + "_bias", [channel], zeros + ) + mean_node = helper.list_to_constant( + node_name + "_mean", [channel], zeros + ) + var_node = helper.list_to_constant(node_name + "_var", [channel], ones) + # Construct BN node + bn_node = onnx.helper.make_node( + "BatchNormalization", + [ + value_name, + scale_node.output[0], + bias_node.output[0], + mean_node.output[0], + var_node.output[0], + ], + [node_name], + name=node_name, + ) + # Reconnect the graph + replace_node_input(n, value_name, node_name) + # Add node to the graph + g.node.extend([bn_node, scale_node, bias_node, mean_node, var_node]) + topological_sort(g) + + +def add_bn_before_add(g): + for n in g.node: + # Find merge node (Add) + if n.op_type != "Add": + continue + if len(n.input) != 2: + continue + # Get two inputs + input_node_a = helper.find_node_by_output_name(g, n.input[0]) + input_node_b = helper.find_node_by_output_name(g, n.input[1]) + # Skip constant input add + if input_node_a is None or input_node_a.op_type == "Constant": + continue + if input_node_b is None or input_node_b.op_type == "Constant": + continue + + def add_bn_after(prev_node): + # Get the channel number from value info + value_name = prev_node.output[0] + value = helper.find_value_by_name(g, value_name) + shape = helper.get_shape_from_value_info(value) + channel = shape[1] + # Construct 4 weights + node_name = value_name + "_nop_bn" + ones = [1.0] * channel + zeros = [0.0] * channel + scale_node = helper.list_to_constant( + node_name + "_scale", [channel], ones + ) + bias_node = helper.list_to_constant( + node_name + "_bias", [channel], zeros + ) + mean_node = helper.list_to_constant( + node_name + "_mean", [channel], zeros + ) + var_node = helper.list_to_constant( + node_name + "_var", [channel], ones + ) + # Construct BN node + bn_node 
= onnx.helper.make_node( + "BatchNormalization", + [ + value_name, + scale_node.output[0], + bias_node.output[0], + mean_node.output[0], + var_node.output[0], + ], + [node_name], + name=node_name, + epsilon=0.00000001, + ) + # Reconnect the graph + replace_node_input(n, value_name, node_name) + # Add node to the graph + g.node.extend( + [bn_node, scale_node, bias_node, mean_node, var_node] + ) + + if ( + not input_node_a.op_type == "BatchNormalization" + or len( + helper.find_following_nodes_by_input_value_name( + g, input_node_a.output[0] + ) + ) + > 1 + ): + add_bn_after(input_node_a) + if ( + not input_node_b.op_type == "BatchNormalization" + or len( + helper.find_following_nodes_by_input_value_name( + g, input_node_b.output[0] + ) + ) + > 1 + ): + add_bn_after(input_node_b) + topological_sort(g) + + +def add_bn_before_activation(g): + activation_nodes = set(["Relu", "Clip", "PRelu", "LeakyRelu"]) + previous_nodes = set(["Conv", "BatchNormalization"]) + for n in g.node: + # Find activation node + if n.op_type not in activation_nodes: + continue + # Get input + input_node = helper.find_node_by_output_name(g, n.input[0]) + if input_node is None or input_node.op_type in previous_nodes: + continue + + def add_bn_after(prev_node): + # Get the channel number from value info + value_name = prev_node.output[0] + value = helper.find_value_by_name(g, value_name) + shape = helper.get_shape_from_value_info(value) + channel = shape[1] + # Construct 4 weights + node_name = value_name + "_nop_bn" + ones = [1.0] * channel + zeros = [0.0] * channel + scale_node = helper.list_to_constant( + node_name + "_scale", [channel], ones + ) + bias_node = helper.list_to_constant( + node_name + "_bias", [channel], zeros + ) + mean_node = helper.list_to_constant( + node_name + "_mean", [channel], zeros + ) + var_node = helper.list_to_constant( + node_name + "_var", [channel], ones + ) + # Construct BN node + bn_node = onnx.helper.make_node( + "BatchNormalization", + [ + value_name, + scale_node.output[0], + bias_node.output[0], + mean_node.output[0], + var_node.output[0], + ], + [node_name], + name=node_name, + epsilon=0.00000001, + ) + # Reconnect the graph + replace_node_input(n, value_name, node_name) + # Add node to the graph + g.node.extend( + [bn_node, scale_node, bias_node, mean_node, var_node] + ) + + add_bn_after(input_node) + topological_sort(g) + + +def rename_output_name(g, original_name, new_name): + # Output + output_value = helper.find_output_by_name(g, original_name) + if output_value is None: + logging.error("Cannot find output value named " + original_name) + return + output_value.name = new_name + # Value Info + value_info = helper.find_value_by_name(g, original_name) + if value_info is not None: + value_info.name = new_name + # Node output + node = helper.find_node_by_output_name(g, original_name) + node.output[0] = new_name + # Node input + nodes = helper.find_nodes_by_input_name(g, original_name) + for node in nodes: + replace_node_input(node, original_name, new_name) + + +def duplicate_param_shared_constant(g): + for node in g.node: + input_names = set() + for n, input_node_name in enumerate(node.input): + param_data_node = helper.find_node_by_output_name( + g, input_node_name + ) + if ( + param_data_node is None + or param_data_node.op_type != "Constant" + ): + continue + if param_data_node.name not in input_names: + input_names.add(input_node_name) + continue + + new_node_name = param_data_node.name + "_" + str(n) + helper.logger.debug( + f"Duplicating weight: {param_data_node.name} -> " + 
f"{new_node_name}" + ) + duplicated_node = copy.deepcopy(param_data_node) + + duplicated_node.name = new_node_name + duplicated_node.output[0] = new_node_name + + node.input[n] = new_node_name + g.node.extend([duplicated_node]) diff --git a/tools/deployment/optimizer_scripts/tools/removing_transpose.py b/tools/deployment/optimizer_scripts/tools/removing_transpose.py new file mode 100644 index 0000000..89f772b --- /dev/null +++ b/tools/deployment/optimizer_scripts/tools/removing_transpose.py @@ -0,0 +1,368 @@ +from . import helper +from . import other +from . import modhelper +import numpy as np +import onnx +import onnx.utils + + +def eliminate_transposes(m): + g = m.graph + keep_eliminating = True + while keep_eliminating: + while swap_transpose_with_single_next_node(g): + pass + splitted = split_transpose_for_multiple_next_nodes(g) + annihilated = annihilate_transposes(g) + multiple_trans_swapped = swap_multiple_transposes_with_node(g) + keep_eliminating = splitted or annihilated or multiple_trans_swapped + + if keep_eliminating: + m = other.polish_model(m) + g = m.graph + + return m + + +def swap_transpose_with_single_next_node(g): + swapped = False + passable_nodes = set( + [ + "Relu", + "Neg", + "LeakyRelu", + "Sqrt", + "Reciprocal", + "Add", + "Mul", + "Tanh", + ] + ) + for node in g.node: + trans_node = node + # Check for transpose node + if trans_node.op_type != "Transpose": + continue + next_nodes = helper.find_nodes_by_input_name(g, trans_node.output[0]) + if len(next_nodes) != 1: + continue + next_node = next_nodes[0] + # Check if the next node is the type can be swapped + if next_node.op_type not in passable_nodes: + continue + + input_nodes = [ + helper.find_node_by_output_name(g, input_name) + for input_name in next_node.input + ] + + # Check if the node has nonconstant input + # other than the Transpose node itself + nonconstant_input = False + for input_node in input_nodes: + if input_node is None: + nonconstant_input = True + break + if input_node.name == trans_node.name: + continue + elif input_node.op_type == "Constant": + continue + else: + nonconstant_input = True + break + if nonconstant_input: + continue + + for input_node in input_nodes: + if input_node.name == trans_node.name: + # if the input is just the transpose node + next_value_info = helper.find_value_by_name( + g, next_node.output[0] + ) + mid_value_info = helper.find_value_by_name( + g, trans_node.output[0] + ) + + output_nodes = helper.find_nodes_by_input_name( + g, next_node.output[0] + ) + for out_node in output_nodes: + modhelper.replace_node_input( + out_node, next_node.output[0], trans_node.name + ) + + next_node.input[0] = trans_node.input[0] + next_node.output[0] = next_node.name + trans_node.input[0] = next_node.name + trans_node.output[0] = trans_node.name + + if next_value_info: + next_value_info.name = trans_node.name + if mid_value_info: + g.value_info.remove(mid_value_info) + else: + # if the input is a constant node + old_tensor = input_node.attribute[0].t + old_shape, data = helper.constant_to_list(input_node) + # If the constant node is a scaler, no action is needed + if type(old_shape) == int: + old_shape = [old_shape] + permutation = list(trans_node.attribute[0].ints) + while len(old_shape) < len(permutation): + old_shape.insert(0, 1) + np_data = np.reshape(data, old_shape) + reverse_perm = [] + for i in range(len(permutation)): + reverse_perm.append(permutation.index(i)) + np_data = np.transpose(np_data, reverse_perm) + new_shape = np_data.shape + new_tensor = onnx.helper.make_tensor( + 
+                    name=old_tensor.name,
+                    data_type=old_tensor.data_type,
+                    dims=new_shape,
+                    vals=np_data.flatten().tolist(),
+                )
+                new_node = onnx.helper.make_node(
+                    "Constant",
+                    [],
+                    [input_node.output[0]],
+                    name=input_node.name,
+                    value=new_tensor,
+                )
+                g.node.extend([new_node])
+
+                g.value_info.remove(
+                    helper.find_value_by_name(g, input_node.output[0])
+                )
+                g.node.remove(input_node)
+
+        swapped = True
+
+    other.topological_sort(g)
+    return swapped
+
+
+def swap_multiple_transposes_with_node(g):
+    # Here we only consider Transposes that share the same permutation.
+    swapped = False
+    passable_nodes = set(["Add", "Mul"])
+    node_to_del = []
+    for node in g.node:
+        if node.op_type not in passable_nodes:
+            continue
+        input_nodes = [
+            helper.find_node_by_output_name(g, input_name)
+            for input_name in node.input
+        ]
+        if any([input_node is None for input_node in input_nodes]):
+            continue
+        if any(
+            [input_node.op_type != "Transpose" for input_node in input_nodes]
+        ):
+            continue
+
+        permutation = list(input_nodes[0].attribute[0].ints)
+        if any(
+            [
+                list(input_node.attribute[0].ints) != permutation
+                for input_node in input_nodes
+            ]
+        ):
+            continue
+
+        for input_name in node.input:
+            input_node = helper.find_node_by_output_name(g, input_name)
+            modhelper.replace_node_input(node, input_name, input_node.input[0])
+
+        node_to_del.extend(input_nodes)
+        for input_node in input_nodes:
+            input_val_info = helper.find_value_by_name(g, input_node.output[0])
+            if input_val_info is not None:
+                g.value_info.remove(input_val_info)
+        output_val_info = helper.find_value_by_name(g, node.output[0])
+        if output_val_info is not None:
+            g.value_info.remove(output_val_info)
+
+        output_nodes = helper.find_nodes_by_input_name(g, node.output[0])
+        for i in range(len(output_nodes)):
+            new_trans_node_name = node.name + "_trans_" + str(i)
+            new_trans_node = onnx.helper.make_node(
+                "Transpose",
+                [node.output[0]],
+                [new_trans_node_name],
+                name=new_trans_node_name,
+                perm=permutation,
+            )
+            modhelper.replace_node_input(
+                output_nodes[i], node.output[0], new_trans_node_name
+            )
+
+            g.node.extend([new_trans_node])
+
+        swapped = True
+
+    while node_to_del:
+        node = node_to_del.pop()
+        g.node.remove(node)
+
+    other.topological_sort(g)
+    return swapped
+
+
+def annihilate_transposes(g):
+    node_to_del = []
+    annihilated = False
+    for node in g.node:
+        if node.op_type != "Transpose":
+            continue
+        pre_node = helper.find_node_by_output_name(g, node.input[0])
+        if not pre_node or pre_node.op_type != "Transpose":
+            continue
+        nodes_from_top_transpose = helper.find_nodes_by_input_name(
+            g, pre_node.output[0]
+        )
+        if len(nodes_from_top_transpose) > 1:
+            continue
+
+        perm_1 = list(pre_node.attribute[0].ints)
+        perm_2 = list(node.attribute[0].ints)
+        # Two back-to-back Transposes cancel out only when their composition
+        # is the identity permutation, i.e. perm_2 is the inverse of perm_1.
+        composed = [perm_1[i] for i in perm_2]
+        if composed != list(range(len(composed))):
+            continue
+
+        out_nodes = helper.find_nodes_by_input_name(g, node.output[0])
+        for out_node in out_nodes:
+            modhelper.replace_node_input(
+                out_node, node.output[0], pre_node.input[0]
+            )
+
+        node_to_del.extend([node, pre_node])
+        mid_value_info = helper.find_value_by_name(g, pre_node.output[0])
+        out_value_info = helper.find_value_by_name(g, node.output[0])
+        g.value_info.remove(mid_value_info)
+        g.value_info.remove(out_value_info)
+
+        annihilated = True
+    while node_to_del:
+        node = node_to_del.pop()
+        g.node.remove(node)
+
+    return annihilated
+
+
+def split_transpose_for_multiple_next_nodes(g):
+    splitted = False
+    node_to_del = []
+    for node in g.node:
+        if node.op_type != "Transpose":
+            continue
+        output_nodes = helper.find_nodes_by_input_name(g, node.output[0])
+        if len(output_nodes) <
2: + continue + for i in range(len(output_nodes)): + output_node = output_nodes[i] + new_trans_node_name = node.name + "_" + str(i) + new_trans_node = onnx.helper.make_node( + "Transpose", + [node.input[0]], + [new_trans_node_name], + name=new_trans_node_name, + perm=list(node.attribute[0].ints), + ) + modhelper.replace_node_input( + output_node, node.output[0], new_trans_node.output[0] + ) + g.node.extend([new_trans_node]) + + node_to_del.append(node) + val_info = helper.find_value_by_name(g, node.output[0]) + g.value_info.remove(val_info) + + splitted = True + + while node_to_del: + node = node_to_del.pop() + g.node.remove(node) + + other.topological_sort(g) + return splitted + + +def remove_trivial_transpose(g): + node_to_del = [] + for node in g.node: + if node.op_type != "Transpose": + continue + permutation = list(node.attribute[0].ints) + if permutation != list(range(len(permutation))): + continue + + next_nodes = helper.find_nodes_by_input_name(g, node.output[0]) + if not next_nodes: + input_val_info = helper.find_value_by_name(g, node.input[0]) + out_val_info = helper.find_output_by_name(g, node.output[0]) + if not input_val_info: + input_val_info = helper.find_input_by_name(g, node.input[0]) + g.output.remove(out_val_info) + g.output.extend([input_val_info]) + else: + out_val_info = helper.find_value_by_name(g, node.output[0]) + for next_node in next_nodes: + modhelper.replace_node_input( + next_node, node.output[0], node.input[0] + ) + g.value_info.remove(out_val_info) + + node_to_del.append(node) + + while node_to_del: + node = node_to_del.pop() + g.node.remove(node) + + other.topological_sort(g) + + +def fuse_Transpose_into_Gemm_weight(g): + node_to_del = [] + for node in g.node: + # Check pattern + if node.op_type != "Gemm": + continue + prev_node = helper.find_node_by_output_name(g, node.input[0]) + if prev_node is None or prev_node.op_type != "Flatten": + continue + transpose_node = helper.find_node_by_output_name(g, prev_node.input[0]) + if transpose_node.op_type != "Transpose": + continue + # Check attribute + perm = helper.get_list_attribute_by_name(transpose_node, "perm", "int") + if perm != [0, 2, 3, 1]: + continue + transB = helper.get_var_attribute_by_name(node, "transB", "int") + if transB is not None and transB == 1: + continue + # Get the original weight + origin_weight = helper.find_node_by_output_name(g, node.input[1]) + origin_np = helper.constant_to_numpy(origin_weight) + # Calculate a new weight + shape = helper.get_shape_from_value_info( + helper.find_value_by_name(g, prev_node.input[0]) + ) + shape.append(-1) + new_np = np.reshape(origin_np, shape) + new_np = np.transpose(new_np, [0, 3, 1, 2, 4]) + new_np = np.reshape(new_np, [-1, new_np.shape[-1]]) + new_weight = helper.numpy_to_constant(origin_weight.output[0], new_np) + # Replace and eliminate + prev_node.input[0] = transpose_node.input[0] + node_to_del.append(transpose_node) + node_to_del.append(origin_weight) + g.value_info.remove( + helper.find_value_by_name(g, transpose_node.output[0]) + ) + g.node.extend([new_weight]) + + while node_to_del: + node = node_to_del.pop() + g.node.remove(node) + + other.topological_sort(g) diff --git a/tools/deployment/optimizer_scripts/tools/replacing.py b/tools/deployment/optimizer_scripts/tools/replacing.py new file mode 100644 index 0000000..fdbaa62 --- /dev/null +++ b/tools/deployment/optimizer_scripts/tools/replacing.py @@ -0,0 +1,1367 @@ +""" +Optimizations that replace one node with another. 
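+Each pass below takes an onnx graph and rewrites it in place.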
+""" +import struct +import copy +import logging +import onnx.helper +import numpy as np +from . import helper +from . import modhelper +from .other import topological_sort + + +def replace_initializer_with_Constant(g, duplicate_shared_weights=True): + """ + Replace initializers with Constant and a corresponding value_info + If the initializer has related input, remove it. + + :param g: the onnx graph + """ + + input_map = {i.name: i for i in g.input} + for tensor in g.initializer: + # Check for the initializer related input and remove it + if tensor.name in input_map: + value_info = input_map[tensor.name] + g.input.remove(value_info) + following_nodes = helper.find_nodes_by_input_name(g, tensor.name) + if duplicate_shared_weights and len(following_nodes) >= 2: + for i, node in enumerate(following_nodes): + new_name = ( + tensor.name + "_duplicated_No" + str(i) + if i > 0 + else tensor.name + ) + helper.logger.debug( + f"Duplicating weight: {tensor.name} -> {new_name}" + ) + modhelper.replace_node_input(node, tensor.name, new_name) + new_node = onnx.helper.make_node( + "Constant", [], [new_name], name=new_name, value=tensor + ) + # Add node to lists + g.node.extend([new_node]) + else: + new_name = tensor.name + new_node = onnx.helper.make_node( + "Constant", [], [new_name], name=new_name, value=tensor + ) + # Add node to lists + g.node.extend([new_node]) + + # if value info already exists, remove it as well. + value_info = helper.find_value_by_name(g, tensor.name) + if value_info is not None: + g.value_info.remove(value_info) + + # Remove original initializer + while len(g.initializer) != 0: + g.initializer.pop() + + topological_sort(g) + + +def replace_Reshape_with_Flatten(g): + """ + Replace Reshape node into Flatten node if applicable. + + :param g: the onnx graph + """ + node_to_remove = [] + for node in g.node: + if node.op_type != "Reshape": + continue + found_Gemm = False + # Flatten could be followed by Gemm + for i in g.node: + if len(i.input) == 0 or i.input[0] != node.output[0]: + continue + if i.op_type == "Gemm": + break + # Check weight + shape_node = helper.find_node_by_output_name(g, node.input[1]) + if shape_node.op_type != "Constant": + continue + shape_value = helper.constant_to_numpy(shape_node) + if (shape_value.size != 2 or shape_value[0] != 1) and not found_Gemm: + continue + # Replace it + node.op_type = "Flatten" + for _ in range(len(node.attribute)): + node.attribute.pop() + shape_value = helper.find_value_by_name(g, shape_node.output[0]) + node.input.pop() + node_to_remove.append(shape_node) + # If found shape value_info, remove it + if shape_value is not None: + g.value_info.remove(shape_value) + + for node in node_to_remove: + g.node.remove(node) + + +def replace_Squeeze_with_Reshape(g): + """ + Replace Squeeze nodes with Reshape node. + + :param g: the input graph + """ + node_to_remove = [] + for node in g.node: + # Find Squeeze node + if node.op_type != "Squeeze": + continue + # Get the shape and Construct the shape + output_value = helper.find_value_by_name(g, node.output[0]) + if output_value is None: + output_value = helper.find_output_by_name(g, node.output[0]) + if output_value is None: + raise RuntimeError("Cannot get shape for Squeeze") + shape = [ + dim.dim_value for dim in output_value.type.tensor_type.shape.dim + ] + const_node = helper.list_to_constant( + node.name + "_shape", [len(shape)], shape + ) + # Construct the Reshape layer with same input, output and name. 
+ new_node = onnx.helper.make_node( + "Reshape", + [node.input[0], node.name + "_shape"], + node.output, + name=node.name, + ) + # Append constructed nodes and append old node to remove_list + g.node.extend([const_node, new_node]) + node_to_remove.append(node) + # Remove old nodes + for node in node_to_remove: + g.node.remove(node) + # Topological sort + topological_sort(g) + + +def replace_Unsqueeze_with_Reshape(g): + """ + Replace Unsqueeze nodes with Reshape node. + + :param g: the input graph + """ + node_to_remove = [] + for node in g.node: + # Find Squeeze node + if node.op_type != "Unsqueeze": + continue + # Get the shape and Construct the shape + output_value = helper.find_value_by_name(g, node.output[0]) + if output_value is None: + output_value = helper.find_output_by_name(g, node.output[0]) + if output_value is None: + raise RuntimeError("Cannot get shape for Unsqueeze") + shape = [ + dim.dim_value for dim in output_value.type.tensor_type.shape.dim + ] + + const_node = helper.list_to_constant( + node.name + "_shape", [len(shape)], shape + ) + # Construct the Reshape layer with same input, output and name. + new_node = onnx.helper.make_node( + "Reshape", + [node.input[0], node.name + "_shape"], + node.output, + name=node.name, + ) + # Append constructed nodes and append old node to remove_list + g.node.extend([const_node, new_node]) + node_to_remove.append(node) + # Remove old nodes + for node in node_to_remove: + g.node.remove(node) + # Topological sort + topological_sort(g) + + +def replace_average_pool_with_GAP(g): + """ + Replace AveragePool nodes with GlobalAveragePool node when available. + + :param g: the input graph + """ + node_to_remove = [] + for node in g.node: + # Find a average pool layer + if node.op_type != "AveragePool": + continue + # Check attributes + not_replace = False + for attr in node.attribute: + if attr.name == "pads": + if list(attr.ints) != [0, 0, 0, 0]: + not_replace = True + break + if attr.name == "kernel_shape": + kernel_shape = list(attr.ints) + value_info = helper.find_value_by_name(g, node.input[0]) + if value_info is None: + not_replace = True + break + input_shape = [] + for dim in value_info.type.tensor_type.shape.dim: + input_shape.append(dim.dim_value) + if input_shape[-2:] != kernel_shape: + not_replace = True + break + if not_replace: + continue + # Replace it with GlobalAveragePool + new_node = onnx.helper.make_node( + "GlobalAveragePool", node.input, node.output, name=node.name + ) + g.node.extend([new_node]) + node_to_remove.append(node) + for node in node_to_remove: + g.node.remove(node) + topological_sort(g) + + +def replace_dilated_conv(g): + """ + If the dilation of a convolution is not (1, 1), replace it with a regular + convolution with an expanded kernel. 
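+
+    Illustrative example (added note): a 3x3 kernel with dilations [2, 2]
+    becomes an equivalent 5x5 kernel (1 + (k - 1) * d per axis) whose taps
+    are spread out with zeros, after which dilations is reset to [1, 1].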
+ + :param g: the input graph + """ + node_to_remove = [] + for node in g.node: + # Check if this is a conv layer + if node.op_type != "Conv": + continue + # Check if this has dilation + has_dilations = False + has_strides = False + for attr in node.attribute: + if attr.name == "dilations": + dilations = list(attr.ints) + if dilations != [1, 1]: + has_dilations = True + if attr.name == "strides": + strides = list(attr.ints) + if strides != [1, 1]: + has_strides = True + if has_dilations and has_strides: + print("Warning: Both strides and dilations are set in ", node.name) + continue + if not has_dilations: + continue + # Construct new kernel + w_node = helper.find_node_by_output_name(g, node.input[1]) + w_output = helper.find_value_by_name(g, node.input[1]) + shape = list(w_node.attribute[0].t.dims) + # get original weight from float_data or raw data + weight = list(w_node.attribute[0].t.float_data) + if len(weight) == 0: + # Unpack from raw data + raw_data = w_node.attribute[0].t.raw_data + weight = [i[0] for i in struct.iter_unpack("f", raw_data)] + weight = np.array(weight) + weight = np.reshape(weight, shape) + new_shape = copy.copy(shape) + new_shape[2] = 1 + (shape[2] - 1) * dilations[0] + new_shape[3] = 1 + (shape[3] - 1) * dilations[1] + new_weight = np.zeros(new_shape) + for batch in range(shape[0]): + for ch in range(shape[1]): + for h in range(shape[2]): + nh = h * dilations[0] + for w in range(shape[3]): + nw = w * dilations[1] + new_weight[batch, ch, nh, nw] = weight[batch, ch, h, w] + tensor = onnx.helper.make_tensor( + w_node.attribute[0].t.name, + w_node.attribute[0].t.data_type, + new_shape, + new_weight.ravel(), + ) + new_w_node = onnx.helper.make_node( + "Constant", [], list(w_node.output), name=w_node.name, value=tensor + ) + g.node.extend([new_w_node]) + node_to_remove.append(w_node) + # Modify attributes and value info shapes + w_output.type.tensor_type.shape.dim[2].dim_value = new_shape[2] + w_output.type.tensor_type.shape.dim[3].dim_value = new_shape[3] + for attr in node.attribute: + if attr.name == "kernel_shape": + attr.ints[0] = new_shape[2] + attr.ints[1] = new_shape[3] + if attr.name == "dilations": + attr.ints[0] = 1 + attr.ints[1] = 1 + # Remove old weight nodes + for node in node_to_remove: + g.node.remove(node) + + +def replace_depthwise_1x1_with_bn(g): + """Replace 1x1 DepthwiseConv node into BN node if applicable. 
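+
+    (Added note) A 1x1 Conv with group == channel count multiplies each
+    channel by a single scalar weight plus an optional per-channel bias,
+    which is exactly a BatchNormalization with zero mean and unit variance.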
+
+    :param g: the onnx graph
+    """
+    node_to_remove = []
+    for node in g.node:
+        # Check op_type
+        if node.op_type != "Conv":
+            continue
+        # Check attributes
+        attr_map = {attr.name: attr for attr in node.attribute}
+        if "group" not in attr_map or attr_map["group"].i == 1:
+            continue
+        if (
+            attr_map["kernel_shape"].ints[0] != 1
+            or attr_map["kernel_shape"].ints[1] != 1
+        ):
+            continue
+        if "pads" in attr_map and sum(attr_map["pads"].ints) != 0:
+            continue
+        # Check scale
+        scale_node = helper.find_node_by_output_name(g, node.input[1])
+        if scale_node is None or scale_node.attribute[0].t.dims[1] != 1:
+            continue
+        scale_node.attribute[0].t.dims.pop()
+        scale_node.attribute[0].t.dims.pop()
+        scale_node.attribute[0].t.dims.pop()
+        scale_info = helper.find_value_by_name(g, node.input[1])
+        if scale_info is not None:
+            scale_info.type.tensor_type.shape.dim.pop()
+            scale_info.type.tensor_type.shape.dim.pop()
+            scale_info.type.tensor_type.shape.dim.pop()
+        # Check bias
+        if len(node.input) == 3:
+            bias_name = node.input[2]
+        else:
+            bias_name = node.name + "_bias"
+            bias_node = helper.list_to_constant(
+                bias_name, [attr_map["group"].i], [0.0] * attr_map["group"].i
+            )
+            g.node.extend([bias_node])
+        # Construct mean and var
+        mean_name = node.name + "_mean"
+        mean_node = helper.list_to_constant(
+            mean_name, [attr_map["group"].i], [0.0] * attr_map["group"].i
+        )
+        var_name = node.name + "_var"
+        var_node = helper.list_to_constant(
+            var_name, [attr_map["group"].i], [1.0] * attr_map["group"].i
+        )
+        g.node.extend([mean_node, var_node])
+        # Convert
+        bn_node = onnx.helper.make_node(
+            op_type="BatchNormalization",
+            inputs=[
+                node.input[0],
+                node.input[1],
+                bias_name,
+                mean_name,
+                var_name,
+            ],
+            outputs=node.output,
+            name=node.name,
+            epsilon=0.00001,
+            momentum=0.9,
+        )
+        g.node.extend([bn_node])
+        node_to_remove.append(node)
+    for node in node_to_remove:
+        g.node.remove(node)
+    topological_sort(g)
+
+
+def replace_shape_with_constant(g):
+    """Replace Shape with Constant.\\
+    This is the first step of reshape constant folding.
+
+    :param g: the input graph\\
+    :return: if anything modified, return true.
+    """
+    node_to_remove = []
+    for node in g.node:
+        # Find a Shape
+        if node.op_type != "Shape":
+            continue
+        # Check its input
+        input_value = helper.find_value_by_name(g, node.input[0])
+        if input_value is None:
+            input_value = helper.find_input_by_name(g, node.input[0])
+        if (
+            input_value is None
+            or len(input_value.type.tensor_type.shape.dim) == 0
+        ):
+            continue
+        # Check for the case where a dimension could be 0 or -1
+        tmp = True
+        for d in input_value.type.tensor_type.shape.dim:
+            tmp = tmp and (d.dim_value > 0)
+        if not tmp:
+            continue
+        # Replace it
+        input_shape = [
+            d.dim_value for d in input_value.type.tensor_type.shape.dim
+        ]
+        node_name = node.output[0]
+        new_node = helper.list_to_constant(
+            node_name, [len(input_shape)], input_shape
+        )
+        g.node.extend([new_node])
+        node_to_remove.append(node)
+
+        # If the input value_info is not used by any other node,
+        # delete this input value_info.
+        val_info_used = sum(
+            [input_value.name in node.input for node in g.node]
+        )
+        if val_info_used == 1:
+            g.value_info.remove(input_value)
+
+    replaced = len(node_to_remove) > 0
+
+    for node in node_to_remove:
+        g.node.remove(node)
+
+    topological_sort(g)
+
+    return replaced
+
+
+def replace_ConstantOfShape_with_constant(g):
+    """Replace ConstantOfShape with Constant.\\
+    This is a follow-up step of reshape constant folding.
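+    For example (added note), a ConstantOfShape whose shape input is the
+    constant [4] and whose value is 0.0 folds into Constant([0.0] * 4).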
+
+    :param g: the input graph\\
+    :return: if anything modified, return true.
+    """
+    node_to_remove = []
+    for node in g.node:
+        # Find a ConstantOfShape
+        if node.op_type != "ConstantOfShape":
+            continue
+        # Check input
+        input_value = helper.find_value_by_name(g, node.input[0])
+        if input_value is None:
+            input_value = helper.find_input_by_name(g, node.input[0])
+        if (
+            input_value is None
+            or len(input_value.type.tensor_type.shape.dim) == 0
+        ):
+            continue
+
+        # Replace with a Constant node
+        pre_node = helper.find_node_by_output_name(g, node.input[0])
+        if pre_node is None or pre_node.op_type != "Constant":
+            continue
+        _, target_shape = helper.constant_to_list(pre_node)
+
+        # The "value" attribute is a one-element tensor. Default to 0.0
+        # when it is absent. Only float tensors are expected here.
+        value_attr = helper.get_attribute_by_name(node, "value")
+        if value_attr is None:
+            value = 0.0
+        elif len(value_attr.t.float_data) > 0:
+            value = value_attr.t.float_data[0]
+        else:
+            value = struct.unpack("f", value_attr.t.raw_data[:4])[0]
+
+        element_count = 1
+        for d in target_shape:
+            element_count *= int(d)
+        node_name = node.output[0]
+        new_node = helper.list_to_constant(
+            node_name, target_shape, [value] * element_count
+        )
+
+        g.node.extend([new_node])
+
+        # Remove the old node
+        node_to_remove.append(node)
+
+        # Delete the value_info if it is not used by any other node
+        val_info_used = sum(
+            [input_value.name in node.input for node in g.node]
+        )
+        if val_info_used == 1:
+            g.value_info.remove(input_value)
+
+    replaced = len(node_to_remove) > 0
+
+    for node in node_to_remove:
+        g.node.remove(node)
+
+    topological_sort(g)
+
+    return replaced
+
+
+def replace_split_with_slices(g):
+    """Replace Split node with Slice nodes.
+    :param g: input graph.
+    :return:
+    """
+    node_to_remove = []
+    for node in g.node:
+        # Find a Split
+        if node.op_type != "Split":
+            continue
+
+        input_value = helper.find_value_by_name(g, node.input[0])
+        if not input_value:
+            input_value = helper.find_input_by_name(g, node.input[0])
+        _, shape = helper.find_size_shape_from_value(input_value)
+        if len(shape) == 0:
+            continue
+
+        output_val_names = list(node.output)
+
+        axis = 0
+        split = []
+        for item in node.attribute:
+            if item.name == "axis":
+                axis = item.i
+            if item.name == "split":
+                split = item.ints
+
+        # For opset 11, axis could be negative.
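+        # (Added illustration) e.g. axis -3 on a 4-D input means axis 1.
+        # A 1x6xHxW input with split [2, 4] on axis 1 then becomes two
+        # Slice nodes with starts/ends [0, 2] and [2, 6] on axes [1].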
+        if axis < 0:
+            axis = len(shape) + axis
+
+        length = input_value.type.tensor_type.shape.dim[axis].dim_value
+        if len(split) > 0:
+            n_out = len(split)
+            pos = 0
+            for i in range(n_out):
+                pos += split[i]
+                new_node_name = output_val_names[i]
+                # Construct starts, ends, axes
+                starts_name = new_node_name + "_starts_" + str(i)
+                ends_name = new_node_name + "_ends_" + str(i)
+                axes_name = new_node_name + "_axes_" + str(i)
+                starts_node = helper.list_to_constant(
+                    starts_name, (1,), [int(pos - split[i])]
+                )
+                ends_node = helper.list_to_constant(
+                    ends_name, (1,), [int(pos)]
+                )
+                axes_node = helper.list_to_constant(
+                    axes_name, (1,), [int(axis)]
+                )
+                # Construct node
+                new_node = onnx.helper.make_node(
+                    op_type="Slice",
+                    inputs=[node.input[0], starts_name, ends_name, axes_name],
+                    outputs=[node.output[i]],
+                    name=new_node_name,
+                )
+                g.node.extend([starts_node, ends_node, axes_node, new_node])
+            node_to_remove.append(node)
+        else:
+            n_out = len(output_val_names)
+            width = length // n_out
+            for i in range(n_out):
+                new_node_name = output_val_names[i]
+                # Construct starts, ends, axes
+                starts_name = new_node_name + "_starts_" + str(i)
+                ends_name = new_node_name + "_ends_" + str(i)
+                axes_name = new_node_name + "_axes_" + str(i)
+                starts_node = helper.list_to_constant(
+                    starts_name, (1,), [int(i * width)]
+                )
+                ends_node = helper.list_to_constant(
+                    ends_name, (1,), [int((1 + i) * width)]
+                )
+                axes_node = helper.list_to_constant(
+                    axes_name, (1,), [int(axis)]
+                )
+                # Construct node
+                new_node = onnx.helper.make_node(
+                    op_type="Slice",
+                    inputs=[node.input[0], starts_name, ends_name, axes_name],
+                    outputs=[node.output[i]],
+                    name=new_node_name,
+                )
+                g.node.extend([starts_node, ends_node, axes_node, new_node])
+            node_to_remove.append(node)
+
+    for old_node in node_to_remove:
+        g.node.remove(old_node)
+    topological_sort(g)
+
+
+def replace_ReduceMean_with_GlobalAveragePool(g):
+    """
+    Replace ReduceMean with GlobalAveragePool node when available.
+
+    If there is a preceding Transpose, check the Transpose and the
+    ReduceMean together. If keepdims is set to 0, add a Flatten.
+
+    :param g: the input graph
+    """
+    node_to_remove = []
+    for node in g.node:
+        # Find a ReduceMean layer
+        if node.op_type != "ReduceMean":
+            continue
+        # Check whether it has a preceding Transpose whose attributes
+        # meet the need.
+ prev_node = helper.find_node_by_output_name(g, node.input[0]) + if prev_node is not None and prev_node.op_type != "Transpose": + prev_node = None + if prev_node is not None: + perm = helper.get_list_attribute_by_name(prev_node, "perm", "int") + if perm != [0, 2, 3, 1]: + prev_node = None + # Check attributes + axes = helper.get_list_attribute_by_name(node, "axes", "int") + keepdims = helper.get_var_attribute_by_name(node, "keepdims", "int") + if axes is None: + continue + if prev_node is None and axes != [2, 3]: + continue + if prev_node is not None and axes != [1, 2]: + continue + if keepdims is None: + keepdims = 1 + # Replace it with GlobalAveragePool + if prev_node: + input_list = prev_node.input + else: + input_list = node.input + if keepdims == 1: + output_list = node.output + else: + output_list = [node.output[0] + "_before_flatten"] + flatten_node = onnx.helper.make_node( + "Flatten", + output_list, + node.output, + name=node.name + "_flatten", + axis=1, + ) + g.node.extend([flatten_node]) + new_node = onnx.helper.make_node( + "GlobalAveragePool", input_list, output_list, name=node.name + ) + g.node.extend([new_node]) + node_to_remove.append(node) + if prev_node: + value = helper.find_value_by_name(g, prev_node.output[0]) + if value: + g.value_info.remove(value) + node_to_remove.append(prev_node) + for node in node_to_remove: + g.node.remove(node) + topological_sort(g) + + +def replace_mul_to_bn(g): + """Replace single Mul node with Batchnorm node. + :param g: input graph. + :return: + """ + node_to_del = [] + for node in g.node: + if node.op_type != "Mul": + continue + + mul_op_node = node + + # only support one input node + if len(mul_op_node.input) != 2: # OP node and value node + continue + + input_op_node_name = mul_op_node.input[0] + mul_value_node = helper.find_node_by_output_name( + g, mul_op_node.input[1] + ) + if not mul_value_node or mul_value_node.op_type != "Constant": + continue + + prev_shape_value_info = helper.find_value_by_name( + g, input_op_node_name + ) + prev_shape_value_info = ( + helper.find_input_by_name(g, input_op_node_name) + if prev_shape_value_info is None + else prev_shape_value_info + ) + if prev_shape_value_info is None: + continue + + _, previous_node_output_shape = helper.find_size_shape_from_value( + prev_shape_value_info + ) + scale_shape, scale_data = helper.constant_to_list(mul_value_node) + + # channel dimension + c_dim = ( + previous_node_output_shape[1] + if len(previous_node_output_shape) > 1 + else 1 + ) + + # only allow channelwise mul or const mul + if scale_shape == [1, c_dim, 1, 1]: + muls = scale_data + elif scale_shape == [c_dim, 1, 1]: + muls = scale_data + elif scale_shape == 1: + muls = scale_data * c_dim + else: + continue + + ones = [1.0] * c_dim + zeros = [0.0] * c_dim + bn_name = mul_op_node.output[0] + mean_value_node = helper.list_to_constant( + bn_name + "_mean", np.array(zeros).shape, zeros + ) + variance_value_node = helper.list_to_constant( + bn_name + "_var", np.array(ones).shape, ones + ) + bias_value_node = helper.list_to_constant( + bn_name + "_add", np.array(zeros).shape, zeros + ) + new_mul_value_node = helper.list_to_constant( + bn_name + "_mul", np.array(muls).shape, muls + ) + + bn_node = onnx.helper.make_node( + "BatchNormalization", + [ + input_op_node_name, + new_mul_value_node.output[0], + bias_value_node.output[0], + mean_value_node.output[0], + variance_value_node.output[0], + ], + [mul_op_node.output[0]], + name=bn_name, + epsilon=0.00000001, + ) + + scale_val_info = helper.find_value_by_name(g, 
mul_value_node.output[0]) + g.value_info.remove(scale_val_info) + + g.node.extend([bn_node]) + g.node.extend([mean_value_node]) + g.node.extend([variance_value_node]) + g.node.extend([bias_value_node]) + g.node.extend([new_mul_value_node]) + + node_to_del.extend([mul_op_node]) + node_to_del.extend([mul_value_node]) + + while node_to_del: + g.node.remove(node_to_del.pop()) + + topological_sort(g) + + +def replace_div_to_bn(g): + """Replace single Div node with Batchnorm node. + :param g: input graph. + :return: + """ + node_to_del = [] + for node in g.node: + if node.op_type != "Div": + continue + + div_op_node = node + + # only support one input node + if len(div_op_node.input) != 2: # OP node and value node + continue + + input_op_node_name = div_op_node.input[0] + div_value_node = helper.find_node_by_output_name( + g, div_op_node.input[1] + ) + if not div_value_node or div_value_node.op_type != "Constant": + continue + + prev_shape_value_info = helper.find_value_by_name( + g, input_op_node_name + ) + prev_shape_value_info = ( + helper.find_input_by_name(g, input_op_node_name) + if prev_shape_value_info is None + else prev_shape_value_info + ) + if prev_shape_value_info is None: + continue + + _, previous_node_output_shape = helper.find_size_shape_from_value( + prev_shape_value_info + ) + scale_shape, scale_data = helper.constant_to_list(div_value_node) + + # channel dimension + c_dim = ( + previous_node_output_shape[1] + if len(previous_node_output_shape) > 1 + else 1 + ) + + # only allow channelwise div or const div + if scale_shape == [1, c_dim, 1, 1]: + muls = scale_data + elif scale_shape == [c_dim, 1, 1]: + muls = scale_data + elif scale_shape == 1: + muls = scale_data * c_dim + else: + continue + + ones = [1.0] * c_dim + zeros = [0.0] * c_dim + muls = (1 / np.array(muls)).tolist() + bn_name = div_op_node.output[0] + mean_value_node = helper.list_to_constant( + bn_name + "_mean", np.array(zeros).shape, zeros + ) + variance_value_node = helper.list_to_constant( + bn_name + "_var", np.array(ones).shape, ones + ) + bias_value_node = helper.list_to_constant( + bn_name + "_add", np.array(zeros).shape, zeros + ) + new_mul_value_node = helper.list_to_constant( + bn_name + "_mul", np.array(muls).shape, muls + ) + + bn_node = onnx.helper.make_node( + "BatchNormalization", + [ + input_op_node_name, + new_mul_value_node.output[0], + bias_value_node.output[0], + mean_value_node.output[0], + variance_value_node.output[0], + ], + [div_op_node.output[0]], + name=bn_name, + epsilon=0.00000001, + ) + + scale_val_info = helper.find_value_by_name(g, div_value_node.output[0]) + g.value_info.remove(scale_val_info) + + g.node.extend([bn_node]) + g.node.extend([mean_value_node]) + g.node.extend([variance_value_node]) + g.node.extend([bias_value_node]) + g.node.extend([new_mul_value_node]) + + node_to_del.extend([div_op_node]) + node_to_del.extend([div_value_node]) + + while node_to_del: + g.node.remove(node_to_del.pop()) + + topological_sort(g) + + +def replace_add_to_bn(g): + """Replace single Add node with Batchnorm node. + :param g: input graph. 
+ :return: + """ + node_to_del = [] + for node in g.node: + if node.op_type != "Add": + continue + + add_op_node = node + + # only support one input node + if len(add_op_node.input) != 2: # OP node and value node + continue + + input_op_node_name = add_op_node.input[0] + add_value_node = helper.find_node_by_output_name( + g, add_op_node.input[1] + ) + if not add_value_node or add_value_node.op_type != "Constant": + continue + + prev_shape_value_info = helper.find_value_by_name( + g, input_op_node_name + ) + prev_shape_value_info = ( + helper.find_input_by_name(g, input_op_node_name) + if prev_shape_value_info is None + else prev_shape_value_info + ) + if prev_shape_value_info is None: + continue + + _, previous_node_output_shape = helper.find_size_shape_from_value( + prev_shape_value_info + ) + bias_shape, bias_data = helper.constant_to_list(add_value_node) + + # channel dimension + c_dim = ( + previous_node_output_shape[1] + if len(previous_node_output_shape) > 1 + else 1 + ) + + # only allow channelwise add or const add + if bias_shape == [1, c_dim, 1, 1]: + bias = bias_data + elif bias_shape == [c_dim, 1, 1]: + bias = bias_data + elif bias_shape == 1: + bias = bias_data * c_dim + else: + continue + + ones = [1.0] * c_dim + zeros = [0.0] * c_dim + bn_name = add_op_node.output[0] + mean_value_node = helper.list_to_constant( + bn_name + "_mean", np.array(zeros).shape, zeros + ) + variance_value_node = helper.list_to_constant( + bn_name + "_var", np.array(ones).shape, ones + ) + scale_value_node = helper.list_to_constant( + bn_name + "_mul", np.array(ones).shape, ones + ) + new_add_value_node = helper.list_to_constant( + bn_name + "_add", np.array(bias).shape, bias + ) + + bn_node = onnx.helper.make_node( + "BatchNormalization", + [ + input_op_node_name, + scale_value_node.output[0], + new_add_value_node.output[0], + mean_value_node.output[0], + variance_value_node.output[0], + ], + [add_op_node.output[0]], + name=bn_name, + epsilon=0.00000001, + ) + + add_val_info = helper.find_value_by_name(g, add_value_node.output[0]) + g.value_info.remove(add_val_info) + + g.node.extend([bn_node]) + g.node.extend([mean_value_node]) + g.node.extend([variance_value_node]) + g.node.extend([scale_value_node]) + g.node.extend([new_add_value_node]) + + node_to_del.extend([add_op_node]) + node_to_del.extend([add_value_node]) + + while node_to_del: + g.node.remove(node_to_del.pop()) + + topological_sort(g) + + +def replace_sub_to_bn(g): + """Replace single Sub node with BatchNorm node. + :param g: input graph. 
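+    (Added note) x - c maps to BN(x) with scale 1 and bias -c, while
+    c - x maps to BN(x) with scale -1 and bias c.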
+    :return:
+    """
+    node_to_del = []
+    for node in g.node:
+        if node.op_type != "Sub":
+            continue
+
+        sub_op_node = node
+
+        # Only handle Sub with two inputs: one op output and one constant.
+        if len(sub_op_node.input) != 2:
+            continue
+
+        # Check the input type
+        input_1st_name = sub_op_node.input[0]
+        input_2nd_name = sub_op_node.input[1]
+        input_1st_node = helper.find_node_by_output_name(g, input_1st_name)
+        input_2nd_node = helper.find_node_by_output_name(g, input_2nd_name)
+        if input_1st_node is not None and input_1st_node.op_type == "Constant":
+            real_input_name = input_2nd_name
+            reverse = True
+            constant_node = input_1st_node
+        elif (
+            input_2nd_node is not None and input_2nd_node.op_type == "Constant"
+        ):
+            real_input_name = input_1st_name
+            reverse = False
+            constant_node = input_2nd_node
+        else:
+            continue
+
+        # Get shapes
+        prev_shape_value_info = helper.find_value_by_name(g, real_input_name)
+        prev_shape_value_info = (
+            helper.find_input_by_name(g, real_input_name)
+            if prev_shape_value_info is None
+            else prev_shape_value_info
+        )
+        if prev_shape_value_info is None:
+            continue
+
+        _, previous_node_output_shape = helper.find_size_shape_from_value(
+            prev_shape_value_info
+        )
+        bias_shape, bias_data = helper.constant_to_list(constant_node)
+
+        # channel dimension
+        c_dim = (
+            previous_node_output_shape[1]
+            if len(previous_node_output_shape) > 1
+            else 1
+        )
+
+        # only allow channelwise sub or const sub
+        if bias_shape == [1, c_dim, 1, 1]:
+            bias = bias_data
+        elif bias_shape == [c_dim, 1, 1]:
+            bias = bias_data
+        elif bias_shape == 1:
+            bias = bias_data * c_dim
+        else:
+            continue
+
+        ones = [1.0] * c_dim
+        zeros = [0.0] * c_dim
+        # If reversed (constant - input), use a -1 scale and keep the bias.
+        if reverse:
+            scale = [-1.0] * c_dim
+        else:
+            # input - constant: keep a unit scale and negate the bias.
+            scale = ones
+            bias = [-1.0 * b for b in bias]
+        bn_name = sub_op_node.output[0]
+        mean_value_node = helper.list_to_constant(
+            bn_name + "_mean", np.array(zeros).shape, zeros
+        )
+        variance_value_node = helper.list_to_constant(
+            bn_name + "_var", np.array(ones).shape, ones
+        )
+        scale_value_node = helper.list_to_constant(
+            bn_name + "_mul", np.array(scale).shape, scale
+        )
+        new_add_value_node = helper.list_to_constant(
+            bn_name + "_add", np.array(bias).shape, bias
+        )
+
+        bn_node = onnx.helper.make_node(
+            "BatchNormalization",
+            [
+                real_input_name,
+                scale_value_node.output[0],
+                new_add_value_node.output[0],
+                mean_value_node.output[0],
+                variance_value_node.output[0],
+            ],
+            [sub_op_node.output[0]],
+            name=bn_name,
+            epsilon=0.00000001,
+        )
+
+        add_val_info = helper.find_value_by_name(g, constant_node.output[0])
+        g.value_info.remove(add_val_info)
+
+        g.node.extend([bn_node])
+        g.node.extend([mean_value_node])
+        g.node.extend([variance_value_node])
+        g.node.extend([scale_value_node])
+        g.node.extend([new_add_value_node])
+
+        node_to_del.extend([sub_op_node])
+        node_to_del.extend([constant_node])
+
+    while node_to_del:
+        g.node.remove(node_to_del.pop())
+
+    topological_sort(g)
+
+
+def replace_sub_with_bn_and_add(g):
+    """Replace two-input Sub node with BN and Add: A - B = A + (-1) * B
+    :param g: input graph.
+    :return:
+    """
+    for node in g.node:
+        if node.op_type != "Sub":
+            continue
+
+        sub_op_node = node
+
+        # Only handle two-input Sub.
+        if len(sub_op_node.input) != 2:
+            continue
+
+        # Check the input types: both inputs should be non-constant.
+        input_1st_name = sub_op_node.input[0]
+        input_2nd_name = sub_op_node.input[1]
+        input_1st_node = helper.find_node_by_output_name(g, input_1st_name)
+        input_2nd_node = helper.find_node_by_output_name(g, input_2nd_name)
+        if input_1st_node is not None and input_1st_node.op_type == "Constant":
+            continue
+        elif (
+            input_2nd_node is not None and input_2nd_node.op_type == "Constant"
+        ):
+            continue
+
+        # Get shapes
+        input_2nd_value_info = helper.find_value_by_name(g, input_2nd_name)
+        if input_2nd_value_info is None:
+            input_2nd_value_info = helper.find_input_by_name(g, input_2nd_name)
+        if input_2nd_value_info is None:
+            continue
+
+        # Get channel dimension
+        _, input_2nd_shape = helper.find_size_shape_from_value(
+            input_2nd_value_info
+        )
+        if len(input_2nd_shape) < 2:
+            helper.logger.debug(
+                f"{sub_op_node.name} cannot be replaced "
+                "due to the input shape."
+            )
+            continue
+        c_dim = input_2nd_shape[1]
+
+        # Create the * -1 bn node.
+        ones = [1.0] * c_dim
+        zeros = [0.0] * c_dim
+        scale = [-1.0] * c_dim
+        bn_name = input_2nd_name + "_neg_for_" + node.name
+        mean_value_node = helper.list_to_constant(
+            bn_name + "_mean", np.array(zeros).shape, zeros
+        )
+        variance_value_node = helper.list_to_constant(
+            bn_name + "_var", np.array(ones).shape, ones
+        )
+        scale_value_node = helper.list_to_constant(
+            bn_name + "_mul", np.array(scale).shape, scale
+        )
+        bias_value_node = helper.list_to_constant(
+            bn_name + "_add", np.array(zeros).shape, zeros
+        )
+        bn_node = onnx.helper.make_node(
+            "BatchNormalization",
+            [
+                input_2nd_name,
+                scale_value_node.output[0],
+                bias_value_node.output[0],
+                mean_value_node.output[0],
+                variance_value_node.output[0],
+            ],
+            [bn_name],
+            name=bn_name,
+            epsilon=0.00000001,
+        )
+
+        # Change Sub to Add
+        sub_op_node.op_type = "Add"
+        # Replace the Add input
+        modhelper.replace_node_input(sub_op_node, input_2nd_name, bn_name)
+
+        g.node.extend(
+            [
+                scale_value_node,
+                bias_value_node,
+                mean_value_node,
+                variance_value_node,
+                bn_node,
+            ]
+        )
+
+    topological_sort(g)
+
+
+def replace_Sum_with_Adds(g):
+    node_to_del = []
+
+    for node in g.node:
+        # Check for Sum
+        if node.op_type != "Sum":
+            continue
+        # Check the input number
+        if len(node.input) == 1:
+            # If the input number is 1, delete the Sum node.
+            following_nodes = helper.find_following_nodes_by_input_value_name(
+                g, node.output[0]
+            )
+            for following_node in following_nodes:
+                modhelper.replace_node_input(
+                    following_node, node.output[0], node.input[0]
+                )
+            node_to_del.append(node)
+            value_info = helper.find_value_by_name(g, node.output[0])
+            if value_info is not None:
+                g.value_info.remove(value_info)
+        elif len(node.input) == 2:
+            # If the input number is 2, replace it with an Add.
+            node.op_type = "Add"
+            continue
+        elif len(node.input) > 2:
+            # If the input number is larger than 2, replace it with n-1 Adds.
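+            # (Added illustration) Sum(a, b, c, d) becomes
+            # Add(Add(Add(a, b), c), d); the last Add keeps the original
+            # Sum output name so downstream consumers stay connected.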
+            input_count = len(node.input)
+            # First node has 2 inputs
+            first_node = onnx.helper.make_node(
+                "Add",
+                [node.input[0], node.input[1]],
+                [node.output[0] + "_replacement_1"],
+                name=node.name + "_replacement_1",
+            )
+            # Last node has the same output as the original Sum node
+            last_node = onnx.helper.make_node(
+                "Add",
+                [
+                    node.output[0] + "_replacement_" + str(input_count - 2),
+                    node.input[input_count - 1],
+                ],
+                [node.output[0]],
+                name=node.name,
+            )
+            g.node.extend([first_node, last_node])
+            for i in range(2, input_count - 1):
+                new_node = onnx.helper.make_node(
+                    "Add",
+                    [
+                        node.output[0] + "_replacement_" + str(i - 1),
+                        node.input[i],
+                    ],
+                    [node.output[0] + "_replacement_" + str(i)],
+                    name=node.name + "_replacement_" + str(i),
+                )
+                g.node.extend([new_node])
+            node_to_del.append(node)
+        else:
+            logging.error("Sum node must have at least 1 input.")
+            quit(1)
+
+    while node_to_del:
+        g.node.remove(node_to_del.pop())
+
+    topological_sort(g)
+
+
+def replace_constant_input_concat_with_pad(g):
+    """
+    If an input is concatenated with a constant whose elements all share
+    the same value, replace the Concat with a Pad.
+    Currently only supports 2 or 3 inputs.
+    :param g: input graph.
+    :return:
+    """
+    node_to_del = []
+    for node in g.node:
+        # Check for Concat node
+        if node.op_type != "Concat":
+            continue
+
+        # Check concat node input
+        mode = None
+        value = 0
+        real_input_name = None
+        if len(node.input) == 2:
+            input_1st_node = helper.find_node_by_output_name(g, node.input[0])
+            input_2nd_node = helper.find_node_by_output_name(g, node.input[1])
+            if (
+                input_1st_node is not None
+                and input_1st_node.op_type == "Constant"
+            ):
+                mode = "left"
+                constant_value = helper.constant_to_numpy(input_1st_node)
+                real_input_name = node.input[1]
+                value = constant_value.flatten()[0]
+                # Check if the values are all the same.
+                if np.any(constant_value - value):
+                    continue
+            elif (
+                input_2nd_node is not None
+                and input_2nd_node.op_type == "Constant"
+            ):
+                mode = "right"
+                constant_value = helper.constant_to_numpy(input_2nd_node)
+                real_input_name = node.input[0]
+                value = constant_value.flatten()[0]
+                # Check if the values are all the same.
+                if np.any(constant_value - value):
+                    continue
+            else:
+                # No constant input case
+                continue
+        elif len(node.input) == 3:
+            # For a 3-input Concat node, the 1st and the 3rd input should be
+            # constant with the same value.
+            input_1st_node = helper.find_node_by_output_name(g, node.input[0])
+            input_2nd_node = helper.find_node_by_output_name(g, node.input[1])
+            input_3rd_node = helper.find_node_by_output_name(g, node.input[2])
+            if (
+                input_1st_node is None
+                or input_1st_node.op_type != "Constant"
+                or input_3rd_node is None
+                or input_3rd_node.op_type != "Constant"
+            ):
+                continue
+            mode = "both"
+            real_input_name = node.input[1]
+            input_1st_value = helper.constant_to_numpy(input_1st_node)
+            input_3rd_value = helper.constant_to_numpy(input_3rd_node)
+            value = input_1st_value.flatten()[0]
+            # Check if all the values are the same
+            if np.any(input_1st_value - value):
+                continue
+            elif np.any(input_3rd_value - value):
+                continue
+        else:
+            # Too many inputs case.
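+            # (Added illustration of the supported cases) With two inputs,
+            # Concat([zeros(1x2xHxW), x], axis=1) becomes Pad(x) with
+            # pads = [0, 2, 0, 0, 0, 0, 0, 0]; with three inputs the two
+            # constant branches pad both sides of the concat axis.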
+            continue
+        # Make weight nodes
+        input_value_info = helper.find_value_by_name(g, real_input_name)
+        input_shape = helper.get_shape_from_value_info(input_value_info)
+        pads = [0] * (len(input_shape) * 2)
+        axis = helper.get_var_attribute_by_name(node, "axis", "int")
+        if axis < 0:
+            axis = len(input_shape) + axis
+        if mode == "left":
+            left_value_info = helper.find_value_by_name(g, node.input[0])
+            left_input_shape = helper.get_shape_from_value_info(
+                left_value_info
+            )
+            pads[axis] = left_input_shape[axis]
+        elif mode == "right":
+            right_value_info = helper.find_value_by_name(g, node.input[1])
+            right_input_shape = helper.get_shape_from_value_info(
+                right_value_info
+            )
+            pads[axis + len(input_shape)] = right_input_shape[axis]
+        else:
+            # mode should be "both"
+            left_value_info = helper.find_value_by_name(g, node.input[0])
+            left_input_shape = helper.get_shape_from_value_info(
+                left_value_info
+            )
+            pads[axis] = left_input_shape[axis]
+            right_value_info = helper.find_value_by_name(g, node.input[2])
+            right_input_shape = helper.get_shape_from_value_info(
+                right_value_info
+            )
+            pads[axis + len(input_shape)] = right_input_shape[axis]
+        pads_node = helper.list_to_constant(
+            node.name + "_pads", (len(pads),), pads
+        )
+        constant_value_node = helper.scaler_to_constant(
+            node.name + "_constant_value", value
+        )
+        # Create the new Pad node
+        new_pad_node = onnx.helper.make_node(
+            "Pad",
+            [real_input_name, pads_node.name, constant_value_node.name],
+            [node.output[0]],
+            name=node.name,
+            mode="constant",
+        )
+        # Replace
+        node_to_del.append(node)
+        g.node.extend([pads_node, constant_value_node, new_pad_node])
+
+    while node_to_del:
+        g.node.remove(node_to_del.pop())
+
+    topological_sort(g)
diff --git a/tools/deployment/optimizer_scripts/tools/special.py b/tools/deployment/optimizer_scripts/tools/special.py
new file mode 100644
index 0000000..275f8c5
--- /dev/null
+++ b/tools/deployment/optimizer_scripts/tools/special.py
@@ -0,0 +1,489 @@
+"""Special operations on model.
+"""
+import onnx.helper
+import numpy as np
+from . import helper
+from . import other
+
+
+def change_first_conv_from_bgr_to_rgb(m):
+    """For a model trained with BGR input, change the first conv weight so
+    that the converted model takes RGB input instead.
+
+    :param m: the model proto
+    """
+    # Check for the first node.
+    g = m.graph
+    input_name = g.input[0].name
+    first_nodes = helper.find_following_nodes_by_input_value_name(
+        g, input_name
+    )
+    if len(first_nodes) > 1:
+        return False
+    first_node = first_nodes[0]
+    # Now we have the first node. Check this first node.
+ if first_node.op_type != "Conv": + return False + weight_value = helper.find_value_by_name(g, first_node.input[1]) + weight_shape = helper.get_shape_from_value_info(weight_value) + if weight_shape[1] != 3: + return False + # Do weight shuffle + weight_node = helper.find_node_by_output_name(g, weight_value.name) + weight_np = helper.constant_to_numpy(weight_node) + b_channel = np.expand_dims(weight_np[:, 0, :, :], axis=1) + g_channel = np.expand_dims(weight_np[:, 1, :, :], axis=1) + r_channel = np.expand_dims(weight_np[:, 2, :, :], axis=1) + new_np = np.concatenate((r_channel, g_channel, b_channel), axis=1) + new_node = helper.numpy_to_constant(weight_value.name, new_np) + # Replace the weight and topological sort + g.node.remove(weight_node) + g.node.extend([new_node]) + other.topological_sort(g) + return True + + +def change_input_from_bgr_to_rgb(m): + """ + For input channel format BGR model, use this function to modify the model + to accepct RGB image.If the first node is a non-group Conv. + Modify weight to adapt the input into RGB. Otherwise create a new node. + + :param m: the model proto + """ + g = m.graph + if len(g.input) > 1: + print("This model has multiple inputs. Cannot change to RGB input.") + return + input_shape = helper.get_shape_from_value_info(g.input[0]) + if len(input_shape) != 4 or input_shape[1] != 3: + print("The input shape is invalid for bgr conversion.") + return + # Try change conv weight first + if change_first_conv_from_bgr_to_rgb(m): + return + # Otherwise, create a special conv node and replace the input + # Construct weight + weight_np = np.zeros((3, 3, 3, 3)).astype("float32") + weight_np[0, 2, 1, 1] = 1.0 + weight_np[1, 1, 1, 1] = 1.0 + weight_np[2, 0, 1, 1] = 1.0 + new_weight = helper.numpy_to_constant("bgr_shuffle_weight", weight_np) + # Construct Conv + new_conv = onnx.helper.make_node( + "Conv", + ["rgb_input", "bgr_shuffle_weight"], + [g.input[0].name], + name="bgr_shuffle", + dilations=[1, 1], + kernel_shape=[3, 3], + pads=[1, 1, 1, 1], + strides=[1, 1], + ) + # Connect the graph + old_input_value = g.input.pop() + new_input_value = onnx.helper.make_tensor_value_info( + "rgb_input", old_input_value.type.tensor_type.elem_type, input_shape + ) + g.input.extend([new_input_value]) + g.node.extend([new_weight, new_conv]) + # topological sort + other.topological_sort(g) + + +def add_0_5_to_normalized_input(m): + """For normalized input between -0.5 ~ 0.5, add 0.5 to the input to keep it + between 0 ~ 1. + + :param m: the model proto + """ + g = m.graph + if len(g.input) > 1: + print("This model has multiple inputs. Cannot normalize input.") + return + input_shape = helper.get_shape_from_value_info(g.input[0]) + if len(input_shape) != 4: + print("The input shape is not BCHW. 
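+
+
+# Usage sketch for the BGR->RGB conversions above (hypothetical driver code,
+# assuming `import onnx`; file names are placeholders):
+#     m = onnx.load("model_bgr.onnx")
+#     change_input_from_bgr_to_rgb(m)
+#     onnx.save(m, "model_rgb.onnx")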
Cannot normalize input.") + return + # Construct weight + ch = input_shape[1] + weight_np = np.zeros((ch, ch, 3, 3)).astype("float32") + for i in range(ch): + weight_np[i, i, 1, 1] = 1.0 + new_weight = helper.numpy_to_constant("input_norm_weight", weight_np) + # Construct bias + bias_np = np.array([0.5] * ch).astype("float32") + new_bias = helper.numpy_to_constant("input_norm_bias", bias_np) + # Construct Conv + new_conv = onnx.helper.make_node( + "Conv", + ["origin_input", "input_norm_weight", "input_norm_bias"], + [g.input[0].name], + name="input_norm", + dilations=[1, 1], + kernel_shape=[3, 3], + pads=[1, 1, 1, 1], + strides=[1, 1], + ) + # Construct value_infos + old_input_value = g.input.pop() + weight_value = onnx.helper.make_tensor_value_info( + "input_norm_weight", + old_input_value.type.tensor_type.elem_type, + [3, 3, 3, 3], + ) + bias_value = onnx.helper.make_tensor_value_info( + "input_norm_bias", old_input_value.type.tensor_type.elem_type, [3] + ) + # Connect the graph + new_input_value = onnx.helper.make_tensor_value_info( + "origin_input", old_input_value.type.tensor_type.elem_type, input_shape + ) + g.input.extend([new_input_value]) + g.node.extend([new_weight, new_bias, new_conv]) + g.value_info.extend([weight_value, bias_value, old_input_value]) + # topological sort + other.topological_sort(g) + + +def add_rgb2yynn_node(m): + """Add a conv layer which can convert rgb to yynn input.""" + g = m.graph + if len(g.input) > 1: + print("This model has multiple inputs. Cannot change to rgb input.") + return + input_shape = helper.get_shape_from_value_info(g.input[0]) + if len(input_shape) != 4: + print("The input shape is not BCHW. Cannot normalize input.") + return + # Construct weight + weight_np = np.zeros((3, 3, 4, 4)).astype("float32") + weight_np[1, 1, :3, :2] = np.array([[[[0.299], [0.587], [0.114]]]]) + weight_np[1, 1, 3, 2:] = 1.0 + weight_np = np.transpose(weight_np, (3, 2, 0, 1)) + new_weight = helper.numpy_to_constant("input_rgb2yynn_weight", weight_np) + # Construct conv node + new_conv = onnx.helper.make_node( + "Conv", + ["new_input", "input_rgb2yynn_weight"], + [g.input[0].name], + name="input_rgba2yynn", + dilations=[1, 1], + kernel_shape=[3, 3], + pads=[1, 1, 1, 1], + strides=[1, 1], + ) + # Construct value_infos + old_input_value = g.input.pop() + weight_value = onnx.helper.make_tensor_value_info( + "input_rgb2yynn_weight", + old_input_value.type.tensor_type.elem_type, + [4, 4, 3, 3], + ) + # Connect the graph + new_input_value = onnx.helper.make_tensor_value_info( + "new_input", old_input_value.type.tensor_type.elem_type, input_shape + ) + g.input.extend([new_input_value]) + g.node.extend([new_weight, new_conv]) + g.value_info.extend([weight_value, old_input_value]) + # topological sort + other.topological_sort(g) + + +def swap_MatMul_inputs(g, original_matmul_node): + # Create Transpose nodes + input_a_value = helper.find_value_by_name(g, original_matmul_node.input[0]) + input_a_shape = helper.get_shape_from_value_info(input_a_value) + if len(input_a_shape) == 2: + perm = [1, 0] + else: + perm = [0, 2, 1] + new_input_b_node = onnx.helper.make_node( + "Transpose", + inputs=[input_a_value.name], + outputs=[input_a_value.name + "_transposed"], + name=f"{input_a_value.name}_transposed_for_" + f"{original_matmul_node.name}", + perm=perm, + ) + input_b_value = helper.find_value_by_name(g, original_matmul_node.input[1]) + input_b_shape = helper.get_shape_from_value_info(input_b_value) + if len(input_b_shape) == 3: + perm = [0, 2, 1] + else: + perm = [0, 1, 3, 2] + 
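+    # Identity used by this rewrite: A·B == transpose(transpose(B)·transpose(A)),
+    # so the MatMul is rebuilt on transposed inputs and the result is
+    # transposed back by the final Transpose node below.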
new_input_a_node = onnx.helper.make_node( + "Transpose", + inputs=[input_b_value.name], + outputs=[input_b_value.name + "_transposed"], + name=f"{input_b_value.name}_transposed_for_" + f"{original_matmul_node.name}", + perm=perm, + ) + # Create new MatMul node + new_matmul_node = onnx.helper.make_node( + "MatMul", + inputs=[new_input_a_node.output[0], new_input_b_node.output[0]], + outputs=[original_matmul_node.output[0] + "_transposed"], + name=original_matmul_node.name + "_transposed", + ) + # Create final Transpose node + output_value = helper.find_value_by_name(g, original_matmul_node.output[0]) + output_shape = helper.get_shape_from_value_info(output_value) + if len(output_shape) == 3: + perm = [0, 2, 1] + else: + perm = [0, 1, 3, 2] + new_final_transpose_node = onnx.helper.make_node( + "Transpose", + inputs=[new_matmul_node.output[0]], + outputs=[original_matmul_node.output[0]], + name=original_matmul_node.name + "_final_transpose", + perm=perm, + ) + # Add new nodes + g.node.extend( + [ + new_input_a_node, + new_input_b_node, + new_matmul_node, + new_final_transpose_node, + ] + ) + # Delete original nodes + g.node.remove(original_matmul_node) + + +def split_MatMul_batch_then_concat(g, original_matmul_node): + new_nodes = [] + final_concat_inputs = [] + # Get the batch count + input_a_value = helper.find_value_by_name(g, original_matmul_node.input[0]) + input_a_shape = helper.get_shape_from_value_info(input_a_value) + input_b_value = helper.find_value_by_name(g, original_matmul_node.input[1]) + input_b_shape = helper.get_shape_from_value_info(input_b_value) + if len(input_a_shape) == 3: + batch_count = input_a_shape[0] + else: + batch_count = input_a_shape[1] + for i in range(batch_count): + # Create Split nodes for input A + starts_node = helper.list_to_constant( + f"{input_a_value.name}_sliced_{i}_starts", (1,), [i] + ) + ends_node = helper.list_to_constant( + f"{input_a_value.name}_sliced_{i}_ends", (1,), [i + 1] + ) + axes_node = helper.list_to_constant( + f"{input_a_value.name}_sliced_{i}_axes", + (1,), + [len(input_a_shape) - 3], + ) + new_sliced_a_node = onnx.helper.make_node( + "Slice", + inputs=[ + input_a_value.name, + starts_node.output[0], + ends_node.output[0], + axes_node.output[0], + ], + outputs=[f"{input_a_value.name}_sliced_{i}"], + name=f"{input_a_value.name}_sliced_{i}_for_" + f"{original_matmul_node.name}", + ) + new_nodes.extend( + [starts_node, ends_node, axes_node, new_sliced_a_node] + ) + # Create Split nodes for input B + starts_node = helper.list_to_constant( + f"{input_b_value.name}_sliced_{i}_starts", (1,), [i] + ) + ends_node = helper.list_to_constant( + f"{input_b_value.name}_sliced_{i}_ends", (1,), [i + 1] + ) + axes_node = helper.list_to_constant( + f"{input_b_value.name}_sliced_{i}_axes", + (1,), + [len(input_b_shape) - 3], + ) + new_sliced_b_node = onnx.helper.make_node( + "Slice", + inputs=[ + input_b_value.name, + starts_node.output[0], + ends_node.output[0], + axes_node.output[0], + ], + outputs=[f"{input_b_value.name}_sliced_{i}"], + name=f"{input_b_value.name}_sliced_{i}_for_" + f"{original_matmul_node.name}", + ) + new_nodes.extend( + [starts_node, ends_node, axes_node, new_sliced_b_node] + ) + # Create MatMul nodes + new_matmul_node = onnx.helper.make_node( + "MatMul", + inputs=[new_sliced_a_node.output[0], new_sliced_b_node.output[0]], + outputs=[f"{original_matmul_node.output[0]}_sliced_{i}"], + name=f"{original_matmul_node.name}_sliced_{i}", + ) + new_nodes.append(new_matmul_node) + final_concat_inputs.append(new_matmul_node.output[0]) + # 
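Create Concat nodes: stitch the per-batch MatMul slices back together
+    # along the batch axis, keeping the original output name so downstream
+    # consumers stay connected.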
+    output_value = helper.find_value_by_name(g, original_matmul_node.output[0])
+    if output_value is None:
+        output_value = helper.find_output_by_name(
+            g, original_matmul_node.output[0]
+        )
+    if output_value is None:
+        helper.logger.error(
+            f"Cannot find value_info for {original_matmul_node.output[0]}"
+        )
+        return
+    output_shape = helper.get_shape_from_value_info(output_value)
+    new_concat_node = onnx.helper.make_node(
+        "Concat",
+        inputs=final_concat_inputs,
+        outputs=[original_matmul_node.output[0]],
+        name=f"{original_matmul_node.name}_final_concat",
+        axis=len(output_shape) - 3,
+    )
+    new_nodes.append(new_concat_node)
+    # Add new nodes
+    g.node.extend(new_nodes)
+    # Delete original nodes
+    g.node.remove(original_matmul_node)
+
+
+def split_MatMul_Constant_input_then_concat(g, original_matmul_node):
+    new_nodes = []
+    final_concat_inputs = []
+    # Get the batch count
+    input_b_node = helper.find_node_by_output_name(
+        g, original_matmul_node.input[1]
+    )
+    input_b_np = helper.constant_to_numpy(input_b_node)
+    if len(input_b_np.shape) == 3:
+        batch_count = input_b_np.shape[0]
+    else:
+        batch_count = input_b_np.shape[1]
+    for i in range(batch_count):
+        # Create new constant node
+        if len(input_b_np.shape) == 3:
+            new_np = input_b_np[i:i + 1, ...]
+        else:
+            new_np = input_b_np[:, i:i + 1, ...]
+        new_weight = helper.numpy_to_constant(
+            f"{input_b_node.name}_sliced_{i}", new_np
+        )
+        new_nodes.append(new_weight)
+        # Create MatMul nodes
+        new_matmul_node = onnx.helper.make_node(
+            "MatMul",
+            inputs=[original_matmul_node.input[0], new_weight.output[0]],
+            outputs=[f"{original_matmul_node.output[0]}_sliced_{i}"],
+            name=f"{original_matmul_node.name}_sliced_{i}",
+        )
+        new_nodes.append(new_matmul_node)
+        final_concat_inputs.append(new_matmul_node.output[0])
+    # Create Concat nodes
+    output_value = helper.find_value_by_name(g, original_matmul_node.output[0])
+    output_shape = helper.get_shape_from_value_info(output_value)
+    new_concat_node = onnx.helper.make_node(
+        "Concat",
+        inputs=final_concat_inputs,
+        outputs=[original_matmul_node.output[0]],
+        name=f"{original_matmul_node.name}_final_concat",
+        axis=len(output_shape) - 3,
+    )
+    new_nodes.append(new_concat_node)
+    # Add new nodes
+    g.node.extend(new_nodes)
+    # Delete original value info
+    input_b_value = helper.find_value_by_name(g, original_matmul_node.input[1])
+    if input_b_value is not None:
+        g.value_info.remove(input_b_value)
+    # Delete original nodes
+    g.node.remove(original_matmul_node)
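+
+
+# Dispatch overview for special_MatMul_process below (B = batch,
+# H/W/V = matrix dims; a leading batch-1 extra dim is dropped first):
+#   - B is a batched Constant           -> split the constant, then Concat
+#   - A is 2-D or has batch 1           -> swap inputs via Transpose nodes
+#   - A and B share the same batch count -> slice both per batch, then Concat
+# Anything else is left untouched with a warning.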
+def special_MatMul_process(g):
+    for node in g.node:
+        if node.op_type != "MatMul":
+            continue
+        input_a_name = node.input[0]
+        input_a_value = helper.find_value_by_name(g, input_a_name)
+        input_b_name = node.input[1]
+        input_b_value = helper.find_value_by_name(g, input_b_name)
+        if input_a_value is None or input_b_value is None:
+            continue
+        input_a_shape = helper.get_shape_from_value_info(input_a_value)
+        input_b_shape = helper.get_shape_from_value_info(input_b_value)
+        # Check shapes and choose the process
+        # Normal case, Skip
+        if len(input_b_shape) == 2:
+            continue
+        # Too many dimensions or too few dimensions. Not supported. Skip
+        if len(input_a_shape) > 4 or len(input_b_shape) > 4:
+            helper.logger.warning(
+                f"Cannot optimize MatMul {node.name}: "
+                "inputs have too many dimensions."
+            )
+            continue
+        if len(input_a_shape) < 2 or len(input_b_shape) < 2:
+            helper.logger.warning(
+                f"Cannot optimize MatMul {node.name}: "
+                "inputs have too few dimensions."
+            )
+            continue
+        # For 4 dimensions, check the first dimension (should be 1)
+        # and treat it as 3 dimensions.
+        extra_dim = None
+        if len(input_a_shape) == 4:
+            extra_dim = input_a_shape[0]
+            input_a_shape = input_a_shape[1:]
+        if len(input_b_shape) == 4:
+            if input_b_shape[0] != extra_dim:
+                helper.logger.warning(
+                    f"Cannot optimize MatMul {node.name}: "
+                    "input batch dimensions do not match "
+                    f"({extra_dim} vs {input_b_shape[0]})."
+                )
+                continue
+            input_b_shape = input_b_shape[1:]
+        # Check input B dimension
+        # If B is 1 x W x V, it is the same as normal case.
+        if input_b_shape[0] == 1:
+            continue
+        # If B is B x W x V, but B is a constant.
+        input_b_node = helper.find_node_by_output_name(g, input_b_name)
+        if input_b_node is not None and input_b_node.op_type == "Constant":
+            # Constant input
+            helper.logger.debug(
+                f"Optimizing MatMul node {node.name}: split constant input."
+            )
+            split_MatMul_Constant_input_then_concat(g, node)
+        # If B is B x W x V and A is 1 x H x W, do the swap.
+        elif len(input_a_shape) == 2 or (
+            input_a_shape[0] == 1 and (extra_dim is None or extra_dim == 1)
+        ):
+            helper.logger.debug(
+                f"Optimizing MatMul node {node.name}: swap input."
+            )
+            swap_MatMul_inputs(g, node)
+        # If B is B x W x V and A is B x H x W, do the split.
+        elif input_b_shape[0] == input_a_shape[0]:
+            helper.logger.debug(
+                f"Optimizing MatMul node {node.name}: split input batch."
+            )
+            split_MatMul_batch_then_concat(g, node)
+        # Other cases are not supported: If B is B x W x V but A is X x H x W.
+        else:
+            helper.logger.warning(
+                f"Cannot optimize MatMul {node.name}: "
+                "unknown reason. Might be shape mismatch."
+            )
+            continue
+    other.topological_sort(g)
diff --git a/tools/deployment/pytorch2onnx.py b/tools/deployment/pytorch2onnx.py
new file mode 100644
index 0000000..c1789b4
--- /dev/null
+++ b/tools/deployment/pytorch2onnx.py
@@ -0,0 +1,345 @@
+# Copyright (c) OpenMMLab. All rights reserved.
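+# Example invocation (paths are placeholders):
+#   python tools/deployment/pytorch2onnx.py configs/some_config.py ckpt.pth \
+#       --output-file model.onnx --shape 800 1216 --verify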
+import argparse +import os.path as osp +import warnings +from functools import partial + +import numpy as np +import onnx +import torch +from mmcv import Config, DictAction + +from mmdet.core.export import build_model_from_cfg, preprocess_example_input +from mmdet.core.export.model_wrappers import ONNXRuntimeDetector + + +def pytorch2onnx(model, + input_img, + input_shape, + normalize_cfg, + opset_version=11, + show=False, + output_file='tmp.onnx', + verify=False, + test_img=None, + do_simplify=False, + dynamic_export=None, + skip_postprocess=False): + + input_config = { + 'input_shape': input_shape, + 'input_path': input_img, + 'normalize_cfg': normalize_cfg + } + # prepare input + one_img, one_meta = preprocess_example_input(input_config) + img_list, img_meta_list = [one_img], [[one_meta]] + + if skip_postprocess: + warnings.warn('Not all models support export onnx without post ' + 'process, especially two stage detectors!') + model.forward = model.forward_dummy + torch.onnx.export( + model, + one_img, + output_file, + input_names=['input'], + export_params=True, + keep_initializers_as_inputs=True, + do_constant_folding=True, + verbose=show, + opset_version=opset_version) + + print(f'Successfully exported ONNX model without ' + f'post process: {output_file}') + return + + # replace original forward function + origin_forward = model.forward + model.forward = partial( + model.forward, + img_metas=img_meta_list, + return_loss=False, + rescale=False) + + output_names = ['dets', 'labels'] + if model.with_mask: + output_names.append('masks') + input_name = 'input' + dynamic_axes = None + if dynamic_export: + dynamic_axes = { + input_name: { + 0: 'batch', + 2: 'height', + 3: 'width' + }, + 'dets': { + 0: 'batch', + 1: 'num_dets', + }, + 'labels': { + 0: 'batch', + 1: 'num_dets', + }, + } + if model.with_mask: + dynamic_axes['masks'] = {0: 'batch', 1: 'num_dets'} + + torch.onnx.export( + model, + img_list, + output_file, + input_names=[input_name], + output_names=output_names, + export_params=True, + keep_initializers_as_inputs=True, + do_constant_folding=True, + verbose=show, + opset_version=opset_version, + dynamic_axes=dynamic_axes) + + model.forward = origin_forward + + # get the custom op path + ort_custom_op_path = '' + try: + from mmcv.ops import get_onnxruntime_op_path + ort_custom_op_path = get_onnxruntime_op_path() + except (ImportError, ModuleNotFoundError): + warnings.warn('If input model has custom op from mmcv, \ + you may have to build mmcv with ONNXRuntime from source.') + + if do_simplify: + import onnxsim + + from mmdet import digit_version + + min_required_version = '0.3.0' + assert digit_version(onnxsim.__version__) >= digit_version( + min_required_version + ), f'Requires to install onnx-simplify>={min_required_version}' + + input_dic = {'input': img_list[0].detach().cpu().numpy()} + model_opt, check_ok = onnxsim.simplify( + output_file, + input_data=input_dic, + custom_lib=ort_custom_op_path, + dynamic_input_shape=dynamic_export) + if check_ok: + onnx.save(model_opt, output_file) + print(f'Successfully simplified ONNX model: {output_file}') + else: + warnings.warn('Failed to simplify ONNX model.') + print(f'Successfully exported ONNX model: {output_file}') + + if verify: + # check by onnx + onnx_model = onnx.load(output_file) + onnx.checker.check_model(onnx_model) + + # wrap onnx model + onnx_model = ONNXRuntimeDetector(output_file, model.CLASSES, 0) + if dynamic_export: + # scale up to test dynamic shape + h, w = [int((_ * 1.5) // 32 * 32) for _ in input_shape[2:]] + h, w 
= min(1344, h), min(1344, w) + input_config['input_shape'] = (1, 3, h, w) + + if test_img is None: + input_config['input_path'] = input_img + + # prepare input once again + one_img, one_meta = preprocess_example_input(input_config) + img_list, img_meta_list = [one_img], [[one_meta]] + + # get pytorch output + with torch.no_grad(): + pytorch_results = model( + img_list, + img_metas=img_meta_list, + return_loss=False, + rescale=True)[0] + + img_list = [_.cuda().contiguous() for _ in img_list] + if dynamic_export: + img_list = img_list + [_.flip(-1).contiguous() for _ in img_list] + img_meta_list = img_meta_list * 2 + # get onnx output + onnx_results = onnx_model( + img_list, img_metas=img_meta_list, return_loss=False)[0] + # visualize predictions + score_thr = 0.3 + if show: + out_file_ort, out_file_pt = None, None + else: + out_file_ort, out_file_pt = 'show-ort.png', 'show-pt.png' + + show_img = one_meta['show_img'] + model.show_result( + show_img, + pytorch_results, + score_thr=score_thr, + show=True, + win_name='PyTorch', + out_file=out_file_pt) + onnx_model.show_result( + show_img, + onnx_results, + score_thr=score_thr, + show=True, + win_name='ONNXRuntime', + out_file=out_file_ort) + + # compare a part of result + if model.with_mask: + compare_pairs = list(zip(onnx_results, pytorch_results)) + else: + compare_pairs = [(onnx_results, pytorch_results)] + err_msg = 'The numerical values are different between Pytorch' + \ + ' and ONNX, but it does not necessarily mean the' + \ + ' exported ONNX model is problematic.' + # check the numerical value + for onnx_res, pytorch_res in compare_pairs: + for o_res, p_res in zip(onnx_res, pytorch_res): + np.testing.assert_allclose( + o_res, p_res, rtol=1e-03, atol=1e-05, err_msg=err_msg) + print('The numerical values are the same between Pytorch and ONNX') + + +def parse_normalize_cfg(test_pipeline): + transforms = None + for pipeline in test_pipeline: + if 'transforms' in pipeline: + transforms = pipeline['transforms'] + break + assert transforms is not None, 'Failed to find `transforms`' + norm_config_li = [_ for _ in transforms if _['type'] == 'Normalize'] + assert len(norm_config_li) == 1, '`norm_config` should only have one' + norm_config = norm_config_li[0] + return norm_config + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Convert MMDetection models to ONNX') + parser.add_argument('config', help='test config file path') + parser.add_argument('checkpoint', help='checkpoint file') + parser.add_argument('--input-img', type=str, help='Images for input') + parser.add_argument( + '--show', + action='store_true', + help='Show onnx graph and detection outputs') + parser.add_argument('--output-file', type=str, default='tmp.onnx') + parser.add_argument('--opset-version', type=int, default=11) + parser.add_argument( + '--test-img', type=str, default=None, help='Images for test') + parser.add_argument( + '--dataset', + type=str, + default='coco', + help='Dataset name. 
This argument is deprecated and will be removed \
+        in future releases.')
+    parser.add_argument(
+        '--verify',
+        action='store_true',
+        help='verify the onnx model output against pytorch output')
+    parser.add_argument(
+        '--simplify',
+        action='store_true',
+        help='Whether to simplify onnx model.')
+    parser.add_argument(
+        '--shape',
+        type=int,
+        nargs='+',
+        default=[800, 1216],
+        help='input image size')
+    parser.add_argument(
+        '--mean',
+        type=float,
+        nargs='+',
+        default=[123.675, 116.28, 103.53],
+        help='mean value used for preprocessing input data. This argument \
+        is deprecated and will be removed in future releases.')
+    parser.add_argument(
+        '--std',
+        type=float,
+        nargs='+',
+        default=[58.395, 57.12, 57.375],
+        help='standard deviation value used for preprocessing input data. '
+        'This argument is deprecated and will be removed in future releases.')
+    parser.add_argument(
+        '--cfg-options',
+        nargs='+',
+        action=DictAction,
+        help='Override some settings in the used config, the key-value pair '
+        'in xxx=yyy format will be merged into config file. If the value to '
+        'be overwritten is a list, it should be like key="[a,b]" or key=a,b '
+        'It also allows nested list/tuple values, e.g. key="[(a,b),(c,d)]" '
+        'Note that the quotation marks are necessary and that no white space '
+        'is allowed.')
+    parser.add_argument(
+        '--dynamic-export',
+        action='store_true',
+        help='Whether to export onnx with dynamic axis.')
+    parser.add_argument(
+        '--skip-postprocess',
+        action='store_true',
+        help='Whether to export model without post process. Experimental '
+        'option. We do not guarantee the correctness of the exported '
+        'model.')
+    args = parser.parse_args()
+    return args
+
+
+if __name__ == '__main__':
+    args = parse_args()
+    warnings.warn('Arguments like `--mean`, `--std`, `--dataset` would be \
+        parsed directly from config file and are deprecated and \
+        will be removed in future releases.')
+
+    assert args.opset_version == 11, 'MMDet only supports opset 11 now'
+
+    try:
+        from mmcv.onnx.symbolic import register_extra_symbolics
+    except ModuleNotFoundError:
+        raise NotImplementedError('please update mmcv to version>=v1.0.4')
+    register_extra_symbolics(args.opset_version)
+
+    cfg = Config.fromfile(args.config)
+    if args.cfg_options is not None:
+        cfg.merge_from_dict(args.cfg_options)
+
+    if args.shape is None:
+        img_scale = cfg.test_pipeline[1]['img_scale']
+        input_shape = (1, 3, img_scale[1], img_scale[0])
+    elif len(args.shape) == 1:
+        input_shape = (1, 3, args.shape[0], args.shape[0])
+    elif len(args.shape) == 2:
+        input_shape = (1, 3) + tuple(args.shape)
+    else:
+        raise ValueError('invalid input shape')
+
+    # build the model and load checkpoint
+    model = build_model_from_cfg(args.config, args.checkpoint,
+                                 args.cfg_options)
+
+    if not args.input_img:
+        args.input_img = osp.join(osp.dirname(__file__), '../../demo/demo.jpg')
+
+    normalize_cfg = parse_normalize_cfg(cfg.test_pipeline)
+
+    # convert model to onnx file
+    pytorch2onnx(
+        model,
+        args.input_img,
+        input_shape,
+        normalize_cfg,
+        opset_version=args.opset_version,
+        show=args.show,
+        output_file=args.output_file,
+        verify=args.verify,
+        test_img=args.test_img,
+        do_simplify=args.simplify,
+        dynamic_export=args.dynamic_export,
+        skip_postprocess=args.skip_postprocess)
diff --git a/tools/deployment/pytorch2onnx_kneron.py b/tools/deployment/pytorch2onnx_kneron.py
new file mode 100644
index 0000000..51ea751
--- /dev/null
+++ b/tools/deployment/pytorch2onnx_kneron.py
@@ -0,0 +1,494 @@
+# All modification made by Kneron Corp.: Copyright (c) 
2022 Kneron Corp. +# Copyright (c) OpenMMLab. All rights reserved. +import argparse +import os.path as osp +import warnings +from functools import partial + +import numpy as np +import onnx +import torch +from mmcv import Config, DictAction + +from mmdet.core.export import build_model_from_cfg, preprocess_example_input +from mmdet.core.export.model_wrappers import ONNXRuntimeDetector + +from optimizer_scripts.tools import other +from optimizer_scripts.pytorch_exported_onnx_preprocess import ( + torch_exported_onnx_flow, +) + + +def pytorch2onnx( + model, + input_img, + input_shape, + normalize_cfg, + opset_version=11, + show=False, + output_file="tmp.onnx", + verify=False, + test_img=None, + do_simplify=False, + dynamic_export=None, + skip_postprocess=False, + in_model_preprocess=False, +): + + input_config = { + "input_shape": input_shape, + "input_path": input_img, + "normalize_cfg": normalize_cfg, + } + # prepare input + one_img, one_meta = preprocess_example_input(input_config) + img_list, img_meta_list = [one_img], [[one_meta]] + + if skip_postprocess: + warnings.warn( + "Not all models support export onnx without post " + "process, especially two stage detectors!" + ) + model.forward = model.forward_dummy + torch.onnx.export( + model, + one_img, + output_file, + input_names=["input"], + export_params=True, + keep_initializers_as_inputs=False, + do_constant_folding=False, + verbose=show, + opset_version=11, + ) + + print( + "Successfully exported ONNX model without " + f"post process: {output_file}" + ) + + import onnxsim + from mmdet import digit_version + + min_required_version = "0.3.0" + assert digit_version(onnxsim.__version__) >= digit_version( + min_required_version + ), f"Requires to install onnx-simplify>={min_required_version}" + + input_dic = {"input": img_list[0].detach().cpu().numpy()} + model_opt, check_ok = onnxsim.simplify( + output_file, input_data=input_dic, + dynamic_input_shape=dynamic_export + ) + if check_ok: + onnx.save(model_opt, output_file) + print(f"Successfully simplified ONNX model: {output_file}") + else: + warnings.warn("Failed to simplify ONNX model.") + print(f"Successfully exported ONNX model: {output_file}") + # print(normalize_cfg) + m = onnx.load(output_file) + print(len(m.graph.input)) + m = torch_exported_onnx_flow(m, disable_fuse_bn=False) + + if len(m.graph.input) > 1: + raise ValueError( + "'--pixel-bias-value' and '--pixel-scale-value' " + "only support one input node model currently" + ) + + if in_model_preprocess: + print( + "adding BN for doing input data normalization".center(79, '-') + ) + + mean = normalize_cfg["mean"] + std = normalize_cfg["std"] + + i_n = m.graph.input[0] + if i_n.type.tensor_type.shape.dim[1].dim_value != len( + mean + ) or i_n.type.tensor_type.shape.dim[1].dim_value != len(std): + raise ValueError( + "--pixel-bias-value (" + + str(mean) + + ") and --pixel-scale-value (" + + str(std) + + ") should be same as input dimension:" + + str(i_n.type.tensor_type.shape.dim[1].dim_value) + ) + + # add 128 for changing input range from 0~255 to -128~127 (int8) + # due to quantization limitation + norm_bn_bias = [ + -1 * cm / cs + 128. / cs for cm, cs in zip(mean, std) + ] + norm_bn_scale = [1 / cs for cs in std] + + other.add_bias_scale_bn_after( + m.graph, i_n.name, norm_bn_bias, norm_bn_scale + ) + m = other.polish_model(m) + + onnx_out = output_file + onnx.helper.set_model_props( + m, + { + "Kn. T.P. 
version": " MMDetection_KN v0.1.0", + "in-model-preproc": str(in_model_preprocess), + }, + ) + onnx.save(m, onnx_out) + print("exported success: ", onnx_out) + + if verify: + import onnxruntime as ort + + onnx_model = onnx.load(output_file) + onnx.checker.check_model(onnx_model) + with torch.no_grad(): + if in_model_preprocess: + bn = torch.nn.BatchNorm2d(3) + bn.weight[:] = torch.as_tensor( + norm_bn_scale, dtype=bn.weight.dtype + ) + bn.bias[:] = torch.as_tensor( + norm_bn_bias, dtype=bn.bias.dtype + ) + model = torch.nn.Sequential(bn, model).eval() + + pth_outs = model(one_img) + + def recursive_numpy(ctxs): + if isinstance(ctxs, torch.Tensor): + return ctxs.numpy() + ctxs = [recursive_numpy(ctx) for ctx in ctxs] + return ctxs + + pth_outs = recursive_numpy(pth_outs) + + # NOTE: flatten if nested structure + if not isinstance(pth_outs[0], torch.Tensor): + pth_outs = [pth_out for _ in pth_outs for pth_out in _] + + input_all = [node.name for node in onnx_model.graph.input] + input_initializer = [ + node.name for node in onnx_model.graph.initializer + ] + net_feed_input = list(set(input_all) - set(input_initializer)) + assert len(net_feed_input) == 1 + sess = ort.InferenceSession( + output_file, providers=["CPUExecutionProvider"] + ) + ort_outs = sess.run( + None, {net_feed_input[0]: one_img.detach().numpy()} + ) + err_msg = ( + "The numerical values are different between Pytorch" + + " and ONNX, but it does not necessarily mean the" + + " exported ONNX model is problematic." + ) + for ort_out, pth_out in zip(ort_outs, pth_outs): + np.testing.assert_allclose( + ort_out, pth_out, rtol=1e-02, atol=1e-04, err_msg=err_msg + ) + print("The numerical values are the same between Pytorch and ONNX") + + return + + # replace original forward function + origin_forward = model.forward + model.forward = partial( + model.forward, img_metas=img_meta_list, + return_loss=False, rescale=False + ) + + output_names = ["dets", "labels"] + if model.with_mask: + output_names.append("masks") + input_name = "input" + dynamic_axes = None + if dynamic_export: + dynamic_axes = { + input_name: {0: "batch", 2: "height", 3: "width"}, + "dets": { + 0: "batch", + 1: "num_dets", + }, + "labels": { + 0: "batch", + 1: "num_dets", + }, + } + if model.with_mask: + dynamic_axes["masks"] = {0: "batch", 1: "num_dets"} + + torch.onnx.export( + model, + img_list, + output_file, + input_names=[input_name], + output_names=output_names, + export_params=True, + keep_initializers_as_inputs=True, + do_constant_folding=True, + verbose=show, + opset_version=opset_version, + dynamic_axes=dynamic_axes, + ) + + model.forward = origin_forward + + # get the custom op path + ort_custom_op_path = "" + try: + from mmcv.ops import get_onnxruntime_op_path + + ort_custom_op_path = get_onnxruntime_op_path() + except (ImportError, ModuleNotFoundError): + warnings.warn( + "If input model has custom op from mmcv, \ + you may have to build mmcv with ONNXRuntime from source." 
+ ) + + if do_simplify: + import onnxsim + + from mmdet import digit_version + + min_required_version = "0.3.0" + assert digit_version(onnxsim.__version__) >= digit_version( + min_required_version + ), f"Requires to install onnx-simplify>={min_required_version}" + + input_dic = {"input": img_list[0].detach().cpu().numpy()} + model_opt, check_ok = onnxsim.simplify( + output_file, + input_data=input_dic, + custom_lib=ort_custom_op_path, + dynamic_input_shape=dynamic_export, + ) + if check_ok: + onnx.save(model_opt, output_file) + print(f"Successfully simplified ONNX model: {output_file}") + else: + warnings.warn("Failed to simplify ONNX model.") + print(f"Successfully exported ONNX model: {output_file}") + + if verify: + # check by onnx + onnx_model = onnx.load(output_file) + onnx.checker.check_model(onnx_model) + + # wrap onnx model + onnx_model = ONNXRuntimeDetector(output_file, model.CLASSES, 0) + if dynamic_export: + # scale up to test dynamic shape + h, w = [int((_ * 1.5) // 32 * 32) for _ in input_shape[2:]] + h, w = min(1344, h), min(1344, w) + input_config["input_shape"] = (1, 3, h, w) + + if test_img is None: + input_config["input_path"] = input_img + + # prepare input once again + one_img, one_meta = preprocess_example_input(input_config) + img_list, img_meta_list = [one_img], [[one_meta]] + + # get pytorch output + with torch.no_grad(): + pytorch_results = model( + img_list, img_metas=img_meta_list, + return_loss=False, rescale=True + )[0] + + img_list = [_.cuda().contiguous() for _ in img_list] + if dynamic_export: + img_list = img_list + [_.flip(-1).contiguous() for _ in img_list] + img_meta_list = img_meta_list * 2 + # get onnx output + onnx_results = onnx_model( + img_list, img_metas=img_meta_list, return_loss=False + )[0] + # visualize predictions + score_thr = 0.3 + if show: + out_file_ort, out_file_pt = None, None + else: + out_file_ort, out_file_pt = "show-ort.png", "show-pt.png" + + show_img = one_meta["show_img"] + model.show_result( + show_img, + pytorch_results, + score_thr=score_thr, + show=True, + win_name="PyTorch", + out_file=out_file_pt, + ) + onnx_model.show_result( + show_img, + onnx_results, + score_thr=score_thr, + show=True, + win_name="ONNXRuntime", + out_file=out_file_ort, + ) + + # compare a part of result + if model.with_mask: + compare_pairs = list(zip(onnx_results, pytorch_results)) + else: + compare_pairs = [(onnx_results, pytorch_results)] + err_msg = ( + "The numerical values are different between Pytorch" + + " and ONNX, but it does not necessarily mean the" + + " exported ONNX model is problematic." 
+    )
+    # check the numerical value
+    for onnx_res, pytorch_res in compare_pairs:
+        for o_res, p_res in zip(onnx_res, pytorch_res):
+            np.testing.assert_allclose(
+                o_res, p_res, rtol=1e-03, atol=1e-05, err_msg=err_msg
+            )
+    print("The numerical values are the same between Pytorch and ONNX")
+
+
+def parse_normalize_cfg(test_pipeline):
+    transforms = None
+    for pipeline in test_pipeline:
+        if "transforms" in pipeline:
+            transforms = pipeline["transforms"]
+            break
+    assert transforms is not None, "Failed to find `transforms`"
+    norm_config_li = [_ for _ in transforms if _["type"] == "Normalize"]
+    assert len(norm_config_li) == 1, "`norm_config` should only have one"
+    norm_config = norm_config_li[0]
+    return norm_config
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(
+        description="Convert MMDetection models to ONNX"
+    )
+    parser.add_argument("config", help="test config file path")
+    parser.add_argument("checkpoint", help="checkpoint file")
+    parser.add_argument("--input-img", type=str, help="Images for input")
+    parser.add_argument(
+        "--show", action="store_true",
+        help="Show onnx graph and detection outputs"
+    )
+    parser.add_argument("--output-file", type=str, default="tmp.onnx")
+    parser.add_argument("--opset-version", type=int, default=11)
+    parser.add_argument("--test-img", type=str, default=None,
+                        help="Images for test")
+    parser.add_argument(
+        "--dataset",
+        type=str,
+        default="coco",
+        help="Dataset name. This argument is deprecated and will be removed \
+        in future releases.",
+    )
+    parser.add_argument(
+        "--verify",
+        action="store_true",
+        help="verify the onnx model output against pytorch output",
+    )
+    parser.add_argument(
+        "--simplify", action="store_true",
+        help="Whether to simplify onnx model."
+    )
+    parser.add_argument(
+        "--shape", type=int, nargs="+",
+        default=None, help="input image size"
+    )
+    parser.add_argument(
+        "--cfg-options",
+        nargs="+",
+        action=DictAction,
+        help="Override some settings in the used config, the key-value pair "
+        "in xxx=yyy format will be merged into config file. If the value to "
+        'be overwritten is a list, it should be like key="[a,b]" or key=a,b '
+        'It also allows nested list/tuple values, e.g. key="[(a,b),(c,d)]" '
+        "Note that the quotation marks are necessary and that no white space "
+        "is allowed.",
+    )
+    parser.add_argument(
+        "--dynamic-export",
+        action="store_true",
+        help="Whether to export onnx with dynamic axis.",
+    )
+    parser.add_argument(
+        "--skip-postprocess",
+        action="store_true",
+        help="Whether to export model without post process. Experimental "
+        "option. We do not guarantee the correctness of the exported "
+        "model.",
+    )
+    parser.add_argument(
+        "--in-model-preprocess",
+        action="store_true",
+        help="Add a BatchNormalization layer in front of the model to do "
+        "input data preprocessing (normalization) according to the "
+        "normalization values in the config.",
+    )
+    args = parser.parse_args()
+    return args
+
+
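+# Example invocation (paths are placeholders):
+#   python tools/deployment/pytorch2onnx_kneron.py cfg.py ckpt.pth \
+#       --output-file out.onnx --skip-postprocess --in-model-preprocess --verify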
+if __name__ == "__main__":
+    args = parse_args()
+    warnings.warn(
+        "Arguments like `--skip-postprocess`, `--dataset` would be \
+        parsed directly from config file and are deprecated and \
+        will be removed in future releases."
+    )
+
+    assert args.opset_version == 11, "MMDet only supports opset 11 now"
+
+    try:
+        from mmcv.onnx.symbolic import register_extra_symbolics
+    except ModuleNotFoundError:
+        raise NotImplementedError("please update mmcv to version>=v1.0.4")
+    register_extra_symbolics(args.opset_version)
+
+    cfg = Config.fromfile(args.config)
+    if args.cfg_options is not None:
+        cfg.merge_from_dict(args.cfg_options)
+
+    if args.shape is None:
+        img_scale = cfg.test_pipeline[1]["img_scale"]
+        input_shape = (1, 3, img_scale[1], img_scale[0])
+    elif len(args.shape) == 1:
+        input_shape = (1, 3, args.shape[0], args.shape[0])
+    elif len(args.shape) == 2:
+        input_shape = (1, 3) + tuple(args.shape)
+    else:
+        raise ValueError("invalid input shape")
+
+    # build the model and load checkpoint
+    model = build_model_from_cfg(
+        args.config, args.checkpoint, args.cfg_options
+    )
+
+    if not args.input_img:
+        args.input_img = osp.join(osp.dirname(__file__), "../../demo/demo.jpg")
+
+    normalize_cfg = parse_normalize_cfg(cfg.test_pipeline)
+
+    # convert model to onnx file
+    pytorch2onnx(
+        model,
+        args.input_img,
+        input_shape,
+        normalize_cfg,
+        opset_version=args.opset_version,
+        show=args.show,
+        output_file=args.output_file,
+        verify=args.verify,
+        test_img=args.test_img,
+        do_simplify=args.simplify,
+        dynamic_export=args.dynamic_export,
+        skip_postprocess=args.skip_postprocess,
+        in_model_preprocess=args.in_model_preprocess,
+    )
diff --git a/tools/deployment/test.py b/tools/deployment/test.py
new file mode 100644
index 0000000..b32b773
--- /dev/null
+++ b/tools/deployment/test.py
@@ -0,0 +1,143 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import argparse
+
+import mmcv
+from mmcv import Config, DictAction
+from mmcv.parallel import MMDataParallel
+
+from mmdet.apis import single_gpu_test
+from mmdet.datasets import (build_dataloader, build_dataset,
+                            replace_ImageToTensor)
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(
+        description='MMDet test (and eval) an ONNX model using ONNXRuntime')
+    parser.add_argument('config', help='test config file path')
+    parser.add_argument('model', help='Input model file')
+    parser.add_argument('--out', help='output result file in pickle format')
+    parser.add_argument(
+        '--format-only',
+        action='store_true',
+        help='Format the output results without performing evaluation. It is '
+        'useful when you want to format the result to a specific format and '
+        'submit it to the test server')
+    parser.add_argument(
+        '--backend',
+        required=True,
+        choices=['onnxruntime', 'tensorrt'],
+        help='Backend for input model to run. ')
+    parser.add_argument(
+        '--eval',
+        type=str,
+        nargs='+',
+        help='evaluation metrics, which depends on the dataset, e.g., "bbox",'
+        ' "segm", "proposal" for COCO, and "mAP", "recall" for PASCAL VOC')
+    parser.add_argument('--show', action='store_true', help='show results')
+    parser.add_argument(
+        '--show-dir', help='directory where painted images will be saved')
+    parser.add_argument(
+        '--show-score-thr',
+        type=float,
+        default=0.3,
+        help='score threshold (default: 0.3)')
+    parser.add_argument(
+        '--cfg-options',
+        nargs='+',
+        action=DictAction,
+        help='override some settings in the used config, the key-value pair '
+        'in xxx=yyy format will be merged into config file. If the value to '
+        'be overwritten is a list, it should be like key="[a,b]" or key=a,b '
+        'It also allows nested list/tuple values, e.g. 
key="[(a,b),(c,d)]" ' + 'Note that the quotation marks are necessary and that no white space ' + 'is allowed.') + parser.add_argument( + '--eval-options', + nargs='+', + action=DictAction, + help='custom options for evaluation, the key-value pair in xxx=yyy ' + 'format will be kwargs for dataset.evaluate() function') + + args = parser.parse_args() + return args + + +def main(): + args = parse_args() + + assert args.out or args.eval or args.format_only or args.show \ + or args.show_dir, \ + ('Please specify at least one operation (save/eval/format/show the ' + 'results / save the results) with the argument "--out", "--eval"' + ', "--format-only", "--show" or "--show-dir"') + + if args.eval and args.format_only: + raise ValueError('--eval and --format_only cannot be both specified') + + if args.out is not None and not args.out.endswith(('.pkl', '.pickle')): + raise ValueError('The output file must be a pkl file.') + + cfg = Config.fromfile(args.config) + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + + # in case the test dataset is concatenated + samples_per_gpu = 1 + if isinstance(cfg.data.test, dict): + cfg.data.test.test_mode = True + samples_per_gpu = cfg.data.test.pop('samples_per_gpu', 1) + if samples_per_gpu > 1: + # Replace 'ImageToTensor' to 'DefaultFormatBundle' + cfg.data.test.pipeline = replace_ImageToTensor( + cfg.data.test.pipeline) + elif isinstance(cfg.data.test, list): + for ds_cfg in cfg.data.test: + ds_cfg.test_mode = True + samples_per_gpu = max( + [ds_cfg.pop('samples_per_gpu', 1) for ds_cfg in cfg.data.test]) + if samples_per_gpu > 1: + for ds_cfg in cfg.data.test: + ds_cfg.pipeline = replace_ImageToTensor(ds_cfg.pipeline) + + # build the dataloader + dataset = build_dataset(cfg.data.test) + data_loader = build_dataloader( + dataset, + samples_per_gpu=samples_per_gpu, + workers_per_gpu=cfg.data.workers_per_gpu, + dist=False, + shuffle=False) + + if args.backend == 'onnxruntime': + from mmdet.core.export.model_wrappers import ONNXRuntimeDetector + model = ONNXRuntimeDetector( + args.model, class_names=dataset.CLASSES, device_id=0) + elif args.backend == 'tensorrt': + from mmdet.core.export.model_wrappers import TensorRTDetector + model = TensorRTDetector( + args.model, class_names=dataset.CLASSES, device_id=0) + + model = MMDataParallel(model, device_ids=[0]) + outputs = single_gpu_test(model, data_loader, args.show, args.show_dir, + args.show_score_thr) + + if args.out: + print(f'\nwriting results to {args.out}') + mmcv.dump(outputs, args.out) + kwargs = {} if args.eval_options is None else args.eval_options + if args.format_only: + dataset.format_results(outputs, **kwargs) + if args.eval: + eval_kwargs = cfg.get('evaluation', {}).copy() + # hard-code way to remove EvalHook args + for key in [ + 'interval', 'tmpdir', 'start', 'gpu_collect', 'save_best', + 'rule' + ]: + eval_kwargs.pop(key, None) + eval_kwargs.update(dict(metric=args.eval, **kwargs)) + print(dataset.evaluate(outputs, **eval_kwargs)) + + +if __name__ == '__main__': + main() diff --git a/tools/deployment/test_torchserver.py b/tools/deployment/test_torchserver.py new file mode 100644 index 0000000..dd45234 --- /dev/null +++ b/tools/deployment/test_torchserver.py @@ -0,0 +1,74 @@ +from argparse import ArgumentParser + +import numpy as np +import requests + +from mmdet.apis import inference_detector, init_detector, show_result_pyplot +from mmdet.core import bbox2result + + +def parse_args(): + parser = ArgumentParser() + parser.add_argument('img', help='Image file') + 
parser.add_argument('config', help='Config file') + parser.add_argument('checkpoint', help='Checkpoint file') + parser.add_argument('model_name', help='The model name in the server') + parser.add_argument( + '--inference-addr', + default='127.0.0.1:8080', + help='Address and port of the inference server') + parser.add_argument( + '--device', default='cuda:0', help='Device used for inference') + parser.add_argument( + '--score-thr', type=float, default=0.5, help='bbox score threshold') + args = parser.parse_args() + return args + + +def parse_result(input, model_class): + bbox = [] + label = [] + score = [] + for anchor in input: + bbox.append(anchor['bbox']) + label.append(model_class.index(anchor['class_name'])) + score.append([anchor['score']]) + bboxes = np.append(bbox, score, axis=1) + labels = np.array(label) + result = bbox2result(bboxes, labels, len(model_class)) + return result + + +def main(args): + # build the model from a config file and a checkpoint file + model = init_detector(args.config, args.checkpoint, device=args.device) + # test a single image + model_result = inference_detector(model, args.img) + for i, anchor_set in enumerate(model_result): + anchor_set = anchor_set[anchor_set[:, 4] >= 0.5] + model_result[i] = anchor_set + # show the results + show_result_pyplot( + model, + args.img, + model_result, + score_thr=args.score_thr, + title='pytorch_result') + url = 'http://' + args.inference_addr + '/predictions/' + args.model_name + with open(args.img, 'rb') as image: + response = requests.post(url, image) + server_result = parse_result(response.json(), model.CLASSES) + show_result_pyplot( + model, + args.img, + server_result, + score_thr=args.score_thr, + title='server_result') + + for i in range(len(model.CLASSES)): + assert np.allclose(model_result[i], server_result[i]) + + +if __name__ == '__main__': + args = parse_args() + main(args) diff --git a/tools/dist_test.sh b/tools/dist_test.sh new file mode 100644 index 0000000..3c74ec6 --- /dev/null +++ b/tools/dist_test.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +CONFIG=$1 +CHECKPOINT=$2 +GPUS=$3 +PORT=${PORT:-29500} + +PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \ +python -m torch.distributed.launch --nproc_per_node=$GPUS --master_port=$PORT \ + $(dirname "$0")/test.py $CONFIG $CHECKPOINT --launcher pytorch ${@:4} diff --git a/tools/dist_train.sh b/tools/dist_train.sh new file mode 100644 index 0000000..5b43fff --- /dev/null +++ b/tools/dist_train.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +CONFIG=$1 +GPUS=$2 +PORT=${PORT:-29500} + +PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \ +python -m torch.distributed.launch --nproc_per_node=$GPUS --master_port=$PORT \ + $(dirname "$0")/train.py $CONFIG --launcher pytorch ${@:3} diff --git a/tools/misc/browse_dataset.py b/tools/misc/browse_dataset.py new file mode 100644 index 0000000..aebe9cf --- /dev/null +++ b/tools/misc/browse_dataset.py @@ -0,0 +1,105 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
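+# Example invocation (config path is a placeholder):
+#   python tools/misc/browse_dataset.py configs/some_config.py \
+#       --output-dir work_dirs/vis --not-show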
+import argparse
+import os
+from collections.abc import Sequence
+from pathlib import Path
+
+import mmcv
+from mmcv import Config, DictAction
+
+from mmdet.core.utils import mask2ndarray
+from mmdet.core.visualization import imshow_det_bboxes
+from mmdet.datasets.builder import build_dataset
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(description='Browse a dataset')
+    parser.add_argument('config', help='train config file path')
+    parser.add_argument(
+        '--skip-type',
+        type=str,
+        nargs='+',
+        default=['DefaultFormatBundle', 'Normalize', 'Collect'],
+        help='skip some useless pipeline steps')
+    parser.add_argument(
+        '--output-dir',
+        default=None,
+        type=str,
+        help='If there is no display interface, you can save it')
+    parser.add_argument('--not-show', default=False, action='store_true')
+    parser.add_argument(
+        '--show-interval',
+        type=float,
+        default=2,
+        help='the interval of show (s)')
+    parser.add_argument(
+        '--cfg-options',
+        nargs='+',
+        action=DictAction,
+        help='override some settings in the used config, the key-value pair '
+        'in xxx=yyy format will be merged into config file. If the value to '
+        'be overwritten is a list, it should be like key="[a,b]" or key=a,b '
+        'It also allows nested list/tuple values, e.g. key="[(a,b),(c,d)]" '
+        'Note that the quotation marks are necessary and that no white space '
+        'is allowed.')
+    args = parser.parse_args()
+    return args
+
+
+def retrieve_data_cfg(config_path, skip_type, cfg_options):
+
+    def skip_pipeline_steps(config):
+        config['pipeline'] = [
+            x for x in config.pipeline if x['type'] not in skip_type
+        ]
+
+    cfg = Config.fromfile(config_path)
+    if cfg_options is not None:
+        cfg.merge_from_dict(cfg_options)
+    train_data_cfg = cfg.data.train
+    while 'dataset' in train_data_cfg and train_data_cfg[
+            'type'] != 'MultiImageMixDataset':
+        train_data_cfg = train_data_cfg['dataset']
+
+    if isinstance(train_data_cfg, Sequence):
+        [skip_pipeline_steps(c) for c in train_data_cfg]
+    else:
+        skip_pipeline_steps(train_data_cfg)
+
+    return cfg
+
+
+def main():
+    args = parse_args()
+    cfg = retrieve_data_cfg(args.config, args.skip_type, args.cfg_options)
+
+    dataset = build_dataset(cfg.data.train)
+
+    progress_bar = mmcv.ProgressBar(len(dataset))
+
+    for item in dataset:
+        filename = os.path.join(
+            args.output_dir,
+            Path(item['filename']).name
+        ) if args.output_dir is not None else None
+
+        gt_masks = item.get('gt_masks', None)
+        if gt_masks is not None:
+            gt_masks = mask2ndarray(gt_masks)
+
+        imshow_det_bboxes(
+            item['img'],
+            item['gt_bboxes'],
+            item['gt_labels'],
+            gt_masks,
+            class_names=dataset.CLASSES,
+            show=not args.not_show,
+            wait_time=args.show_interval,
+            out_file=filename,
+            bbox_color=(255, 102, 61),
+            text_color=(255, 102, 61))
+
+        progress_bar.update()
+
+
+if __name__ == '__main__':
+    main()
diff --git a/tools/misc/download_dataset.py b/tools/misc/download_dataset.py
new file mode 100644
index 0000000..09c777d
--- /dev/null
+++ b/tools/misc/download_dataset.py
@@ -0,0 +1,102 @@
+import argparse
+from itertools import repeat
+from multiprocessing.pool import ThreadPool
+from pathlib import Path
+from tarfile import TarFile
+from zipfile import ZipFile
+
+import torch
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(
+        description='Download datasets for training')
+    parser.add_argument(
+        '--dataset-name', type=str, help='dataset name', default='coco2017')
+    parser.add_argument(
+        '--save-dir',
+        type=str,
+        help='the dir to save dataset',
+        default='data/coco')
+    parser.add_argument(
+        '--unzip',
+        action='store_true',
+        help='whether to unzip the dataset; zipped files will be kept')
+    parser.add_argument(
+        '--delete',
+        action='store_true',
+        help='delete the downloaded zipped files')
+    parser.add_argument(
+        '--threads', type=int, help='number of threads', default=4)
+    args = parser.parse_args()
+    return args
+
+
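+# Example invocation (downloads and unpacks VOC2007 into data/voc):
+#   python tools/misc/download_dataset.py --dataset-name voc2007 \
+#       --save-dir data/voc --unzip --delete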
+def download(url, dir, unzip=True, delete=False, threads=1):

+    def download_one(url, dir):
+        f = dir / Path(url).name
+        if Path(url).is_file():
+            Path(url).rename(f)
+        elif not f.exists():
+            print('Downloading {} to {}'.format(url, f))
+            torch.hub.download_url_to_file(url, f, progress=True)
+        if unzip and f.suffix in ('.zip', '.tar'):
+            print('Unzipping {}'.format(f.name))
+            if f.suffix == '.zip':
+                ZipFile(f).extractall(path=dir)
+            elif f.suffix == '.tar':
+                TarFile(f).extractall(path=dir)
+            if delete:
+                f.unlink()
+                print('Deleted {}'.format(f))
+
+    dir = Path(dir)
+    if threads > 1:
+        pool = ThreadPool(threads)
+        pool.imap(lambda x: download_one(*x), zip(url, repeat(dir)))
+        pool.close()
+        pool.join()
+    else:
+        for u in [url] if isinstance(url, (str, Path)) else url:
+            download_one(u, dir)
+
+
+def main():
+    args = parse_args()
+    path = Path(args.save_dir)
+    if not path.exists():
+        path.mkdir(parents=True, exist_ok=True)
+    data2url = dict(
+        # TODO: Support for downloading Panoptic Segmentation of COCO
+        coco2017=[
+            'http://images.cocodataset.org/zips/train2017.zip',
+            'http://images.cocodataset.org/zips/val2017.zip',
+            'http://images.cocodataset.org/zips/test2017.zip',
+            'http://images.cocodataset.org/annotations/' +
+            'annotations_trainval2017.zip'
+        ],
+        lvis=[
+            'https://s3-us-west-2.amazonaws.com/dl.fbaipublicfiles.com/LVIS/lvis_v1_train.json.zip',  # noqa
+            'https://s3-us-west-2.amazonaws.com/dl.fbaipublicfiles.com/LVIS/lvis_v1_val.json.zip',  # noqa
+        ],
+        voc2007=[
+            'http://host.robots.ox.ac.uk/pascal/VOC/voc2007/VOCtrainval_06-Nov-2007.tar',  # noqa
+            'http://host.robots.ox.ac.uk/pascal/VOC/voc2007/VOCtest_06-Nov-2007.tar',  # noqa
+            'http://host.robots.ox.ac.uk/pascal/VOC/voc2007/VOCdevkit_08-Jun-2007.tar',  # noqa
+        ],
+    )
+    url = data2url.get(args.dataset_name, None)
+    if url is None:
+        print('Only COCO, VOC, and LVIS are supported now!')
+        return
+    download(
+        url,
+        dir=path,
+        unzip=args.unzip,
+        delete=args.delete,
+        threads=args.threads)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/tools/misc/print_config.py b/tools/misc/print_config.py
new file mode 100644
index 0000000..1b2cb30
--- /dev/null
+++ b/tools/misc/print_config.py
@@ -0,0 +1,51 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import argparse
+import warnings
+
+from mmcv import Config, DictAction
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(description='Print the whole config')
+    parser.add_argument('config', help='config file path')
+    parser.add_argument(
+        '--options',
+        nargs='+',
+        action=DictAction,
+        help='override some settings in the used config, the key-value pair '
+        'in xxx=yyy format will be merged into config file (deprecated), '
+        'change to --cfg-options instead.')
+    parser.add_argument(
+        '--cfg-options',
+        nargs='+',
+        action=DictAction,
+        help='override some settings in the used config, the key-value pair '
+        'in xxx=yyy format will be merged into config file. If the value to '
+        'be overwritten is a list, it should be like key="[a,b]" or key=a,b '
+        'It also allows nested list/tuple values, e.g. 
key="[(a,b),(c,d)]" ' + 'Note that the quotation marks are necessary and that no white space ' + 'is allowed.') + args = parser.parse_args() + + if args.options and args.cfg_options: + raise ValueError( + '--options and --cfg-options cannot be both ' + 'specified, --options is deprecated in favor of --cfg-options') + if args.options: + warnings.warn('--options is deprecated in favor of --cfg-options') + args.cfg_options = args.options + + return args + + +def main(): + args = parse_args() + + cfg = Config.fromfile(args.config) + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + print(f'Config:\n{cfg.pretty_text}') + + +if __name__ == '__main__': + main() diff --git a/tools/model_converters/detectron2pytorch.py b/tools/model_converters/detectron2pytorch.py new file mode 100644 index 0000000..b7264d5 --- /dev/null +++ b/tools/model_converters/detectron2pytorch.py @@ -0,0 +1,83 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import argparse +from collections import OrderedDict + +import mmcv +import torch + +arch_settings = {50: (3, 4, 6, 3), 101: (3, 4, 23, 3)} + + +def convert_bn(blobs, state_dict, caffe_name, torch_name, converted_names): + # detectron replace bn with affine channel layer + state_dict[torch_name + '.bias'] = torch.from_numpy(blobs[caffe_name + + '_b']) + state_dict[torch_name + '.weight'] = torch.from_numpy(blobs[caffe_name + + '_s']) + bn_size = state_dict[torch_name + '.weight'].size() + state_dict[torch_name + '.running_mean'] = torch.zeros(bn_size) + state_dict[torch_name + '.running_var'] = torch.ones(bn_size) + converted_names.add(caffe_name + '_b') + converted_names.add(caffe_name + '_s') + + +def convert_conv_fc(blobs, state_dict, caffe_name, torch_name, + converted_names): + state_dict[torch_name + '.weight'] = torch.from_numpy(blobs[caffe_name + + '_w']) + converted_names.add(caffe_name + '_w') + if caffe_name + '_b' in blobs: + state_dict[torch_name + '.bias'] = torch.from_numpy(blobs[caffe_name + + '_b']) + converted_names.add(caffe_name + '_b') + + +def convert(src, dst, depth): + """Convert keys in detectron pretrained ResNet models to pytorch style.""" + # load arch_settings + if depth not in arch_settings: + raise ValueError('Only support ResNet-50 and ResNet-101 currently') + block_nums = arch_settings[depth] + # load caffe model + caffe_model = mmcv.load(src, encoding='latin1') + blobs = caffe_model['blobs'] if 'blobs' in caffe_model else caffe_model + # convert to pytorch style + state_dict = OrderedDict() + converted_names = set() + convert_conv_fc(blobs, state_dict, 'conv1', 'conv1', converted_names) + convert_bn(blobs, state_dict, 'res_conv1_bn', 'bn1', converted_names) + for i in range(1, len(block_nums) + 1): + for j in range(block_nums[i - 1]): + if j == 0: + convert_conv_fc(blobs, state_dict, f'res{i + 1}_{j}_branch1', + f'layer{i}.{j}.downsample.0', converted_names) + convert_bn(blobs, state_dict, f'res{i + 1}_{j}_branch1_bn', + f'layer{i}.{j}.downsample.1', converted_names) + for k, letter in enumerate(['a', 'b', 'c']): + convert_conv_fc(blobs, state_dict, + f'res{i + 1}_{j}_branch2{letter}', + f'layer{i}.{j}.conv{k+1}', converted_names) + convert_bn(blobs, state_dict, + f'res{i + 1}_{j}_branch2{letter}_bn', + f'layer{i}.{j}.bn{k + 1}', converted_names) + # check if all layers are converted + for key in blobs: + if key not in converted_names: + print(f'Not Convert: {key}') + # save checkpoint + checkpoint = dict() + checkpoint['state_dict'] = state_dict + torch.save(checkpoint, dst) + + +def main(): + parser = 
argparse.ArgumentParser(description='Convert model keys') + parser.add_argument('src', help='src detectron model path') + parser.add_argument('dst', help='save path') + parser.add_argument('depth', type=int, help='ResNet model depth') + args = parser.parse_args() + convert(args.src, args.dst, args.depth) + + +if __name__ == '__main__': + main() diff --git a/tools/model_converters/publish_model.py b/tools/model_converters/publish_model.py new file mode 100644 index 0000000..219fcdf --- /dev/null +++ b/tools/model_converters/publish_model.py @@ -0,0 +1,43 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import argparse +import subprocess + +import torch + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Process a checkpoint to be published') + parser.add_argument('in_file', help='input checkpoint filename') + parser.add_argument('out_file', help='output checkpoint filename') + args = parser.parse_args() + return args + + +def process_checkpoint(in_file, out_file): + checkpoint = torch.load(in_file, map_location='cpu') + # remove optimizer for smaller file size + if 'optimizer' in checkpoint: + del checkpoint['optimizer'] + # if it is necessary to remove some sensitive data in checkpoint['meta'], + # add the code here. + if torch.__version__ >= '1.6': + torch.save(checkpoint, out_file, _use_new_zipfile_serialization=False) + else: + torch.save(checkpoint, out_file) + sha = subprocess.check_output(['sha256sum', out_file]).decode() + if out_file.endswith('.pth'): + out_file_name = out_file[:-4] + else: + out_file_name = out_file + final_file = out_file_name + f'-{sha[:8]}.pth' + subprocess.Popen(['mv', out_file, final_file]) + + +def main(): + args = parse_args() + process_checkpoint(args.in_file, args.out_file) + + +if __name__ == '__main__': + main() diff --git a/tools/model_converters/regnet2mmdet.py b/tools/model_converters/regnet2mmdet.py new file mode 100644 index 0000000..fbf8c8f --- /dev/null +++ b/tools/model_converters/regnet2mmdet.py @@ -0,0 +1,90 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
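+# Usage sketch (illustrative only, not part of the converter; file names are
+# placeholders, assuming a pycls checkpoint that stores its weights under the
+# 'model_state' key, as convert() below expects):
+#   python tools/model_converters/regnet2mmdet.py regnet_pycls.pyth regnet_mmdet.pth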
+import argparse +from collections import OrderedDict + +import torch + + +def convert_stem(model_key, model_weight, state_dict, converted_names): + new_key = model_key.replace('stem.conv', 'conv1') + new_key = new_key.replace('stem.bn', 'bn1') + state_dict[new_key] = model_weight + converted_names.add(model_key) + print(f'Convert {model_key} to {new_key}') + + +def convert_head(model_key, model_weight, state_dict, converted_names): + new_key = model_key.replace('head.fc', 'fc') + state_dict[new_key] = model_weight + converted_names.add(model_key) + print(f'Convert {model_key} to {new_key}') + + +def convert_reslayer(model_key, model_weight, state_dict, converted_names): + split_keys = model_key.split('.') + layer, block, module = split_keys[:3] + block_id = int(block[1:]) + layer_name = f'layer{int(layer[1:])}' + block_name = f'{block_id - 1}' + + if block_id == 1 and module == 'bn': + new_key = f'{layer_name}.{block_name}.downsample.1.{split_keys[-1]}' + elif block_id == 1 and module == 'proj': + new_key = f'{layer_name}.{block_name}.downsample.0.{split_keys[-1]}' + elif module == 'f': + if split_keys[3] == 'a_bn': + module_name = 'bn1' + elif split_keys[3] == 'b_bn': + module_name = 'bn2' + elif split_keys[3] == 'c_bn': + module_name = 'bn3' + elif split_keys[3] == 'a': + module_name = 'conv1' + elif split_keys[3] == 'b': + module_name = 'conv2' + elif split_keys[3] == 'c': + module_name = 'conv3' + new_key = f'{layer_name}.{block_name}.{module_name}.{split_keys[-1]}' + else: + raise ValueError(f'Unsupported conversion of key {model_key}') + print(f'Convert {model_key} to {new_key}') + state_dict[new_key] = model_weight + converted_names.add(model_key) + + +def convert(src, dst): + """Convert keys in pycls pretrained RegNet models to mmdet style.""" + # load caffe model + regnet_model = torch.load(src) + blobs = regnet_model['model_state'] + # convert to pytorch style + state_dict = OrderedDict() + converted_names = set() + for key, weight in blobs.items(): + if 'stem' in key: + convert_stem(key, weight, state_dict, converted_names) + elif 'head' in key: + convert_head(key, weight, state_dict, converted_names) + elif key.startswith('s'): + convert_reslayer(key, weight, state_dict, converted_names) + + # check if all layers are converted + for key in blobs: + if key not in converted_names: + print(f'not converted: {key}') + # save checkpoint + checkpoint = dict() + checkpoint['state_dict'] = state_dict + torch.save(checkpoint, dst) + + +def main(): + parser = argparse.ArgumentParser(description='Convert model keys') + parser.add_argument('src', help='src detectron model path') + parser.add_argument('dst', help='save path') + args = parser.parse_args() + convert(args.src, args.dst) + + +if __name__ == '__main__': + main() diff --git a/tools/model_converters/selfsup2mmdet.py b/tools/model_converters/selfsup2mmdet.py new file mode 100644 index 0000000..bc8cce1 --- /dev/null +++ b/tools/model_converters/selfsup2mmdet.py @@ -0,0 +1,42 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
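+# Usage sketch (illustrative only; file names are placeholders, assuming a
+# MoCo checkpoint whose query encoder is stored under 'state_dict' with the
+# 'module.encoder_q.' prefix, as moco_convert() below expects):
+#   python tools/model_converters/selfsup2mmdet.py moco_pretrain.pth moco_mmdet.pth --selfsup moco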
+import argparse +from collections import OrderedDict + +import torch + + +def moco_convert(src, dst): + """Convert keys in pycls pretrained moco models to mmdet style.""" + # load caffe model + moco_model = torch.load(src) + blobs = moco_model['state_dict'] + # convert to pytorch style + state_dict = OrderedDict() + for k, v in blobs.items(): + if not k.startswith('module.encoder_q.'): + continue + old_k = k + k = k.replace('module.encoder_q.', '') + state_dict[k] = v + print(old_k, '->', k) + # save checkpoint + checkpoint = dict() + checkpoint['state_dict'] = state_dict + torch.save(checkpoint, dst) + + +def main(): + parser = argparse.ArgumentParser(description='Convert model keys') + parser.add_argument('src', help='src detectron model path') + parser.add_argument('dst', help='save path') + parser.add_argument( + '--selfsup', type=str, choices=['moco', 'swav'], help='save path') + args = parser.parse_args() + if args.selfsup == 'moco': + moco_convert(args.src, args.dst) + elif args.selfsup == 'swav': + print('SWAV does not need to convert the keys') + + +if __name__ == '__main__': + main() diff --git a/tools/model_converters/upgrade_model_version.py b/tools/model_converters/upgrade_model_version.py new file mode 100644 index 0000000..36ee607 --- /dev/null +++ b/tools/model_converters/upgrade_model_version.py @@ -0,0 +1,210 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import argparse +import re +import tempfile +from collections import OrderedDict + +import torch +from mmcv import Config + + +def is_head(key): + valid_head_list = [ + 'bbox_head', 'mask_head', 'semantic_head', 'grid_head', 'mask_iou_head' + ] + + return any(key.startswith(h) for h in valid_head_list) + + +def parse_config(config_strings): + temp_file = tempfile.NamedTemporaryFile() + config_path = f'{temp_file.name}.py' + with open(config_path, 'w') as f: + f.write(config_strings) + + config = Config.fromfile(config_path) + is_two_stage = True + is_ssd = False + is_retina = False + reg_cls_agnostic = False + if 'rpn_head' not in config.model: + is_two_stage = False + # check whether it is SSD + if config.model.bbox_head.type == 'SSDHead': + is_ssd = True + elif config.model.bbox_head.type == 'RetinaHead': + is_retina = True + elif isinstance(config.model['bbox_head'], list): + reg_cls_agnostic = True + elif 'reg_class_agnostic' in config.model.bbox_head: + reg_cls_agnostic = config.model.bbox_head \ + .reg_class_agnostic + temp_file.close() + return is_two_stage, is_ssd, is_retina, reg_cls_agnostic + + +def reorder_cls_channel(val, num_classes=81): + # bias + if val.dim() == 1: + new_val = torch.cat((val[1:], val[:1]), dim=0) + # weight + else: + out_channels, in_channels = val.shape[:2] + # conv_cls for softmax output + if out_channels != num_classes and out_channels % num_classes == 0: + new_val = val.reshape(-1, num_classes, in_channels, *val.shape[2:]) + new_val = torch.cat((new_val[:, 1:], new_val[:, :1]), dim=1) + new_val = new_val.reshape(val.size()) + # fc_cls + elif out_channels == num_classes: + new_val = torch.cat((val[1:], val[:1]), dim=0) + # agnostic | retina_cls | rpn_cls + else: + new_val = val + + return new_val + + +def truncate_cls_channel(val, num_classes=81): + + # bias + if val.dim() == 1: + if val.size(0) % num_classes == 0: + new_val = val[:num_classes - 1] + else: + new_val = val + # weight + else: + out_channels, in_channels = val.shape[:2] + # conv_logits + if out_channels % num_classes == 0: + new_val = val.reshape(num_classes, in_channels, *val.shape[2:])[1:] + new_val = 
new_val.reshape(-1, *val.shape[1:]) + # agnostic + else: + new_val = val + + return new_val + + +def truncate_reg_channel(val, num_classes=81): + # bias + if val.dim() == 1: + # fc_reg | rpn_reg + if val.size(0) % num_classes == 0: + new_val = val.reshape(num_classes, -1)[:num_classes - 1] + new_val = new_val.reshape(-1) + # agnostic + else: + new_val = val + # weight + else: + out_channels, in_channels = val.shape[:2] + # fc_reg | rpn_reg + if out_channels % num_classes == 0: + new_val = val.reshape(num_classes, -1, in_channels, + *val.shape[2:])[1:] + new_val = new_val.reshape(-1, *val.shape[1:]) + # agnostic + else: + new_val = val + + return new_val + + +def convert(in_file, out_file, num_classes): + """Convert keys in checkpoints. + + There can be some breaking changes during the development of mmdetection, + and this tool is used for upgrading checkpoints trained with old versions + to the latest one. + """ + checkpoint = torch.load(in_file) + in_state_dict = checkpoint.pop('state_dict') + out_state_dict = OrderedDict() + meta_info = checkpoint['meta'] + is_two_stage, is_ssd, is_retina, reg_cls_agnostic = parse_config( + '#' + meta_info['config']) + if meta_info['mmdet_version'] <= '0.5.3' and is_retina: + upgrade_retina = True + else: + upgrade_retina = False + + # MMDetection v2.5.0 unifies the class order in RPN + # if the model is trained in version=2.5.0 + if meta_info['mmdet_version'] < '2.5.0': + upgrade_rpn = True + else: + upgrade_rpn = False + + for key, val in in_state_dict.items(): + new_key = key + new_val = val + if is_two_stage and is_head(key): + new_key = 'roi_head.{}'.format(key) + + # classification + if upgrade_rpn: + m = re.search( + r'(conv_cls|retina_cls|rpn_cls|fc_cls|fcos_cls|' + r'fovea_cls).(weight|bias)', new_key) + else: + m = re.search( + r'(conv_cls|retina_cls|fc_cls|fcos_cls|' + r'fovea_cls).(weight|bias)', new_key) + if m is not None: + print(f'reorder cls channels of {new_key}') + new_val = reorder_cls_channel(val, num_classes) + + # regression + if upgrade_rpn: + m = re.search(r'(fc_reg).(weight|bias)', new_key) + else: + m = re.search(r'(fc_reg|rpn_reg).(weight|bias)', new_key) + if m is not None and not reg_cls_agnostic: + print(f'truncate regression channels of {new_key}') + new_val = truncate_reg_channel(val, num_classes) + + # mask head + m = re.search(r'(conv_logits).(weight|bias)', new_key) + if m is not None: + print(f'truncate mask prediction channels of {new_key}') + new_val = truncate_cls_channel(val, num_classes) + + m = re.search(r'(cls_convs|reg_convs).\d.(weight|bias)', key) + # Legacy issues in RetinaNet since V1.x + # Use ConvModule instead of nn.Conv2d in RetinaNet + # cls_convs.0.weight -> cls_convs.0.conv.weight + if m is not None and upgrade_retina: + param = m.groups()[1] + new_key = key.replace(param, f'conv.{param}') + out_state_dict[new_key] = val + print(f'rename the name of {key} to {new_key}') + continue + + m = re.search(r'(cls_convs).\d.(weight|bias)', key) + if m is not None and is_ssd: + print(f'reorder cls channels of {new_key}') + new_val = reorder_cls_channel(val, num_classes) + + out_state_dict[new_key] = new_val + checkpoint['state_dict'] = out_state_dict + torch.save(checkpoint, out_file) + + +def main(): + parser = argparse.ArgumentParser(description='Upgrade model version') + parser.add_argument('in_file', help='input checkpoint file') + parser.add_argument('out_file', help='output checkpoint file') + parser.add_argument( + '--num-classes', + type=int, + default=81, + help='number of classes of the original 
model') + args = parser.parse_args() + convert(args.in_file, args.out_file, args.num_classes) + + +if __name__ == '__main__': + main() diff --git a/tools/model_converters/upgrade_ssd_version.py b/tools/model_converters/upgrade_ssd_version.py new file mode 100644 index 0000000..befff45 --- /dev/null +++ b/tools/model_converters/upgrade_ssd_version.py @@ -0,0 +1,58 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import argparse +import tempfile +from collections import OrderedDict + +import torch +from mmcv import Config + + +def parse_config(config_strings): + temp_file = tempfile.NamedTemporaryFile() + config_path = f'{temp_file.name}.py' + with open(config_path, 'w') as f: + f.write(config_strings) + + config = Config.fromfile(config_path) + # check whether it is SSD + if config.model.bbox_head.type != 'SSDHead': + raise AssertionError('This is not a SSD model.') + + +def convert(in_file, out_file): + checkpoint = torch.load(in_file) + in_state_dict = checkpoint.pop('state_dict') + out_state_dict = OrderedDict() + meta_info = checkpoint['meta'] + parse_config('#' + meta_info['config']) + for key, value in in_state_dict.items(): + if 'extra' in key: + layer_idx = int(key.split('.')[2]) + new_key = 'neck.extra_layers.{}.{}.conv.'.format( + layer_idx // 2, layer_idx % 2) + key.split('.')[-1] + elif 'l2_norm' in key: + new_key = 'neck.l2_norm.weight' + elif 'bbox_head' in key: + new_key = key[:21] + '.0' + key[21:] + else: + new_key = key + out_state_dict[new_key] = value + checkpoint['state_dict'] = out_state_dict + + if torch.__version__ >= '1.6': + torch.save(checkpoint, out_file, _use_new_zipfile_serialization=False) + else: + torch.save(checkpoint, out_file) + + +def main(): + parser = argparse.ArgumentParser(description='Upgrade SSD version') + parser.add_argument('in_file', help='input checkpoint file') + parser.add_argument('out_file', help='output checkpoint file') + + args = parser.parse_args() + convert(args.in_file, args.out_file) + + +if __name__ == '__main__': + main() diff --git a/tools/slurm_test.sh b/tools/slurm_test.sh new file mode 100644 index 0000000..6dd67e5 --- /dev/null +++ b/tools/slurm_test.sh @@ -0,0 +1,24 @@ +#!/usr/bin/env bash + +set -x + +PARTITION=$1 +JOB_NAME=$2 +CONFIG=$3 +CHECKPOINT=$4 +GPUS=${GPUS:-8} +GPUS_PER_NODE=${GPUS_PER_NODE:-8} +CPUS_PER_TASK=${CPUS_PER_TASK:-5} +PY_ARGS=${@:5} +SRUN_ARGS=${SRUN_ARGS:-""} + +PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \ +srun -p ${PARTITION} \ + --job-name=${JOB_NAME} \ + --gres=gpu:${GPUS_PER_NODE} \ + --ntasks=${GPUS} \ + --ntasks-per-node=${GPUS_PER_NODE} \ + --cpus-per-task=${CPUS_PER_TASK} \ + --kill-on-bad-exit=1 \ + ${SRUN_ARGS} \ + python -u tools/test.py ${CONFIG} ${CHECKPOINT} --launcher="slurm" ${PY_ARGS} diff --git a/tools/slurm_train.sh b/tools/slurm_train.sh new file mode 100644 index 0000000..b3feb3d --- /dev/null +++ b/tools/slurm_train.sh @@ -0,0 +1,24 @@ +#!/usr/bin/env bash + +set -x + +PARTITION=$1 +JOB_NAME=$2 +CONFIG=$3 +WORK_DIR=$4 +GPUS=${GPUS:-8} +GPUS_PER_NODE=${GPUS_PER_NODE:-8} +CPUS_PER_TASK=${CPUS_PER_TASK:-5} +SRUN_ARGS=${SRUN_ARGS:-""} +PY_ARGS=${@:5} + +PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \ +srun -p ${PARTITION} \ + --job-name=${JOB_NAME} \ + --gres=gpu:${GPUS_PER_NODE} \ + --ntasks=${GPUS} \ + --ntasks-per-node=${GPUS_PER_NODE} \ + --cpus-per-task=${CPUS_PER_TASK} \ + --kill-on-bad-exit=1 \ + ${SRUN_ARGS} \ + python -u tools/train.py ${CONFIG} --work-dir=${WORK_DIR} --launcher="slurm" ${PY_ARGS} diff --git a/tools/test.py b/tools/test.py new file mode 100644 index 
0000000..9e007ac
--- /dev/null
+++ b/tools/test.py
@@ -0,0 +1,260 @@
+# Copyright (c) OpenMMLab. All rights reserved.
+import argparse
+import os
+import os.path as osp
+import time
+import warnings
+
+import mmcv
+import torch
+from mmcv import Config, DictAction
+from mmcv.cnn import fuse_conv_bn
+from mmcv.parallel import MMDataParallel, MMDistributedDataParallel
+from mmcv.runner import (get_dist_info, init_dist, load_checkpoint,
+                         wrap_fp16_model)
+
+from mmdet.apis import multi_gpu_test, single_gpu_test
+from mmdet.datasets import (build_dataloader, build_dataset,
+                            replace_ImageToTensor)
+from mmdet.models import build_detector
+from mmdet.utils import setup_multi_processes
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(
+        description='MMDet test (and eval) a model')
+    parser.add_argument('config', help='test config file path')
+    parser.add_argument('checkpoint', help='checkpoint file')
+    parser.add_argument(
+        '--work-dir',
+        help='the directory to save the file containing evaluation metrics')
+    parser.add_argument('--out', help='output result file in pickle format')
+    parser.add_argument(
+        '--fuse-conv-bn',
+        action='store_true',
+        help='whether to fuse conv and bn; this will slightly increase '
+        'the inference speed')
+    parser.add_argument(
+        '--gpu-ids',
+        type=int,
+        nargs='+',
+        help='(Deprecated, please use --gpu-id) ids of gpus to use '
+        '(only applicable to non-distributed training)')
+    parser.add_argument(
+        '--gpu-id',
+        type=int,
+        default=0,
+        help='id of gpu to use '
+        '(only applicable to non-distributed testing)')
+    parser.add_argument(
+        '--format-only',
+        action='store_true',
+        help='Format the output results without performing evaluation. It is '
+        'useful when you want to format the result to a specific format and '
+        'submit it to the test server')
+    parser.add_argument(
+        '--eval',
+        type=str,
+        nargs='+',
+        help='evaluation metrics, which depends on the dataset, e.g., "bbox",'
+        ' "segm", "proposal" for COCO, and "mAP", "recall" for PASCAL VOC')
+    parser.add_argument('--show', action='store_true', help='show results')
+    parser.add_argument(
+        '--show-dir', help='directory where painted images will be saved')
+    parser.add_argument(
+        '--show-score-thr',
+        type=float,
+        default=0.3,
+        help='score threshold (default: 0.3)')
+    parser.add_argument(
+        '--gpu-collect',
+        action='store_true',
+        help='whether to use gpu to collect results.')
+    parser.add_argument(
+        '--tmpdir',
+        help='tmp directory used for collecting results from multiple '
+        'workers, available when gpu-collect is not specified')
+    parser.add_argument(
+        '--cfg-options',
+        nargs='+',
+        action=DictAction,
+        help='override some settings in the used config, the key-value pair '
+        'in xxx=yyy format will be merged into config file. If the value to '
+        'be overwritten is a list, it should be like key="[a,b]" or key=a,b '
+        'It also allows nested list/tuple values, e.g. 
key="[(a,b),(c,d)]" ' + 'Note that the quotation marks are necessary and that no white space ' + 'is allowed.') + parser.add_argument( + '--options', + nargs='+', + action=DictAction, + help='custom options for evaluation, the key-value pair in xxx=yyy ' + 'format will be kwargs for dataset.evaluate() function (deprecate), ' + 'change to --eval-options instead.') + parser.add_argument( + '--eval-options', + nargs='+', + action=DictAction, + help='custom options for evaluation, the key-value pair in xxx=yyy ' + 'format will be kwargs for dataset.evaluate() function') + parser.add_argument( + '--launcher', + choices=['none', 'pytorch', 'slurm', 'mpi'], + default='none', + help='job launcher') + parser.add_argument('--local_rank', type=int, default=0) + args = parser.parse_args() + if 'LOCAL_RANK' not in os.environ: + os.environ['LOCAL_RANK'] = str(args.local_rank) + + if args.options and args.eval_options: + raise ValueError( + '--options and --eval-options cannot be both ' + 'specified, --options is deprecated in favor of --eval-options') + if args.options: + warnings.warn('--options is deprecated in favor of --eval-options') + args.eval_options = args.options + return args + + +def main(): + args = parse_args() + + assert args.out or args.eval or args.format_only or args.show \ + or args.show_dir, \ + ('Please specify at least one operation (save/eval/format/show the ' + 'results / save the results) with the argument "--out", "--eval"' + ', "--format-only", "--show" or "--show-dir"') + + if args.eval and args.format_only: + raise ValueError('--eval and --format_only cannot be both specified') + + if args.out is not None and not args.out.endswith(('.pkl', '.pickle')): + raise ValueError('The output file must be a pkl file.') + + cfg = Config.fromfile(args.config) + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + + # set multi-process settings + setup_multi_processes(cfg) + + # set cudnn_benchmark + if cfg.get('cudnn_benchmark', False): + torch.backends.cudnn.benchmark = True + + cfg.model.pretrained = None + if cfg.model.get('neck'): + if isinstance(cfg.model.neck, list): + for neck_cfg in cfg.model.neck: + if neck_cfg.get('rfp_backbone'): + if neck_cfg.rfp_backbone.get('pretrained'): + neck_cfg.rfp_backbone.pretrained = None + elif cfg.model.neck.get('rfp_backbone'): + if cfg.model.neck.rfp_backbone.get('pretrained'): + cfg.model.neck.rfp_backbone.pretrained = None + + # in case the test dataset is concatenated + samples_per_gpu = 1 + if isinstance(cfg.data.test, dict): + cfg.data.test.test_mode = True + samples_per_gpu = cfg.data.test.pop('samples_per_gpu', 1) + if samples_per_gpu > 1: + # Replace 'ImageToTensor' to 'DefaultFormatBundle' + cfg.data.test.pipeline = replace_ImageToTensor( + cfg.data.test.pipeline) + elif isinstance(cfg.data.test, list): + for ds_cfg in cfg.data.test: + ds_cfg.test_mode = True + samples_per_gpu = max( + [ds_cfg.pop('samples_per_gpu', 1) for ds_cfg in cfg.data.test]) + if samples_per_gpu > 1: + for ds_cfg in cfg.data.test: + ds_cfg.pipeline = replace_ImageToTensor(ds_cfg.pipeline) + + if args.gpu_ids is not None: + cfg.gpu_ids = args.gpu_ids[0:1] + warnings.warn('`--gpu-ids` is deprecated, please use `--gpu-id`. ' + 'Because we only support single GPU mode in ' + 'non-distributed testing. Use the first GPU ' + 'in `gpu_ids` now.') + else: + cfg.gpu_ids = [args.gpu_id] + + # init distributed env first, since logger depends on the dist info. 
+    if args.launcher == 'none':
+        distributed = False
+    else:
+        distributed = True
+        init_dist(args.launcher, **cfg.dist_params)
+
+    rank, _ = get_dist_info()
+    # creating a work_dir is optional; skip it unless one was requested
+    if args.work_dir is not None and rank == 0:
+        mmcv.mkdir_or_exist(osp.abspath(args.work_dir))
+        timestamp = time.strftime('%Y%m%d_%H%M%S', time.localtime())
+        json_file = osp.join(args.work_dir, f'eval_{timestamp}.json')
+
+    # build the dataloader
+    dataset = build_dataset(cfg.data.test)
+    data_loader = build_dataloader(
+        dataset,
+        samples_per_gpu=samples_per_gpu,
+        workers_per_gpu=cfg.data.workers_per_gpu,
+        dist=distributed,
+        shuffle=False)
+
+    # build the model and load checkpoint
+    cfg.model.train_cfg = None
+    model = build_detector(cfg.model, test_cfg=cfg.get('test_cfg'))
+    fp16_cfg = cfg.get('fp16', None)
+    if fp16_cfg is not None:
+        wrap_fp16_model(model)
+    checkpoint = load_checkpoint(model, args.checkpoint, map_location='cpu')
+    if args.fuse_conv_bn:
+        model = fuse_conv_bn(model)
+    # old versions did not save class info in checkpoints; this workaround is
+    # for backward compatibility
+    if 'CLASSES' in checkpoint.get('meta', {}):
+        model.CLASSES = checkpoint['meta']['CLASSES']
+    else:
+        model.CLASSES = dataset.CLASSES
+
+    if not distributed:
+        model = MMDataParallel(model, device_ids=cfg.gpu_ids)
+        outputs = single_gpu_test(model, data_loader, args.show, args.show_dir,
+                                  args.show_score_thr)
+    else:
+        model = MMDistributedDataParallel(
+            model.cuda(),
+            device_ids=[torch.cuda.current_device()],
+            broadcast_buffers=False)
+        outputs = multi_gpu_test(model, data_loader, args.tmpdir,
+                                 args.gpu_collect)
+
+    rank, _ = get_dist_info()
+    if rank == 0:
+        if args.out:
+            print(f'\nwriting results to {args.out}')
+            mmcv.dump(outputs, args.out)
+        kwargs = {} if args.eval_options is None else args.eval_options
+        if args.format_only:
+            dataset.format_results(outputs, **kwargs)
+        if args.eval:
+            eval_kwargs = cfg.get('evaluation', {}).copy()
+            # hard-coded way to remove EvalHook args
+            for key in [
+                    'interval', 'tmpdir', 'start', 'gpu_collect', 'save_best',
+                    'rule', 'dynamic_intervals'
+            ]:
+                eval_kwargs.pop(key, None)
+            eval_kwargs.update(dict(metric=args.eval, **kwargs))
+            metric = dataset.evaluate(outputs, **eval_kwargs)
+            print(metric)
+            metric_dict = dict(config=args.config, metric=metric)
+            if args.work_dir is not None and rank == 0:
+                mmcv.dump(metric_dict, json_file)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/tools/test_kneron.py b/tools/test_kneron.py
new file mode 100644
index 0000000..b7c2103
--- /dev/null
+++ b/tools/test_kneron.py
@@ -0,0 +1,326 @@
+# All modifications made by Kneron Corp.: Copyright (c) 2022 Kneron Corp.
+# Copyright (c) OpenMMLab. All rights reserved.
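+# Usage sketch (illustrative only; file names are placeholders): evaluate an
+# exported model with the same config and dataset pipeline as tools/test.py:
+#   python tools/test_kneron.py CONFIG model.onnx --eval bbox --out-kneron out.json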
+import argparse
+import os
+import os.path as osp
+import time
+import warnings
+
+import mmcv
+import torch
+from mmcv import Config, DictAction
+from mmcv.cnn import fuse_conv_bn
+from mmcv.parallel import MMDataParallel, MMDistributedDataParallel
+from mmcv.runner import (get_dist_info, init_dist, load_checkpoint,
+                         wrap_fp16_model)
+
+from mmdet.apis import multi_gpu_test, single_gpu_test
+from mmdet.datasets import (build_dataloader, build_dataset,
+                            replace_ImageToTensor)
+from mmdet.models import build_detector
+from mmdet.utils import setup_multi_processes
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(
+        description='MMDet test (and eval) a model')
+    parser.add_argument('config', help='test config file path')
+    parser.add_argument(
+        'checkpoint', help='weight file (.pth, .onnx or .nef)')
+    parser.add_argument(
+        '--work-dir',
+        help='the directory to save the file containing evaluation metrics')
+    parser.add_argument('--out', help='output result file in pickle format')
+    parser.add_argument(
+        '--out-kneron',
+        help='output result file in the Kneron public-field JSON format')
+    parser.add_argument(
+        '--fuse-conv-bn',
+        action='store_true',
+        help='whether to fuse conv and bn; this will slightly increase '
+        'the inference speed')
+    parser.add_argument(
+        '--gpu-ids',
+        type=int,
+        nargs='+',
+        help='(Deprecated, please use --gpu-id) ids of gpus to use '
+        '(only applicable to non-distributed training)')
+    parser.add_argument(
+        '--gpu-id',
+        type=int,
+        default=0,
+        help='id of gpu to use '
+        '(only applicable to non-distributed testing)')
+    parser.add_argument(
+        '--format-only',
+        action='store_true',
+        help='Format the output results without performing evaluation. It is '
+        'useful when you want to format the result to a specific format and '
+        'submit it to the test server')
+    parser.add_argument(
+        '--eval',
+        type=str,
+        nargs='+',
+        help='evaluation metrics, which depends on the dataset, e.g., "bbox",'
+        ' "segm", "proposal" for COCO, and "mAP", "recall" for PASCAL VOC')
+    parser.add_argument('--show', action='store_true', help='show results')
+    parser.add_argument(
+        '--show-dir', help='directory where painted images will be saved')
+    parser.add_argument(
+        '--show-score-thr',
+        type=float,
+        default=0.3,
+        help='score threshold (default: 0.3)')
+    parser.add_argument(
+        '--gpu-collect',
+        action='store_true',
+        help='whether to use gpu to collect results.')
+    parser.add_argument(
+        '--tmpdir',
+        help='tmp directory used for collecting results from multiple '
+        'workers, available when gpu-collect is not specified')
+    parser.add_argument(
+        '--cfg-options',
+        nargs='+',
+        action=DictAction,
+        help='override some settings in the used config, the key-value pair '
+        'in xxx=yyy format will be merged into config file. If the value to '
+        'be overwritten is a list, it should be like key="[a,b]" or key=a,b '
+        'It also allows nested list/tuple values, e.g. 
key="[(a,b),(c,d)]" ' + 'Note that the quotation marks are necessary and that no white space ' + 'is allowed.') + parser.add_argument( + '--options', + nargs='+', + action=DictAction, + help='custom options for evaluation, the key-value pair in xxx=yyy ' + 'format will be kwargs for dataset.evaluate() function (deprecate), ' + 'change to --eval-options instead.') + parser.add_argument( + '--eval-options', + nargs='+', + action=DictAction, + help='custom options for evaluation, the key-value pair in xxx=yyy ' + 'format will be kwargs for dataset.evaluate() function') + parser.add_argument( + '--launcher', + choices=['none', 'pytorch', 'slurm', 'mpi'], + default='none', + help='job launcher') + parser.add_argument('--local_rank', type=int, default=0) + args = parser.parse_args() + if 'LOCAL_RANK' not in os.environ: + os.environ['LOCAL_RANK'] = str(args.local_rank) + + if args.options and args.eval_options: + raise ValueError( + '--options and --eval-options cannot be both ' + 'specified, --options is deprecated in favor of --eval-options') + if args.options: + warnings.warn('--options is deprecated in favor of --eval-options') + args.eval_options = args.options + return args + + +def main(): + args = parse_args() + + assert args.out or args.eval or args.format_only or args.show \ + or args.show_dir, \ + ('Please specify at least one operation (save/eval/format/show the ' + 'results / save the results) with the argument "--out", "--eval"' + ', "--format-only", "--show" or "--show-dir"') + + if args.eval and args.format_only: + raise ValueError('--eval and --format_only cannot be both specified') + + if args.out is not None and not args.out.endswith(('.pkl', '.pickle')): + raise ValueError('The output file must be a pkl file.') + + cfg = Config.fromfile(args.config) + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + + # set multi-process settings + setup_multi_processes(cfg) + + # set cudnn_benchmark + if cfg.get('cudnn_benchmark', False): + torch.backends.cudnn.benchmark = True + + cfg.model.pretrained = None + if cfg.model.get('neck'): + if isinstance(cfg.model.neck, list): + for neck_cfg in cfg.model.neck: + if neck_cfg.get('rfp_backbone'): + if neck_cfg.rfp_backbone.get('pretrained'): + neck_cfg.rfp_backbone.pretrained = None + elif cfg.model.neck.get('rfp_backbone'): + if cfg.model.neck.rfp_backbone.get('pretrained'): + cfg.model.neck.rfp_backbone.pretrained = None + + # in case the test dataset is concatenated + samples_per_gpu = 1 + if isinstance(cfg.data.test, dict): + cfg.data.test.test_mode = True + samples_per_gpu = cfg.data.test.pop('samples_per_gpu', 1) + if samples_per_gpu > 1: + # Replace 'ImageToTensor' to 'DefaultFormatBundle' + cfg.data.test.pipeline = replace_ImageToTensor( + cfg.data.test.pipeline) + elif isinstance(cfg.data.test, list): + for ds_cfg in cfg.data.test: + ds_cfg.test_mode = True + samples_per_gpu = max( + [ds_cfg.pop('samples_per_gpu', 1) for ds_cfg in cfg.data.test]) + if samples_per_gpu > 1: + for ds_cfg in cfg.data.test: + ds_cfg.pipeline = replace_ImageToTensor(ds_cfg.pipeline) + + if args.gpu_ids is not None: + cfg.gpu_ids = args.gpu_ids[0:1] + warnings.warn('`--gpu-ids` is deprecated, please use `--gpu-id`. ' + 'Because we only support single GPU mode in ' + 'non-distributed testing. Use the first GPU ' + 'in `gpu_ids` now.') + else: + cfg.gpu_ids = [args.gpu_id] + + # init distributed env first, since logger depends on the dist info. 
+ if args.launcher == 'none': + distributed = False + else: + distributed = True + init_dist(args.launcher, **cfg.dist_params) + + rank, _ = get_dist_info() + # allows not to create + if args.work_dir is not None and rank == 0: + mmcv.mkdir_or_exist(osp.abspath(args.work_dir)) + timestamp = time.strftime('%Y%m%d_%H%M%S', time.localtime()) + json_file = osp.join(args.work_dir, f'eval_{timestamp}.json') + + # build the dataloader + dataset = build_dataset(cfg.data.test) + data_loader = build_dataloader( + dataset, + samples_per_gpu=samples_per_gpu, + workers_per_gpu=cfg.data.workers_per_gpu, + dist=distributed, + shuffle=False) + + # build the model and load checkpoint + cfg.model.train_cfg = None + model = build_detector(cfg.model, test_cfg=cfg.get('test_cfg')) + fp16_cfg = cfg.get('fp16', None) + if os.path.splitext(args.checkpoint)[-1] == '.pth': + if fp16_cfg is not None: + wrap_fp16_model(model) + checkpoint = load_checkpoint( + model, args.checkpoint, map_location='cpu' + ) + if args.fuse_conv_bn: + model = fuse_conv_bn(model) + # old versions did not save class info in checkpoints, + # this workaround is for backward compatibility + if 'CLASSES' in checkpoint.get('meta', {}): + model.CLASSES = checkpoint['meta']['CLASSES'] + else: + model.CLASSES = dataset.CLASSES + elif os.path.splitext(args.checkpoint)[-1] == '.onnx': + import onnxruntime + onnx_sess = onnxruntime.InferenceSession(args.checkpoint) + setattr(model, '__Kn_ONNX_Sess__', onnx_sess) + model.forward = model.forward_kneron + elif os.path.splitext(args.checkpoint)[-1] == '.nef': + try: + import kp + except Exception: + warnings.warn( + 'Kneron PLUS software failed to import; please check document ' + 'http://doc.kneron.com/docs/#plus_python ' + 'for installation guide') + + # Use first scaned kneron usb dongle + device_group = kp.core.connect_devices(usb_port_ids=[0]) + + # Load model + model_nef_descriptor = kp.core.load_model_from_file( + device_group=device_group, file_path=args.checkpoint + ) + + # Generate preprocess setting for PLUS + generic_raw_image_header = kp.GenericRawImageHeader( + model_id=model_nef_descriptor.models[0].id, + resize_mode=kp.ResizeMode.KP_RESIZE_ENABLE, + padding_mode=kp.PaddingMode.KP_PADDING_CORNER, + normalize_mode=kp.NormalizeMode.KP_NORMALIZE_KNERON, + inference_number=0 + ) + kp_params = { + 'device_group': device_group, + 'model_nef_descriptor': model_nef_descriptor, + 'generic_raw_image_header': generic_raw_image_header + } + setattr(model, '__Kn_PLUS_Params__', kp_params) + model.forward = model.forward_kneron + + if not distributed: + model = MMDataParallel(model, device_ids=cfg.gpu_ids) + outputs = single_gpu_test(model, data_loader, args.show, args.show_dir, + args.show_score_thr) + else: + model = MMDistributedDataParallel( + model.cuda(), + device_ids=[torch.cuda.current_device()], + broadcast_buffers=False) + outputs = multi_gpu_test(model, data_loader, args.tmpdir, + args.gpu_collect) + + rank, _ = get_dist_info() + if rank == 0: + if args.out: + print(f'\nwriting results to {args.out}') + mmcv.dump(outputs, args.out) + if args.out_kneron: + print(f'\nwriting results to {args.out_kneron}') + import json + import numpy as np + formate_data = [] + for dets, data in zip(outputs, data_loader): + for label, det in enumerate(dets): + cls = np.full((len(det), 1), label + 1) + det = np.hstack((det, cls)) + det[:, 2] = det[:, 2] - det[:, 0] + det[:, 3] = det[:, 3] - det[:, 1] + if label == 0: + tmp = det + else: + tmp = np.vstack((tmp, det)) + formate_data.append({ + 'img_path': 
data['img_metas'][0].data[0][0]['filename'], + 'bbox': tmp.tolist() + }) + with open(args.out_kneron, 'w') as f: + json.dump(formate_data, f, ensure_ascii=True, indent=4) + kwargs = {} if args.eval_options is None else args.eval_options + if args.format_only: + dataset.format_results(outputs, **kwargs) + if args.eval: + eval_kwargs = cfg.get('evaluation', {}).copy() + # hard-code way to remove EvalHook args + for key in [ + 'interval', 'tmpdir', 'start', 'gpu_collect', 'save_best', + 'rule', 'dynamic_intervals' + ]: + eval_kwargs.pop(key, None) + eval_kwargs.update(dict(metric=args.eval, **kwargs)) + metric = dataset.evaluate(outputs, **eval_kwargs) + print(metric) + metric_dict = dict(config=args.config, metric=metric) + if args.work_dir is not None and rank == 0: + mmcv.dump(metric_dict, json_file) + + +if __name__ == '__main__': + main() diff --git a/tools/train.py b/tools/train.py new file mode 100644 index 0000000..b9e9981 --- /dev/null +++ b/tools/train.py @@ -0,0 +1,209 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import argparse +import copy +import os +import os.path as osp +import time +import warnings + +import mmcv +import torch +from mmcv import Config, DictAction +from mmcv.runner import get_dist_info, init_dist +from mmcv.utils import get_git_hash + +from mmdet import __version__ +from mmdet.apis import init_random_seed, set_random_seed, train_detector +from mmdet.datasets import build_dataset +from mmdet.models import build_detector +from mmdet.utils import collect_env, get_root_logger, setup_multi_processes + + +def parse_args(): + parser = argparse.ArgumentParser(description='Train a detector') + parser.add_argument('config', help='train config file path') + parser.add_argument('--work-dir', help='the dir to save logs and models') + parser.add_argument( + '--resume-from', help='the checkpoint file to resume from') + parser.add_argument( + '--auto-resume', + action='store_true', + help='resume from the latest checkpoint automatically') + parser.add_argument( + '--no-validate', + action='store_true', + help='whether not to evaluate the checkpoint during training') + group_gpus = parser.add_mutually_exclusive_group() + group_gpus.add_argument( + '--gpus', + type=int, + help='(Deprecated, please use --gpu-id) number of gpus to use ' + '(only applicable to non-distributed training)') + group_gpus.add_argument( + '--gpu-ids', + type=int, + nargs='+', + help='(Deprecated, please use --gpu-id) ids of gpus to use ' + '(only applicable to non-distributed training)') + group_gpus.add_argument( + '--gpu-id', + type=int, + default=0, + help='id of gpu to use ' + '(only applicable to non-distributed training)') + parser.add_argument('--seed', type=int, default=None, help='random seed') + parser.add_argument( + '--deterministic', + action='store_true', + help='whether to set deterministic options for CUDNN backend.') + parser.add_argument( + '--options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file (deprecate), ' + 'change to --cfg-options instead.') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. If the value to ' + 'be overwritten is a list, it should be like key="[a,b]" or key=a,b ' + 'It also allows nested list/tuple values, e.g. 
key="[(a,b),(c,d)]" ' + 'Note that the quotation marks are necessary and that no white space ' + 'is allowed.') + parser.add_argument( + '--launcher', + choices=['none', 'pytorch', 'slurm', 'mpi'], + default='none', + help='job launcher') + parser.add_argument('--local_rank', type=int, default=0) + args = parser.parse_args() + if 'LOCAL_RANK' not in os.environ: + os.environ['LOCAL_RANK'] = str(args.local_rank) + + if args.options and args.cfg_options: + raise ValueError( + '--options and --cfg-options cannot be both ' + 'specified, --options is deprecated in favor of --cfg-options') + if args.options: + warnings.warn('--options is deprecated in favor of --cfg-options') + args.cfg_options = args.options + + return args + + +def main(): + args = parse_args() + + cfg = Config.fromfile(args.config) + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + + # set multi-process settings + setup_multi_processes(cfg) + + # set cudnn_benchmark + if cfg.get('cudnn_benchmark', False): + torch.backends.cudnn.benchmark = True + + # work_dir is determined in this priority: CLI > segment in file > filename + if args.work_dir is not None: + # update configs according to CLI args if args.work_dir is not None + cfg.work_dir = args.work_dir + elif cfg.get('work_dir', None) is None: + # use config filename as default work_dir if cfg.work_dir is None + cfg.work_dir = osp.join('./work_dirs', + osp.splitext(osp.basename(args.config))[0]) + if args.resume_from is not None: + cfg.resume_from = args.resume_from + cfg.auto_resume = args.auto_resume + if args.gpus is not None: + cfg.gpu_ids = range(1) + warnings.warn('`--gpus` is deprecated because we only support ' + 'single GPU mode in non-distributed training. ' + 'Use `gpus=1` now.') + if args.gpu_ids is not None: + cfg.gpu_ids = args.gpu_ids[0:1] + warnings.warn('`--gpu-ids` is deprecated, please use `--gpu-id`. ' + 'Because we only support single GPU mode in ' + 'non-distributed training. Use the first GPU ' + 'in `gpu_ids` now.') + if args.gpus is None and args.gpu_ids is None: + cfg.gpu_ids = [args.gpu_id] + + # init distributed env first, since logger depends on the dist info. 
+ if args.launcher == 'none': + distributed = False + else: + distributed = True + init_dist(args.launcher, **cfg.dist_params) + # re-set gpu_ids with distributed training mode + _, world_size = get_dist_info() + cfg.gpu_ids = range(world_size) + + # create work_dir + mmcv.mkdir_or_exist(osp.abspath(cfg.work_dir)) + # dump config + cfg.dump(osp.join(cfg.work_dir, osp.basename(args.config))) + # init the logger before other steps + timestamp = time.strftime('%Y%m%d_%H%M%S', time.localtime()) + log_file = osp.join(cfg.work_dir, f'{timestamp}.log') + logger = get_root_logger(log_file=log_file, log_level=cfg.log_level) + + # init the meta dict to record some important information such as + # environment info and seed, which will be logged + meta = dict() + # log env info + env_info_dict = collect_env() + env_info = '\n'.join([(f'{k}: {v}') for k, v in env_info_dict.items()]) + dash_line = '-' * 60 + '\n' + logger.info('Environment info:\n' + dash_line + env_info + '\n' + + dash_line) + meta['env_info'] = env_info + meta['config'] = cfg.pretty_text + # log some basic info + logger.info(f'Distributed training: {distributed}') + logger.info(f'Config:\n{cfg.pretty_text}') + + # set random seeds + seed = init_random_seed(args.seed) + logger.info(f'Set random seed to {seed}, ' + f'deterministic: {args.deterministic}') + set_random_seed(seed, deterministic=args.deterministic) + cfg.seed = seed + meta['seed'] = seed + meta['exp_name'] = osp.basename(args.config) + + model = build_detector( + cfg.model, + train_cfg=cfg.get('train_cfg'), + test_cfg=cfg.get('test_cfg')) + model.init_weights() + + datasets = [build_dataset(cfg.data.train)] + if len(cfg.workflow) == 2: + val_dataset = copy.deepcopy(cfg.data.val) + val_dataset.pipeline = cfg.data.train.pipeline + datasets.append(build_dataset(val_dataset)) + if cfg.checkpoint_config is not None: + # save mmdet version, config file content and class names in + # checkpoints as meta data + cfg.checkpoint_config.meta = dict( + mmdet_version=__version__ + get_git_hash()[:7], + CLASSES=datasets[0].CLASSES) + # add an attribute for visualization convenience + model.CLASSES = datasets[0].CLASSES + train_detector( + model, + datasets, + cfg, + distributed=distributed, + validate=(not args.no_validate), + timestamp=timestamp, + meta=meta) + + +if __name__ == '__main__': + main() diff --git a/train.py b/train.py new file mode 100644 index 0000000..f551a66 --- /dev/null +++ b/train.py @@ -0,0 +1,573 @@ +import argparse +import logging +import math +import os +import random +import time +from pathlib import Path +from warnings import warn + +import numpy as np +import torch.distributed as dist +import torch.nn as nn +import torch.nn.functional as F +import torch.optim as optim +import torch.optim.lr_scheduler as lr_scheduler +import torch.utils.data +import yaml +from torch.cuda import amp +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.utils.tensorboard import SummaryWriter +from tqdm import tqdm + +import test # import test.py to get mAP after each epoch +from models.yolo import Model +from utils.autoanchor import check_anchors +from utils.datasets import create_dataloader +from utils.general import labels_to_class_weights, increment_path, labels_to_image_weights, init_seeds, \ + fitness, strip_optimizer, get_latest_run, check_dataset, check_file, check_git_status, check_img_size, \ + print_mutation, set_logging +from utils.google_utils import attempt_download +from utils.loss import compute_loss +from utils.plots import plot_images, 
plot_labels, plot_results, plot_evolution +from utils.torch_utils import ModelEMA, select_device, intersect_dicts, torch_distributed_zero_first + +logger = logging.getLogger(__name__) + +try: + import wandb +except ImportError: + wandb = None + logger.info("Install Weights & Biases for experiment logging via 'pip install wandb' (recommended)") + + +def train(hyp, opt, device, tb_writer=None, wandb=None): + logger.info(f'Hyperparameters {hyp}') + save_dir, epochs, batch_size, total_batch_size, weights, rank = \ + Path(opt.save_dir), opt.epochs, opt.batch_size, opt.total_batch_size, opt.weights, opt.global_rank + + # Directories + wdir = save_dir / 'weights' + wdir.mkdir(parents=True, exist_ok=True) # make dir + last = wdir / 'last.pt' + best = wdir / 'best.pt' + results_file = save_dir / 'results.txt' + + # Save run settings + with open(save_dir / 'hyp.yaml', 'w') as f: + yaml.dump(hyp, f, sort_keys=False) + with open(save_dir / 'opt.yaml', 'w') as f: + yaml.dump(vars(opt), f, sort_keys=False) + + # Configure + plots = not opt.evolve # create plots + cuda = device.type != 'cpu' + init_seeds(2 + rank) + with open(opt.data) as f: + data_dict = yaml.load(f, Loader=yaml.FullLoader) # data dict + with torch_distributed_zero_first(rank): + check_dataset(data_dict) # check + train_path = data_dict['train'] + test_path = data_dict['val'] + nc, names = (1, ['item']) if opt.single_cls else (int(data_dict['nc']), data_dict['names']) # number classes, names + assert len(names) == nc, '%g names found for nc=%g dataset in %s' % (len(names), nc, opt.data) # check + + # Model + pretrained = weights.endswith('.pt') + if pretrained: + with torch_distributed_zero_first(rank): + attempt_download(weights) # download if not found locally + ckpt = torch.load(weights, map_location=device) # load checkpoint + if hyp.get('anchors'): + ckpt['model'].yaml['anchors'] = round(hyp['anchors']) # force autoanchor + model = Model(opt.cfg or ckpt['model'].yaml, ch=3, nc=nc).to(device) # create + exclude = ['anchor'] if opt.cfg or hyp.get('anchors') else [] # exclude keys + state_dict = ckpt['model'].float().state_dict() # to FP32 + state_dict = intersect_dicts(state_dict, model.state_dict(), exclude=exclude) # intersect + model.load_state_dict(state_dict, strict=False) # load + logger.info('Transferred %g/%g items from %s' % (len(state_dict), len(model.state_dict()), weights)) # report + else: + model = Model(opt.cfg, ch=3, nc=nc).to(device) # create + + # Freeze + freeze = [] # parameter names to freeze (full or partial) + for k, v in model.named_parameters(): + v.requires_grad = True # train all layers + if any(x in k for x in freeze): + print('freezing %s' % k) + v.requires_grad = False + + # Optimizer + nbs = 64 # nominal batch size + accumulate = max(round(nbs / total_batch_size), 1) # accumulate loss before optimizing + hyp['weight_decay'] *= total_batch_size * accumulate / nbs # scale weight_decay + + pg0, pg1, pg2 = [], [], [] # optimizer parameter groups + for k, v in model.named_modules(): + if hasattr(v, 'bias') and isinstance(v.bias, nn.Parameter): + pg2.append(v.bias) # biases + if isinstance(v, nn.BatchNorm2d): + pg0.append(v.weight) # no decay + elif hasattr(v, 'weight') and isinstance(v.weight, nn.Parameter): + pg1.append(v.weight) # apply decay + + if opt.adam: + optimizer = optim.Adam(pg0, lr=hyp['lr0'], betas=(hyp['momentum'], 0.999)) # adjust beta1 to momentum + else: + optimizer = optim.SGD(pg0, lr=hyp['lr0'], momentum=hyp['momentum'], nesterov=True) + + optimizer.add_param_group({'params': pg1, 
'weight_decay': hyp['weight_decay']}) # add pg1 with weight_decay + optimizer.add_param_group({'params': pg2}) # add pg2 (biases) + logger.info('Optimizer groups: %g .bias, %g conv.weight, %g other' % (len(pg2), len(pg1), len(pg0))) + del pg0, pg1, pg2 + + # Scheduler https://arxiv.org/pdf/1812.01187.pdf + # https://pytorch.org/docs/stable/_modules/torch/optim/lr_scheduler.html#OneCycleLR + lf = lambda x: ((1 + math.cos(x * math.pi / epochs)) / 2) * (1 - hyp['lrf']) + hyp['lrf'] # cosine + scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lf) + # plot_lr_scheduler(optimizer, scheduler, epochs) + + # Logging + if wandb and wandb.run is None: + opt.hyp = hyp # add hyperparameters + wandb_run = wandb.init(config=opt, resume="allow", + project='YOLOv5' if opt.project == 'runs/train' else Path(opt.project).stem, + name=save_dir.stem, + id=ckpt.get('wandb_id') if 'ckpt' in locals() else None) + + # Resume + start_epoch, best_fitness = 0, 0.0 + if pretrained: + # Optimizer + if ckpt['optimizer'] is not None: + optimizer.load_state_dict(ckpt['optimizer']) + best_fitness = ckpt['best_fitness'] + + # Results + if ckpt.get('training_results') is not None: + with open(results_file, 'w') as file: + file.write(ckpt['training_results']) # write results.txt + + # Epochs + start_epoch = ckpt['epoch'] + 1 + if opt.resume: + assert start_epoch > 0, '%s training to %g epochs is finished, nothing to resume.' % (weights, epochs) + if epochs < start_epoch: + logger.info('%s has been trained for %g epochs. Fine-tuning for %g additional epochs.' % + (weights, ckpt['epoch'], epochs)) + epochs += ckpt['epoch'] # finetune additional epochs + + del ckpt, state_dict + + # Image sizes + gs = int(max(model.stride)) # grid size (max stride) + imgsz, imgsz_test = [check_img_size(x, gs) for x in opt.img_size] # verify imgsz are gs-multiples + + # DP mode + if cuda and rank == -1 and torch.cuda.device_count() > 1: + model = torch.nn.DataParallel(model) + + # SyncBatchNorm + if opt.sync_bn and cuda and rank != -1: + model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model).to(device) + logger.info('Using SyncBatchNorm()') + + # EMA + ema = ModelEMA(model) if rank in [-1, 0] else None + + # DDP mode + if cuda and rank != -1: + model = DDP(model, device_ids=[opt.local_rank], output_device=opt.local_rank) + + # Trainloader + dataloader, dataset = create_dataloader(train_path, imgsz, batch_size, gs, opt, + hyp=hyp, augment=True, cache=opt.cache_images, rect=opt.rect, + rank=rank, world_size=opt.world_size, workers=opt.workers) + mlc = np.concatenate(dataset.labels, 0)[:, 0].max() # max label class + nb = len(dataloader) # number of batches + assert mlc < nc, 'Label class %g exceeds nc=%g in %s. Possible class labels are 0-%g' % (mlc, nc, opt.data, nc - 1) + + # Process 0 + if rank in [-1, 0]: + ema.updates = start_epoch * nb // accumulate # set EMA updates + testloader = create_dataloader(test_path, imgsz_test, total_batch_size, gs, opt, + hyp=hyp, cache=opt.cache_images and not opt.notest, rect=True, + rank=-1, world_size=opt.world_size, workers=opt.workers)[0] # testloader + + if not opt.resume: + labels = np.concatenate(dataset.labels, 0) + c = torch.tensor(labels[:, 0]) # classes + # cf = torch.bincount(c.long(), minlength=nc) + 1. 
# frequency
+            # model._initialize_biases(cf.to(device))
+            if plots:
+                plot_labels(labels, save_dir=save_dir)
+                if tb_writer:
+                    tb_writer.add_histogram('classes', c, 0)
+                if wandb:
+                    wandb.log({"Labels": [wandb.Image(str(x), caption=x.name) for x in save_dir.glob('*labels*.png')]})
+
+            # Anchors
+            if not opt.noautoanchor:
+                check_anchors(dataset, model=model, thr=hyp['anchor_t'], imgsz=imgsz)
+
+    # Model parameters
+    hyp['cls'] *= nc / 80.  # scale coco-tuned hyp['cls'] to current dataset
+    model.nc = nc  # attach number of classes to model
+    model.hyp = hyp  # attach hyperparameters to model
+    model.gr = 1.0  # iou loss ratio (obj_loss = 1.0 or iou)
+    model.class_weights = labels_to_class_weights(dataset.labels, nc).to(device)  # attach class weights
+    model.names = names
+
+    # Start training
+    t0 = time.time()
+    nw = max(round(hyp['warmup_epochs'] * nb), 1000)  # number of warmup iterations, max(hyp['warmup_epochs'] epochs, 1k iterations)
+    # nw = min(nw, (epochs - start_epoch) / 2 * nb)  # limit warmup to < 1/2 of training
+    maps = np.zeros(nc)  # mAP per class
+    results = (0, 0, 0, 0, 0, 0, 0)  # P, R, mAP@.5, mAP@.5-.95, val_loss(box, obj, cls)
+    scheduler.last_epoch = start_epoch - 1  # do not move
+    scaler = amp.GradScaler(enabled=cuda)
+    logger.info('Image sizes %g train, %g test\n'
+                'Using %g dataloader workers\nLogging results to %s\n'
+                'Starting training for %g epochs...' % (imgsz, imgsz_test, dataloader.num_workers, save_dir, epochs))
+    for epoch in range(start_epoch, epochs):  # epoch ------------------------------------------------------------------
+        model.train()
+
+        # Update image weights (optional)
+        if opt.image_weights:
+            # Generate indices
+            if rank in [-1, 0]:
+                cw = model.class_weights.cpu().numpy() * (1 - maps) ** 2  # class weights
+                iw = labels_to_image_weights(dataset.labels, nc=nc, class_weights=cw)  # image weights
+                dataset.indices = random.choices(range(dataset.n), weights=iw, k=dataset.n)  # rand weighted idx
+            # Broadcast if DDP
+            if rank != -1:
+                indices = (torch.tensor(dataset.indices) if rank == 0 else torch.zeros(dataset.n)).int()
+                dist.broadcast(indices, 0)
+                if rank != 0:
+                    dataset.indices = indices.cpu().numpy()
+
+        # Update mosaic border
+        # b = int(random.uniform(0.25 * imgsz, 0.75 * imgsz + gs) // gs * gs)
+        # dataset.mosaic_border = [b - imgsz, -b]  # height, width borders
+
+        mloss = torch.zeros(4, device=device)  # mean losses
+        if rank != -1:
+            dataloader.sampler.set_epoch(epoch)
+        pbar = enumerate(dataloader)
+        logger.info(('\n' + '%10s' * 8) % ('Epoch', 'gpu_mem', 'box', 'obj', 'cls', 'total', 'targets', 'img_size'))
+        if rank in [-1, 0]:
+            pbar = tqdm(pbar, total=nb)  # progress bar
+        optimizer.zero_grad()
+        for i, (imgs, targets, paths, _) in pbar:  # batch -------------------------------------------------------------
+            ni = i + nb * epoch  # number integrated batches (since train start)
+            imgs = imgs.to(device, non_blocking=True).float() / 256.0 - 0.5  # uint8 to float32, maps 0-255 to -0.5..0.496 (x / 256 - 0.5)
+
+            # Warmup
+            if ni <= nw:
+                xi = [0, nw]  # x interp
+                # model.gr = np.interp(ni, xi, [0.0, 1.0])  # iou loss ratio (obj_loss = 1.0 or iou)
+                accumulate = max(1, np.interp(ni, xi, [1, nbs / total_batch_size]).round())
+                for j, x in enumerate(optimizer.param_groups):
+                    # bias lr falls from 0.1 to lr0, all other lrs rise from 0.0 to lr0
+                    x['lr'] = np.interp(ni, xi, [hyp['warmup_bias_lr'] if j == 2 else 0.0, x['initial_lr'] * lf(epoch)])
+                    if 'momentum' in x:
+                        x['momentum'] = np.interp(ni, xi, [hyp['warmup_momentum'], hyp['momentum']])
+
+            # Multi-scale
+            if opt.multi_scale:
+                sz = 
random.randrange(imgsz * 0.5, imgsz * 1.5 + gs) // gs * gs # size + sf = sz / max(imgs.shape[2:]) # scale factor + if sf != 1: + ns = [math.ceil(x * sf / gs) * gs for x in imgs.shape[2:]] # new shape (stretched to gs-multiple) + imgs = F.interpolate(imgs, size=ns, mode='bilinear', align_corners=False) + + # Forward + with amp.autocast(enabled=cuda): + pred = model(imgs) # forward + loss, loss_items = compute_loss(pred, targets.to(device), model) # loss scaled by batch_size + if rank != -1: + loss *= opt.world_size # gradient averaged between devices in DDP mode + + # Backward + scaler.scale(loss).backward() + + # Optimize + if ni % accumulate == 0: + scaler.step(optimizer) # optimizer.step + scaler.update() + optimizer.zero_grad() + if ema: + ema.update(model) + + # Print + if rank in [-1, 0]: + mloss = (mloss * i + loss_items) / (i + 1) # update mean losses + mem = '%.3gG' % (torch.cuda.memory_reserved() / 1E9 if torch.cuda.is_available() else 0) # (GB) + s = ('%10s' * 2 + '%10.4g' * 6) % ( + '%g/%g' % (epoch, epochs - 1), mem, *mloss, targets.shape[0], imgs.shape[-1]) + pbar.set_description(s) + + # Plot + if plots and ni < 3: + f = save_dir / f'train_batch{ni}.jpg' # filename + plot_images(images=imgs, targets=targets, paths=paths, fname=f) + # if tb_writer: + # tb_writer.add_image(f, result, dataformats='HWC', global_step=epoch) + # tb_writer.add_graph(model, imgs) # add model to tensorboard + elif plots and ni == 3 and wandb: + wandb.log({"Mosaics": [wandb.Image(str(x), caption=x.name) for x in save_dir.glob('train*.jpg')]}) + # if i>1: break + # end batch ------------------------------------------------------------------------------------------------ + # end epoch ---------------------------------------------------------------------------------------------------- + + # Scheduler + lr = [x['lr'] for x in optimizer.param_groups] # for tensorboard + scheduler.step() + + # DDP process 0 or single-GPU + if rank in [-1, 0]: + # mAP + if ema: + ema.update_attr(model, include=['yaml', 'nc', 'hyp', 'gr', 'names', 'stride']) + final_epoch = epoch + 1 == epochs + if not opt.notest or final_epoch: # Calculate mAP + results, maps, times = test.test(opt.data, + batch_size=total_batch_size, + imgsz=imgsz_test, + model=ema.ema, + single_cls=opt.single_cls, + dataloader=testloader, + save_dir=save_dir, + plots=plots and final_epoch, + log_imgs=opt.log_imgs if wandb else 0) + + # Write + with open(results_file, 'a') as f: + f.write(s + '%10.4g' * 7 % results + '\n') # P, R, mAP@.5, mAP@.5-.95, val_loss(box, obj, cls) + if len(opt.name) and opt.bucket: + os.system('gsutil cp %s gs://%s/results/results%s.txt' % (results_file, opt.bucket, opt.name)) + + # Log + tags = ['train/box_loss', 'train/obj_loss', 'train/cls_loss', # train loss + 'metrics/precision', 'metrics/recall', 'metrics/mAP_0.5', 'metrics/mAP_0.5:0.95', + 'val/box_loss', 'val/obj_loss', 'val/cls_loss', # val loss + 'x/lr0', 'x/lr1', 'x/lr2'] # params + for x, tag in zip(list(mloss[:-1]) + list(results) + lr, tags): + if tb_writer: + tb_writer.add_scalar(tag, x, epoch) # tensorboard + if wandb: + wandb.log({tag: x}) # W&B + + # Update best mAP + fi = fitness(np.array(results).reshape(1, -1)) # weighted combination of [P, R, mAP@.5, mAP@.5-.95] + if fi > best_fitness: + best_fitness = fi + + # Save model + save = (not opt.nosave) or (final_epoch and not opt.evolve) + if save: + with open(results_file, 'r') as f: # create checkpoint + ckpt = {'epoch': epoch, + 'best_fitness': best_fitness, + 'training_results': f.read(), + 'model': ema.ema, + 
'optimizer': None if final_epoch else optimizer.state_dict(), + 'wandb_id': wandb_run.id if wandb else None} + + # Save last, best and delete + torch.save(ckpt, last) + if best_fitness == fi: + torch.save(ckpt, best) + del ckpt + # end epoch ---------------------------------------------------------------------------------------------------- + # end training + + if rank in [-1, 0]: + # Strip optimizers + n = opt.name if opt.name.isnumeric() else '' + fresults, flast, fbest = save_dir / f'results{n}.txt', wdir / f'last{n}.pt', wdir / f'best{n}.pt' + for f1, f2 in zip([wdir / 'last.pt', wdir / 'best.pt', results_file], [flast, fbest, fresults]): + if f1.exists(): + os.rename(f1, f2) # rename + if str(f2).endswith('.pt'): # is *.pt + strip_optimizer(f2) # strip optimizer + os.system('gsutil cp %s gs://%s/weights' % (f2, opt.bucket)) if opt.bucket else None # upload + # Finish + if plots: + plot_results(save_dir=save_dir) # save as results.png + if wandb: + wandb.log({"Results": [wandb.Image(str(save_dir / x), caption=x) for x in + ['results.png', 'precision-recall_curve.png']]}) + logger.info('%g epochs completed in %.3f hours.\n' % (epoch - start_epoch + 1, (time.time() - t0) / 3600)) + else: + dist.destroy_process_group() + + wandb.run.finish() if wandb and wandb.run else None + torch.cuda.empty_cache() + return results + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--weights', type=str, default='yolov5s.pt', help='initial weights path') + parser.add_argument('--cfg', type=str, default='', help='model.yaml path') + parser.add_argument('--data', type=str, default='data/coco128.yaml', help='data.yaml path') + parser.add_argument('--hyp', type=str, default='data/hyp.scratch.yaml', help='hyperparameters path') + parser.add_argument('--epochs', type=int, default=300) + parser.add_argument('--batch-size', type=int, default=16, help='total batch size for all GPUs') + parser.add_argument('--img-size', nargs='+', type=int, default=[640, 640], help='[train, test] image sizes') + parser.add_argument('--rect', action='store_true', help='rectangular training') + parser.add_argument('--resume', nargs='?', const=True, default=False, help='resume most recent training') + parser.add_argument('--nosave', action='store_true', help='only save final checkpoint') + parser.add_argument('--notest', action='store_true', help='only test final epoch') + parser.add_argument('--noautoanchor', action='store_true', help='disable autoanchor check') + parser.add_argument('--evolve', action='store_true', help='evolve hyperparameters') + parser.add_argument('--bucket', type=str, default='', help='gsutil bucket') + parser.add_argument('--cache-images', action='store_true', help='cache images for faster training') + parser.add_argument('--image-weights', action='store_true', help='use weighted image selection for training') + parser.add_argument('--device', default='', help='cuda device, i.e. 
0 or 0,1,2,3 or cpu') + parser.add_argument('--multi-scale', action='store_true', help='vary img-size +/- 50%%') + parser.add_argument('--single-cls', action='store_true', help='train as single-class dataset') + parser.add_argument('--adam', action='store_true', help='use torch.optim.Adam() optimizer') + parser.add_argument('--sync-bn', action='store_true', help='use SyncBatchNorm, only available in DDP mode') + parser.add_argument('--local_rank', type=int, default=-1, help='DDP parameter, do not modify') + parser.add_argument('--log-imgs', type=int, default=16, help='number of images for W&B logging, max 100') + parser.add_argument('--workers', type=int, default=8, help='maximum number of dataloader workers') + parser.add_argument('--project', default='runs/train', help='save to project/name') + parser.add_argument('--name', default='exp', help='save to project/name') + parser.add_argument('--exist-ok', action='store_true', help='existing project/name ok, do not increment') + opt = parser.parse_args() + + # Set DDP variables + opt.total_batch_size = opt.batch_size + opt.world_size = int(os.environ['WORLD_SIZE']) if 'WORLD_SIZE' in os.environ else 1 + opt.global_rank = int(os.environ['RANK']) if 'RANK' in os.environ else -1 + set_logging(opt.global_rank) + if opt.global_rank in [-1, 0]: + check_git_status() + + # Resume + if opt.resume: # resume an interrupted run + ckpt = opt.resume if isinstance(opt.resume, str) else get_latest_run() # specified or most recent path + assert os.path.isfile(ckpt), 'ERROR: --resume checkpoint does not exist' + with open(Path(ckpt).parent.parent / 'opt.yaml') as f: + opt = argparse.Namespace(**yaml.load(f, Loader=yaml.FullLoader)) # replace + print('opt',opt) + opt.cfg, opt.weights, opt.resume = '', ckpt, True + logger.info('Resuming training from %s' % ckpt) + else: + # opt.hyp = opt.hyp or ('hyp.finetune.yaml' if opt.weights else 'hyp.scratch.yaml') + opt.data, opt.cfg, opt.hyp = check_file(opt.data), check_file(opt.cfg), check_file(opt.hyp) # check files + assert len(opt.cfg) or len(opt.weights), 'either --cfg or --weights must be specified' + opt.img_size.extend([opt.img_size[-1]] * (2 - len(opt.img_size))) # extend to 2 sizes (train, test) + opt.name = 'evolve' if opt.evolve else opt.name + opt.save_dir = increment_path(Path(opt.project) / opt.name, exist_ok=opt.exist_ok | opt.evolve) # increment run + + # DDP mode + device = select_device(opt.device, batch_size=opt.batch_size) + if opt.local_rank != -1: + assert torch.cuda.device_count() > opt.local_rank + torch.cuda.set_device(opt.local_rank) + device = torch.device('cuda', opt.local_rank) + dist.init_process_group(backend='nccl', init_method='env://') # distributed backend + assert opt.batch_size % opt.world_size == 0, '--batch-size must be multiple of CUDA device count' + opt.batch_size = opt.total_batch_size // opt.world_size + + # Hyperparameters + with open(opt.hyp) as f: + hyp = yaml.load(f, Loader=yaml.FullLoader) # load hyps + if 'box' not in hyp: + warn('Compatibility: %s missing "box" which was renamed from "giou" in %s' % + (opt.hyp, 'https://github.com/ultralytics/yolov5/pull/1120')) + hyp['box'] = hyp.pop('giou') + + # Train + logger.info(opt) + if not opt.evolve: + tb_writer = None # init loggers + if opt.global_rank in [-1, 0]: + logger.info(f'Start Tensorboard with "tensorboard --logdir {opt.project}", view at http://localhost:6006/') + tb_writer = SummaryWriter(opt.save_dir) # Tensorboard + train(hyp, opt, device, tb_writer, wandb) + + # Evolve hyperparameters (optional) + else: + # 
Hyperparameter evolution metadata (mutation scale 0-1, lower_limit, upper_limit)
+        meta = {'lr0': (1, 1e-5, 1e-1),  # initial learning rate (SGD=1E-2, Adam=1E-3)
+                'lrf': (1, 0.01, 1.0),  # final OneCycleLR learning rate (lr0 * lrf)
+                'momentum': (0.3, 0.6, 0.98),  # SGD momentum/Adam beta1
+                'weight_decay': (1, 0.0, 0.001),  # optimizer weight decay
+                'warmup_epochs': (1, 0.0, 5.0),  # warmup epochs (fractions ok)
+                'warmup_momentum': (1, 0.0, 0.95),  # warmup initial momentum
+                'warmup_bias_lr': (1, 0.0, 0.2),  # warmup initial bias lr
+                'box': (1, 0.02, 0.2),  # box loss gain
+                'cls': (1, 0.2, 4.0),  # cls loss gain
+                'cls_pw': (1, 0.5, 2.0),  # cls BCELoss positive_weight
+                'obj': (1, 0.2, 4.0),  # obj loss gain (scale with pixels)
+                'obj_pw': (1, 0.5, 2.0),  # obj BCELoss positive_weight
+                'iou_t': (0, 0.1, 0.7),  # IoU training threshold
+                'anchor_t': (1, 2.0, 8.0),  # anchor-multiple threshold
+                'anchors': (2, 2.0, 10.0),  # anchors per output grid (0 to ignore)
+                'fl_gamma': (0, 0.0, 2.0),  # focal loss gamma (EfficientDet default gamma=1.5)
+                'hsv_h': (1, 0.0, 0.1),  # image HSV-Hue augmentation (fraction)
+                'hsv_s': (1, 0.0, 0.9),  # image HSV-Saturation augmentation (fraction)
+                'hsv_v': (1, 0.0, 0.9),  # image HSV-Value augmentation (fraction)
+                'degrees': (1, 0.0, 45.0),  # image rotation (+/- deg)
+                'translate': (1, 0.0, 0.9),  # image translation (+/- fraction)
+                'scale': (1, 0.0, 0.9),  # image scale (+/- gain)
+                'shear': (1, 0.0, 10.0),  # image shear (+/- deg)
+                'perspective': (0, 0.0, 0.001),  # image perspective (+/- fraction), range 0-0.001
+                'flipud': (1, 0.0, 1.0),  # image flip up-down (probability)
+                'fliplr': (0, 0.0, 1.0),  # image flip left-right (probability)
+                'mosaic': (1, 0.0, 1.0),  # image mosaic (probability)
+                'mixup': (1, 0.0, 1.0)}  # image mixup (probability)
+
+        assert opt.local_rank == -1, 'DDP mode not implemented for --evolve'
+        opt.notest, opt.nosave = True, True  # only test/save final epoch
+        # ei = [isinstance(x, (int, float)) for x in hyp.values()]  # evolvable indices
+        yaml_file = Path(opt.save_dir) / 'hyp_evolved.yaml'  # save best result here
+        if opt.bucket:
+            os.system('gsutil cp gs://%s/evolve.txt .'
% opt.bucket) # download evolve.txt if exists + + for _ in range(300): # generations to evolve + if Path('evolve.txt').exists(): # if evolve.txt exists: select best hyps and mutate + # Select parent(s) + parent = 'single' # parent selection method: 'single' or 'weighted' + x = np.loadtxt('evolve.txt', ndmin=2) + n = min(5, len(x)) # number of previous results to consider + x = x[np.argsort(-fitness(x))][:n] # top n mutations + w = fitness(x) - fitness(x).min() # weights + if parent == 'single' or len(x) == 1: + # x = x[random.randint(0, n - 1)] # random selection + x = x[random.choices(range(n), weights=w)[0]] # weighted selection + elif parent == 'weighted': + x = (x * w.reshape(n, 1)).sum(0) / w.sum() # weighted combination + + # Mutate + mp, s = 0.8, 0.2 # mutation probability, sigma + npr = np.random + npr.seed(int(time.time())) + g = np.array([x[0] for x in meta.values()]) # gains 0-1 + ng = len(meta) + v = np.ones(ng) + while all(v == 1): # mutate until a change occurs (prevent duplicates) + v = (g * (npr.random(ng) < mp) * npr.randn(ng) * npr.random() * s + 1).clip(0.3, 3.0) + for i, k in enumerate(hyp.keys()): # plt.hist(v.ravel(), 300) + hyp[k] = float(x[i + 7] * v[i]) # mutate + + # Constrain to limits + for k, v in meta.items(): + hyp[k] = max(hyp[k], v[1]) # lower limit + hyp[k] = min(hyp[k], v[2]) # upper limit + hyp[k] = round(hyp[k], 5) # significant digits + + # Train mutation + results = train(hyp.copy(), opt, device, wandb=wandb) + + # Write mutation results + print_mutation(hyp.copy(), results, yaml_file, opt.bucket) + + # Plot results + plot_evolution(yaml_file) + print(f'Hyperparameter evolution complete. Best results saved as: {yaml_file}\n' + f'Command to train a new model with these hyperparameters: $ python train.py --hyp {yaml_file}') diff --git a/tutorial/README.md b/tutorial/README.md new file mode 100644 index 0000000..af5c836 --- /dev/null +++ b/tutorial/README.md @@ -0,0 +1,275 @@ +

+# Object Detection with YOLOv5

+
+This tutorial will go through a concrete example of how to train a YOLOv5 object detection model via our AI training platform. The coco128 dataset is provided.
+
+
+# Prerequisites
+First of all, we have to install the libraries. Python>=3.8 is required. For the other libraries, you can check the `requirements.txt` file. Installing these packages is simple; you can install them by running:
+
+```bash
+$ pip install -U pip
+$ pip install -r requirements.txt
+```
+
+# Dataset & Preparation
+
+Next, we need a dataset for training the model. For this tutorial, we use the COCO128 dataset.
+
+## Annotations Format
+After using a tool like [CVAT](https://github.com/openvinotoolkit/cvat), [makesense.ai](https://www.makesense.ai) or [Labelbox](https://labelbox.com) to label your images, export your labels to YOLO format, with one `*.txt` file per image (if an image contains no objects, no `*.txt` file is required). The `*.txt` file specifications are:
+
+- One row per object.
+- Each row is in `class x_center y_center width height` format.
+- Box coordinates must be in normalized xywh format (from 0 to 1). If your boxes are in pixels, divide `x_center` and `width` by the image width, and `y_center` and `height` by the image height.
+- Class numbers are zero-indexed (start from 0).
+
+Here, let's go through a toy example of preparing the annotation files via [makesense.ai](https://www.makesense.ai).
+
+(1) Upload images to [makesense.ai](https://www.makesense.ai) and select the Object Detection option.
+
+ +
+ +(2) Create labels, and then draw the bounding boxes and choose labels for each image. + +
+ +
+
+(3) Export the annotations in YOLO format.
+
+ +
+
+(4) Eventually, you should get a `*.txt` file for each image (if an image contains no objects, no `*.txt` file is created), like the example below.
+
+ +
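+For instance, a hypothetical image containing two objects of class 0 could have a label file like this (the numbers are illustrative, not taken from a real image):
+
+```
+0 0.481 0.634 0.690 0.713
+0 0.736 0.247 0.194 0.311
+```
+
+If your boxes start out in pixels, the normalization rule from the list above can be applied with a few lines of Python (a hypothetical helper, not a script shipped in this repo):
+
+```python
+def to_yolo(cls_id, left, top, w, h, img_w, img_h):
+    # convert a pixel-space box (left, top, width, height) into a
+    # normalized YOLO row: class x_center y_center width height
+    x_c = (left + w / 2) / img_w
+    y_c = (top + h / 2) / img_h
+    return f"{cls_id} {x_c:.6f} {y_c:.6f} {w / img_w:.6f} {h / img_h:.6f}"
+
+print(to_yolo(0, 100, 50, 200, 300, 640, 480))  # -> "0 0.312500 0.416667 0.312500 0.625000"
+```
+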
+ +## Directory Organization +Your own datasets are expected to have the following structure. We assume `/coco128` is next to the `/yolov5` directory. YOLOv5 locates labels automatically for each image by replacing the last instance of `/images/` in each image path with `/labels/`. + +
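+A minimal tree that matches the paths used throughout this tutorial (and the `coco128.yaml` shown later) would be:
+
+```
+/parent_folder
+├── /coco128
+│   ├── images/train2017/   # *.jpg
+│   └── labels/train2017/   # *.txt
+└── /yolov5
+```
+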
+ +
+
+## dataset.yaml
+
+The yaml file for the COCO dataset has been prepared in `./data/coco.yaml`. For a custom dataset, you need to prepare your own yaml file and save it under `./data/`. The yaml file is expected to have the following format:
+
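+For reference, the `coco128.yaml` used in this tutorial follows this format (the `names` list is truncated here; the full 80-class list appears in the configuration section below):
+
+```yaml
+# train and val data as 1) directory: path/images/, 2) file: path/images.txt, or 3) list: [path1/images/, path2/images/]
+train: ../coco128/images/train2017/  # 128 images
+val: ../coco128/images/train2017/    # 128 images
+
+# number of classes
+nc: 80
+
+# class names
+names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', ...]
+```
+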
+ +
+
+
+# Train
+Let's look at how to train or fine-tune a model. There are several options and arguments to choose from. We provide two types of backbone models, one for 520 (without upsampling) and one for 720 (with upsampling).
+
+For training on a custom dataset, let's use the COCO128 dataset. Following the instructions in the dataset preparation section, we put the data folder `/coco128` next to the `/yolov5` directory and prepare `coco128.yaml`, saved under the folder `/yolov5/data/`. We download the pretrained model from [Model_Zoo](https://github.com/kneron/Model_Zoo/tree/main/detection/yolov5/yolov5s-noupsample). Suppose we would like to fine-tune a pretrained model for 520 and run just 2 epochs. Execute the commands in the folder `yolov5`:
+
+```shell
+wget https://raw.githubusercontent.com/kneron/Model_Zoo/main/detection/yolov5/yolov5s-noupsample/best.pt
+```
+
+```shell
+CUDA_VISIBLE_DEVICES='0' python train.py --data coco128.yaml --cfg yolov5s-noupsample.yaml --weights 'best.pt' --batch-size 2 --epoch 2
+```
+ +
+
+We get the trained model weights in `./runs/train/exp/weights/best.pt`.
+
+Note that for video we use a 640 (w) x 352 (h) input so the model runs faster. COCO contains both tall and wide images, so a 640 (w) x 640 (h) input is the better choice there.
+
+## Generating .npy for different model input
+We can generate `.npy` files for different model inputs by using `yolov5_generate_npy.py`. Execute the command in the folder `generate_npy`:
+```shell
+python yolov5_generate_npy.py
+```
+
+ +
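+We don't reproduce `yolov5_generate_npy.py` here, but the three filenames it produces map onto the three YOLOv5 detection strides for a 640x640 input (640/8 = 80, 640/16 = 40, 640/32 = 20 grid cells). A rough numpy sketch of building such grid meshes (illustrative only; the real script may store a different layout):
+
+```python
+import numpy as np
+
+# YOLOv5 detects at strides 8/16/32, so a 640x640 input yields
+# feature-map grids of 80x80, 40x40 and 20x20 cells.
+for stride in (8, 16, 32):
+    n = 640 // stride                                       # 80, 40, 20
+    ys, xs = np.meshgrid(np.arange(n), np.arange(n), indexing='ij')
+    grid = np.stack((xs, ys), axis=-1).astype(np.float32)   # (n, n, 2) cell coordinates
+    np.save(f'{n}_640x640.npy', grid)
+```
+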
+
+We now have the `*.npy` grid files.
+
+
+# Configure the paths yaml file
+You are expected to create a yaml file which stores all the paths related to the trained models. You can check and modify `pretrained_paths_520.yaml` and `pretrained_paths_720.yaml` under `/yolov5/data/`. Here is the config for our model trained on COCO128, `model_paths_520_coco128.yaml`:
+
+```yaml
+grid_dir: ../generate_npy/
+grid20_path: ../generate_npy/20_640x640.npy
+grid40_path: ../generate_npy/40_640x640.npy
+grid80_path: ../generate_npy/80_640x640.npy
+
+yolov5_dir: ./
+path: ./runs/train/exp/weights/best.pt
+yaml_path: ./models/yolov5s-noupsample.yaml
+pt_path: ./yolov5s-noupsample-coco128.pt # pytorch 1.4
+onnx_export_file: ./yolov5s-noupsample-coco128.onnx
+
+input_w: 640
+input_h: 640
+# number of classes
+nc: 80
+# class names
+names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light',
+        'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow',
+        'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee',
+        'skis', 'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove', 'skateboard', 'surfboard',
+        'tennis racket', 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple',
+        'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch',
+        'potted plant', 'bed', 'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone',
+        'microwave', 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase', 'scissors', 'teddy bear',
+        'hair drier', 'toothbrush']
+```
+
+# Save and Convert to ONNX
+So far, we have trained the YOLOv5 model. This section will walk you through how to save the trained model in a format supported by the ONNX converter and how to convert it to ONNX.
+
+## Exporting the onnx model in the PyTorch 1.7 environment
+We can convert the model to ONNX by using `yolov5_export.py`. Execute the command in the folder `yolov5`:
+```shell
+python ../exporting/yolov5_export.py --data ../yolov5/data/model_paths_520_coco128.yaml
+```
+
+ +
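+The export writes `yolov5s-noupsample-coco128.onnx` under the folder `yolov5`. Before converting it, we can sanity-check the file with onnxruntime (which `requirements.txt` already installs); this check is an illustrative addition rather than part of the original flow:
+
+```python
+import numpy as np
+import onnxruntime as ort
+
+# the exported graph takes a single 1x3x640x640 float input named 'images'
+sess = ort.InferenceSession('yolov5s-noupsample-coco128.onnx')
+inp = sess.get_inputs()[0]
+print(inp.name, inp.shape)  # e.g. images [1, 3, 640, 640]
+
+dummy = np.zeros((1, 3, 640, 640), dtype=np.float32)
+outputs = sess.run(None, {inp.name: dummy})
+for out in outputs:
+    print(out.shape)  # one raw prediction map per detection head
+```
+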
+
+
+## Converting the onnx model with the toolchain
+Pull the latest [ONNX converter](https://github.com/kneron/ONNX_Convertor/tree/master/optimizer_scripts) from GitHub, then execute the commands below in the folder that contains the onnx file; the clone has to happen before the conversion script can be run.
+(reference: https://github.com/kneron/ONNX_Convertor/tree/master/optimizer_scripts)
+
+```shell
+git clone https://github.com/kneron/ONNX_Convertor.git
+
+python -m onnxsim yolov5s-noupsample-coco128.onnx yolov5s-noupsample-coco128.onnx
+
+python ONNX_Convertor/optimizer_scripts/pytorch2onnx.py yolov5s-noupsample-coco128.onnx yolov5s-noupsample-coco128_convert.onnx
+```
+
+This produces `yolov5s-noupsample-coco128_convert.onnx`.
+
+
+# Inference
+
+In this section, we will go through an example of using a trained network for inference. That is, we'll pass an image into the network and detect and classify the objects in the image. Before model inference, we assume that the model has been converted to an onnx model as in the previous section. We will use the script `inference.py`, which takes an image and a model and returns the detection information. The output format is a list of lists, `[[l,t,w,h,score,class_id], [l,t,w,h,score,class_id], ...]`. We can also draw the bboxes on the image if a save path is given. You can find the preprocessing and postprocessing code under the folder `exporting/yolov5/`.
+
+In this tutorial, we choose to run our yolov5 model on 520. First, we would like to save the model path information in a yaml file, called `pretrained_paths_520.yaml`, under the folder `data`. Here, we can reuse the yaml file which was created when we converted the PyTorch model to ONNX.
+
+For model inference on a single image, execute the command in the folder `yolov5`; the output is as follows:
+
+```shell
+python inference.py --data data/model_paths_520_coco128.yaml --conf_thres 0.6 --img-path tutorial/demo/yolo_demo.jpg --save-path tutorial/demo/out.jpg
+
+[[934.0, 183.0, 284.0, 751.0, 0.8913591504096985, 0.0], [670.0, 225.0, 224.0, 696.0, 0.8750525712966919, 0.0]]
+
+```
+
+Here we choose a model trained on the COCO128 dataset; the class labels and pretrained model paths are defined in the yaml file `data/model_paths_520_coco128.yaml`. The original image and the processed image are shown below.
+
+ +
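+Since each detection row is plain `[l, t, w, h, score, class_id]`, consuming the output needs no extra tooling. A small illustrative snippet using the two detections printed above:
+
+```python
+# illustrative: map the inference.py output rows to readable results
+names = ['person', 'bicycle', 'car', 'motorcycle', 'airplane']  # first entries of the 80-class list above
+
+detections = [[934.0, 183.0, 284.0, 751.0, 0.8913591504096985, 0.0],
+              [670.0, 225.0, 224.0, 696.0, 0.8750525712966919, 0.0]]
+
+for l, t, w, h, score, class_id in detections:
+    right, bottom = l + w, t + h  # (l, t) is the top-left corner
+    print(f'{names[int(class_id)]}: {score:.2f} at ({l:.0f}, {t:.0f})-({right:.0f}, {bottom:.0f})')
+# -> person: 0.89 at (934, 183)-(1218, 934)
+#    person: 0.88 at (670, 225)-(894, 921)
+```
+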
+
+Note that if the model was trained on a custom dataset, you have to modify the yaml file accordingly.
+
+If you would like to use the ONNX model for inference, you need to add the `--onnx` argument when you execute `inference.py`.
+
+# Evaluation
+In this section, we will evaluate our trained model on the COCO128 dataset. Execute the command in the folder `yolov5`; the output is as follows:
+
+```shell
+python test.py --weights runs/train/exp/weights/best.pt --verbose
+
+Namespace(augment=False, batch_size=32, conf_thres=0.001, data='data/coco128.yaml', device='cpu', exist_ok=False, img_size=640, iou_thres=0.65, name='exp', project='runs/test', save_conf=False, save_json=False, save_txt=False, single_cls=False, task='val', verbose=True, weights=['runs/train/exp/weights/best.pt'])
+Using torch 1.7.0 CPU
+
+Fusing layers...
+Model Summary: 164 layers, 6772285 parameters, 0 gradients
+***cache_path ../coco128/labels/train2017.cache
+Scanning labels ../coco128/labels/train2017.cache (126 found, 0 missing, 2 empty, 0 duplicate, for 128 images): 128it [00:00, 9335.42it/s]
+ Class Images Targets P R mAP@.5 mAP@.5:.95: 100%|███████████████████████████████████████████████████████████████████████████| 4/4 [01:07<00:00, 16.95s/it]
+ all 128 929 0.284 0.562 0.492 0.307
+ person 128 254 0.37 0.764 0.718 0.437
+ bicycle 128 6 0.373 0.5 0.36 0.217
+ car 128 46 0.286 0.326 0.275 0.145
+ motorcycle 128 5 0.433 1 0.962 0.701
+ airplane 128 6 0.559 0.833 0.824 0.539
+ bus 128 7 0.412 0.714 0.7 0.588
+ train 128 3 0.209 0.667 0.552 0.269
+ truck 128 12 0.452 0.412 0.376 0.135
+ boat 128 6 0.109 0.333 0.229 0.0458
+ traffic light 128 14 0.0488 0.0714 0.096 0.0599
+ stop sign 128 2 0.636 1 0.995 0.747
+ bench 128 9 0.152 0.222 0.171 0.0814
+ bird 128 16 0.459 0.562 0.538 0.28
+ cat 128 4 0.353 1 0.725 0.548
+ dog 128 9 0.532 0.667 0.632 0.422
+ horse 128 2 0.31 1 0.995 0.473
+ elephant 128 17 0.666 0.824 0.84 0.606
+ bear 128 1 0.323 1 0.995 0.896
+ zebra 128 4 0.721 1 0.995 0.921
+ giraffe 128 9 0.459 0.889 0.928 0.551
+ backpack 128 6 0.291 0.333 0.386 0.193
+ umbrella 128 18 0.394 0.5 0.458 0.208
+ handbag 128 19 0.101 0.105 0.112 0.0483
+ tie 128 7 0.3 0.714 0.6 0.355
+ suitcase 128 4 0.672 0.5 0.697 0.193
+ frisbee 128 5 0.315 0.8 0.665 0.416
+ skis 128 1 0.103 1 0.498 0.0498
+ snowboard 128 7 0.534 0.821 0.674 0.36
+ sports ball 128 6 0.165 0.5 0.258 0.155
+ kite 128 10 0.225 0.2 0.133 0.0334
+ baseball bat 128 4 0.016 0.052 0.055 0.0275
+ baseball glove 128 7 0.0989 0.286 0.292 0.146
+ skateboard 128 5 0.323 0.4 0.376 0.259
+ tennis racket 128 7 0.105 0.429 0.327 0.164
+ bottle 128 18 0.202 0.611 0.372 0.214
+ wine glass 128 16 0.22 0.438 0.397 0.252
+ cup 128 36 0.297 0.389 0.345 0.206
+ fork 128 6 0.0841 0.167 0.177 0.135
+ knife 128 16 0.301 0.5 0.408 0.143
+ spoon 128 22 0.232 0.273 0.31 0.12
+ bowl 128 28 0.393 0.714 0.591 0.393
+ banana 128 1 0.13 1 0.332 0.0332
+ sandwich 128 2 0.183 0.459 0.115 0.103
+ orange 128 4 0.096 0.25 0.125 0.0856
+ broccoli 128 11 0.107 0.0909 0.116 0.0998
+ carrot 128 24 0.198 0.708 0.409 0.231
+ hot dog 128 2 0.274 1 0.828 0.746
+ pizza 128 5 0.588 0.6 0.66 0.473
+ donut 128 14 0.249 1 0.858 0.66
+ cake 128 4 0.388 1 0.788 0.547
+ chair 128 35 0.174 0.6 0.331 0.156
+ couch 128 6 0.367 0.667 0.678 0.403
+ potted plant 128 14 0.249 0.571 0.49 0.3
+ bed 128 3 0.623 0.667 0.677 0.224
+ dining table 128 13 0.26 0.538 0.449 0.289
+ toilet 128 2 0.0943 0.5 0.497 0.397
+ tv 128 2 0.198 1 0.995 0.696
+ laptop 128 3 0 0 0.0184 0.0111
+ mouse 128 2 0 0 0 0
+ remote 128 8 0.339 0.5 0.512 0.33
+ cell
phone 128 8 0.0833 0.125 0.0382 0.0208 + microwave 128 3 0.248 1 0.995 0.502 + oven 128 5 0.143 0.4 0.336 0.222 + sink 128 6 0.106 0.167 0.0876 0.078 + refrigerator 128 5 0.35 0.6 0.564 0.403 + book 128 29 0.143 0.138 0.139 0.0655 + clock 128 9 0.435 0.889 0.848 0.679 + vase 128 2 0.0816 1 0.995 0.846 + scissors 128 1 0 0 0.0524 0.00524 + teddy bear 128 21 0.495 0.514 0.522 0.249 + toothbrush 128 5 0.3 0.4 0.44 0.186 +Speed: 243.3/124.9/368.3 ms inference/NMS/total per 640x640 image at batch-size 32 +Results saved to runs/test/exp + +``` \ No newline at end of file diff --git a/tutorial/demo/out.jpg b/tutorial/demo/out.jpg new file mode 100644 index 0000000..c0ac6e2 Binary files /dev/null and b/tutorial/demo/out.jpg differ diff --git a/tutorial/demo/yolo_demo.jpg b/tutorial/demo/yolo_demo.jpg new file mode 100644 index 0000000..ab8fcaa Binary files /dev/null and b/tutorial/demo/yolo_demo.jpg differ diff --git a/tutorial/screenshots/custom_train.jpg b/tutorial/screenshots/custom_train.jpg new file mode 100644 index 0000000..6cd2bdc Binary files /dev/null and b/tutorial/screenshots/custom_train.jpg differ diff --git a/tutorial/screenshots/export.png b/tutorial/screenshots/export.png new file mode 100644 index 0000000..380e5ea Binary files /dev/null and b/tutorial/screenshots/export.png differ diff --git a/tutorial/screenshots/fine520.png b/tutorial/screenshots/fine520.png new file mode 100644 index 0000000..14eeab4 Binary files /dev/null and b/tutorial/screenshots/fine520.png differ diff --git a/tutorial/screenshots/fine520out.png b/tutorial/screenshots/fine520out.png new file mode 100644 index 0000000..fdf2c39 Binary files /dev/null and b/tutorial/screenshots/fine520out.png differ diff --git a/tutorial/screenshots/fine720.png b/tutorial/screenshots/fine720.png new file mode 100644 index 0000000..4562211 Binary files /dev/null and b/tutorial/screenshots/fine720.png differ diff --git a/tutorial/screenshots/fine720out.png b/tutorial/screenshots/fine720out.png new file mode 100644 index 0000000..425e857 Binary files /dev/null and b/tutorial/screenshots/fine720out.png differ diff --git a/tutorial/screenshots/genrate_npy.png b/tutorial/screenshots/genrate_npy.png new file mode 100644 index 0000000..7f05579 Binary files /dev/null and b/tutorial/screenshots/genrate_npy.png differ diff --git a/tutorial/screenshots/make_sense_det.jpg b/tutorial/screenshots/make_sense_det.jpg new file mode 100644 index 0000000..2e1225a Binary files /dev/null and b/tutorial/screenshots/make_sense_det.jpg differ diff --git a/tutorial/screenshots/make_sense_export.jpg b/tutorial/screenshots/make_sense_export.jpg new file mode 100644 index 0000000..5591230 Binary files /dev/null and b/tutorial/screenshots/make_sense_export.jpg differ diff --git a/tutorial/screenshots/make_sense_final.jpg b/tutorial/screenshots/make_sense_final.jpg new file mode 100644 index 0000000..d21fa0d Binary files /dev/null and b/tutorial/screenshots/make_sense_final.jpg differ diff --git a/tutorial/screenshots/make_sense_img001.jpg b/tutorial/screenshots/make_sense_img001.jpg new file mode 100644 index 0000000..ca7986c Binary files /dev/null and b/tutorial/screenshots/make_sense_img001.jpg differ diff --git a/tutorial/screenshots/make_sense_img002.jpg b/tutorial/screenshots/make_sense_img002.jpg new file mode 100644 index 0000000..9ab976c Binary files /dev/null and b/tutorial/screenshots/make_sense_img002.jpg differ diff --git a/tutorial/screenshots/make_sense_label.jpg b/tutorial/screenshots/make_sense_label.jpg new file mode 100644 index 
0000000..8294b35 Binary files /dev/null and b/tutorial/screenshots/make_sense_label.jpg differ diff --git a/tutorial/screenshots/make_sense_out.jpg b/tutorial/screenshots/make_sense_out.jpg new file mode 100644 index 0000000..8bbc47f Binary files /dev/null and b/tutorial/screenshots/make_sense_out.jpg differ diff --git a/tutorial/screenshots/make_sense_out2.jpg b/tutorial/screenshots/make_sense_out2.jpg new file mode 100644 index 0000000..bfb574c Binary files /dev/null and b/tutorial/screenshots/make_sense_out2.jpg differ diff --git a/tutorial/screenshots/make_sense_upload.jpg b/tutorial/screenshots/make_sense_upload.jpg new file mode 100644 index 0000000..cffb1c9 Binary files /dev/null and b/tutorial/screenshots/make_sense_upload.jpg differ diff --git a/tutorial/screenshots/readme_img.jpg b/tutorial/screenshots/readme_img.jpg new file mode 100644 index 0000000..575d87e Binary files /dev/null and b/tutorial/screenshots/readme_img.jpg differ diff --git a/tutorial/screenshots/readme_img2.png b/tutorial/screenshots/readme_img2.png new file mode 100644 index 0000000..ec7065d Binary files /dev/null and b/tutorial/screenshots/readme_img2.png differ diff --git a/tutorial/screenshots/train520.png b/tutorial/screenshots/train520.png new file mode 100644 index 0000000..1307f43 Binary files /dev/null and b/tutorial/screenshots/train520.png differ diff --git a/tutorial/screenshots/train520out.png b/tutorial/screenshots/train520out.png new file mode 100644 index 0000000..5978f06 Binary files /dev/null and b/tutorial/screenshots/train520out.png differ diff --git a/tutorial/screenshots/train720.png b/tutorial/screenshots/train720.png new file mode 100644 index 0000000..c58c9f8 Binary files /dev/null and b/tutorial/screenshots/train720.png differ diff --git a/tutorial/screenshots/train720out.png b/tutorial/screenshots/train720out.png new file mode 100644 index 0000000..6abc2ab Binary files /dev/null and b/tutorial/screenshots/train720out.png differ diff --git a/tutorial/screenshots/yolo_structure.jpg b/tutorial/screenshots/yolo_structure.jpg new file mode 100644 index 0000000..d0a23c8 Binary files /dev/null and b/tutorial/screenshots/yolo_structure.jpg differ diff --git a/tutorial/screenshots/yolo_yaml.jpg b/tutorial/screenshots/yolo_yaml.jpg new file mode 100644 index 0000000..5679a74 Binary files /dev/null and b/tutorial/screenshots/yolo_yaml.jpg differ diff --git a/tutorial/tutorial.ipynb b/tutorial/tutorial.ipynb new file mode 100644 index 0000000..64adf4c --- /dev/null +++ b/tutorial/tutorial.ipynb @@ -0,0 +1,1184 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "D_T20aqXkqdD" + }, + "source": [ + "

# Object Detection with YOLOv5

\n", + "\n", + "This tutorial will illustrate how to train a YOLOv5 object detection model via our AI training platform." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "ZH08KlRsmJt1", + "outputId": "505f66e8-9a29-4af0-ec25-5313a29ec445" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Mounted at /content/drive\n" + ] + } + ], + "source": [ + "from google.colab import drive\n", + "drive.mount('/content/drive')" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "sOg9mo_XmV_b", + "outputId": "0f634720-5767-460b-9bb3-b2ea2740c4a8" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "/content/drive/MyDrive/ai_training/detection/yolov5/yolov5\n" + ] + } + ], + "source": [ + "cd /content/drive/MyDrive/ai_training/detection/yolov5/yolov5" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "F27nc_HOkqdt" + }, + "source": [ + "# Prerequisites\n", + "First of all, we have to install the libraries. Python>=3.8 is required. For other libraries, you can check the `requirements.txt` file. Installing these packages is simple. You can install them by running:\n" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "IIB8JTSLkqdu", + "outputId": "c07a910e-0588-46cb-d4f9-d8489dc1adab" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: matplotlib>=3.2.2 in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 4)) (3.2.2)\n", + "Requirement already satisfied: numpy>=1.18.5 in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 5)) (1.19.5)\n", + "Requirement already satisfied: opencv-python>=4.1.2 in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 6)) (4.1.2.30)\n", + "Requirement already satisfied: Pillow in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 7)) (7.1.2)\n", + "Requirement already satisfied: PyYAML>=5.3.1 in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 8)) (5.4.1)\n", + "Requirement already satisfied: scipy>=1.4.1 in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 9)) (1.4.1)\n", + "Requirement already satisfied: torch>=1.7.0 in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 10)) (1.9.0+cu102)\n", + "Requirement already satisfied: torchvision>=0.8.1 in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 11)) (0.10.0+cu102)\n", + "Requirement already satisfied: tqdm>=4.41.0 in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 12)) (4.41.1)\n", + "Requirement already satisfied: tensorboard>=2.4.1 in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 15)) (2.5.0)\n", + "Requirement already satisfied: seaborn>=0.11.0 in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 19)) (0.11.1)\n", + "Requirement already satisfied: pandas in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 20)) (1.1.5)\n", + "Requirement already satisfied: onnx==1.6.0 in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 24)) (1.6.0)\n", + "Requirement already satisfied: onnxruntime in /usr/local/lib/python3.7/dist-packages (from -r 
requirements.txt (line 25)) (1.8.0)\n", + "Collecting onnx-simplifier\n", + " Downloading https://files.pythonhosted.org/packages/9f/f0/b9de063e1f8ced84e1aa853fb64b080a4a0cb6bc3987abc0005ee315432a/onnx-simplifier-0.3.6.tar.gz\n", + "Requirement already satisfied: thop in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 30)) (0.0.31.post2005241907)\n", + "Requirement already satisfied: pycocotools>=2.0 in /usr/local/lib/python3.7/dist-packages (from -r requirements.txt (line 31)) (2.0.2)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib>=3.2.2->-r requirements.txt (line 4)) (1.3.1)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib>=3.2.2->-r requirements.txt (line 4)) (2.4.7)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.7/dist-packages (from matplotlib>=3.2.2->-r requirements.txt (line 4)) (0.10.0)\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.7/dist-packages (from matplotlib>=3.2.2->-r requirements.txt (line 4)) (2.8.1)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from torch>=1.7.0->-r requirements.txt (line 10)) (3.7.4.3)\n", + "Requirement already satisfied: markdown>=2.6.8 in /usr/local/lib/python3.7/dist-packages (from tensorboard>=2.4.1->-r requirements.txt (line 15)) (3.3.4)\n", + "Requirement already satisfied: requests<3,>=2.21.0 in /usr/local/lib/python3.7/dist-packages (from tensorboard>=2.4.1->-r requirements.txt (line 15)) (2.23.0)\n", + "Requirement already satisfied: absl-py>=0.4 in /usr/local/lib/python3.7/dist-packages (from tensorboard>=2.4.1->-r requirements.txt (line 15)) (0.12.0)\n", + "Requirement already satisfied: protobuf>=3.6.0 in /usr/local/lib/python3.7/dist-packages (from tensorboard>=2.4.1->-r requirements.txt (line 15)) (3.12.4)\n", + "Requirement already satisfied: google-auth<2,>=1.6.3 in /usr/local/lib/python3.7/dist-packages (from tensorboard>=2.4.1->-r requirements.txt (line 15)) (1.31.0)\n", + "Requirement already satisfied: tensorboard-data-server<0.7.0,>=0.6.0 in /usr/local/lib/python3.7/dist-packages (from tensorboard>=2.4.1->-r requirements.txt (line 15)) (0.6.1)\n", + "Requirement already satisfied: werkzeug>=0.11.15 in /usr/local/lib/python3.7/dist-packages (from tensorboard>=2.4.1->-r requirements.txt (line 15)) (1.0.1)\n", + "Requirement already satisfied: google-auth-oauthlib<0.5,>=0.4.1 in /usr/local/lib/python3.7/dist-packages (from tensorboard>=2.4.1->-r requirements.txt (line 15)) (0.4.4)\n", + "Requirement already satisfied: grpcio>=1.24.3 in /usr/local/lib/python3.7/dist-packages (from tensorboard>=2.4.1->-r requirements.txt (line 15)) (1.34.1)\n", + "Requirement already satisfied: tensorboard-plugin-wit>=1.6.0 in /usr/local/lib/python3.7/dist-packages (from tensorboard>=2.4.1->-r requirements.txt (line 15)) (1.8.0)\n", + "Requirement already satisfied: wheel>=0.26; python_version >= \"3\" in /usr/local/lib/python3.7/dist-packages (from tensorboard>=2.4.1->-r requirements.txt (line 15)) (0.36.2)\n", + "Requirement already satisfied: setuptools>=41.0.0 in /usr/local/lib/python3.7/dist-packages (from tensorboard>=2.4.1->-r requirements.txt (line 15)) (57.0.0)\n", + "Requirement already satisfied: pytz>=2017.2 in /usr/local/lib/python3.7/dist-packages (from pandas->-r requirements.txt (line 20)) (2018.9)\n", + "Requirement already satisfied: six in 
/usr/local/lib/python3.7/dist-packages (from onnx==1.6.0->-r requirements.txt (line 24)) (1.15.0)\n", + "Requirement already satisfied: flatbuffers in /usr/local/lib/python3.7/dist-packages (from onnxruntime->-r requirements.txt (line 25)) (1.12)\n", + "Collecting onnxoptimizer>=0.2.5\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/15/3b/e00d34bf2680ade58ce60bfc4987e70d095a0ca6978fda62a0600a7ee0ae/onnxoptimizer-0.2.6-cp37-cp37m-manylinux2014_x86_64.whl (466kB)\n", + "\u001b[K |████████████████████████████████| 471kB 9.6MB/s \n", + "\u001b[?25hRequirement already satisfied: cython>=0.27.3 in /usr/local/lib/python3.7/dist-packages (from pycocotools>=2.0->-r requirements.txt (line 31)) (0.29.23)\n", + "Requirement already satisfied: importlib-metadata; python_version < \"3.8\" in /usr/local/lib/python3.7/dist-packages (from markdown>=2.6.8->tensorboard>=2.4.1->-r requirements.txt (line 15)) (4.5.0)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests<3,>=2.21.0->tensorboard>=2.4.1->-r requirements.txt (line 15)) (2021.5.30)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests<3,>=2.21.0->tensorboard>=2.4.1->-r requirements.txt (line 15)) (2.10)\n", + "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests<3,>=2.21.0->tensorboard>=2.4.1->-r requirements.txt (line 15)) (3.0.4)\n", + "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests<3,>=2.21.0->tensorboard>=2.4.1->-r requirements.txt (line 15)) (1.24.3)\n", + "Requirement already satisfied: pyasn1-modules>=0.2.1 in /usr/local/lib/python3.7/dist-packages (from google-auth<2,>=1.6.3->tensorboard>=2.4.1->-r requirements.txt (line 15)) (0.2.8)\n", + "Requirement already satisfied: cachetools<5.0,>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from google-auth<2,>=1.6.3->tensorboard>=2.4.1->-r requirements.txt (line 15)) (4.2.2)\n", + "Requirement already satisfied: rsa<5,>=3.1.4; python_version >= \"3.6\" in /usr/local/lib/python3.7/dist-packages (from google-auth<2,>=1.6.3->tensorboard>=2.4.1->-r requirements.txt (line 15)) (4.7.2)\n", + "Requirement already satisfied: requests-oauthlib>=0.7.0 in /usr/local/lib/python3.7/dist-packages (from google-auth-oauthlib<0.5,>=0.4.1->tensorboard>=2.4.1->-r requirements.txt (line 15)) (1.3.0)\n", + "Requirement already satisfied: zipp>=0.5 in /usr/local/lib/python3.7/dist-packages (from importlib-metadata; python_version < \"3.8\"->markdown>=2.6.8->tensorboard>=2.4.1->-r requirements.txt (line 15)) (3.4.1)\n", + "Requirement already satisfied: pyasn1<0.5.0,>=0.4.6 in /usr/local/lib/python3.7/dist-packages (from pyasn1-modules>=0.2.1->google-auth<2,>=1.6.3->tensorboard>=2.4.1->-r requirements.txt (line 15)) (0.4.8)\n", + "Requirement already satisfied: oauthlib>=3.0.0 in /usr/local/lib/python3.7/dist-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<0.5,>=0.4.1->tensorboard>=2.4.1->-r requirements.txt (line 15)) (3.1.1)\n", + "Building wheels for collected packages: onnx-simplifier\n", + " Building wheel for onnx-simplifier (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", + " Created wheel for onnx-simplifier: filename=onnx_simplifier-0.3.6-cp37-none-any.whl size=12877 sha256=38a8a809f7065b4d5280a3103c3ae60cb3ecc2797c54ec9920b31c34dd48e7da\n", + " Stored in directory: /root/.cache/pip/wheels/6b/ad/63/753b75e81c462465ed01abfed4bb0d3ce61f415e4ee72a6c87\n", + "Successfully built onnx-simplifier\n", + "Installing collected packages: onnxoptimizer, onnx-simplifier\n", + "Successfully installed onnx-simplifier-0.3.6 onnxoptimizer-0.2.6\n" + ] + } + ], + "source": [ + "!pip install -r requirements.txt" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "lBRhzYk3kqdv" + }, + "source": [ + "# Dataset & Preparation\n", + "\n", + "Next, we need a dataset for the training model. In this tutorial, let's use the COCO 128 dataset. This dataset has been saved in `/coco128` next to the `/yolov5` directory." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "JEbxLmz7kqdv", + "outputId": "6fbd02bb-6ffa-4d19-d0f2-cc6721fdfbe1" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "coco128 exporting generate_npy image_data yolov5\n" + ] + } + ], + "source": [ + "!ls ../" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "9uONbChckqdw" + }, + "source": [ + "The `coco128.yaml` is prepared under the folder `/yolov5/data/`." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "Dq3-AD6Ekqdw", + "outputId": "b5795732-6823-4681-b2b4-6965f91d438a" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "# COCO 2017 dataset http://cocodataset.org - first 128 training images\n", + "# Train command: python train.py --data coco128.yaml\n", + "# Default dataset location is next to /yolov5:\n", + "# /parent_folder\n", + "# /coco128\n", + "# /yolov5\n", + "\n", + "\n", + "# download command/URL (optional)\n", + "download: https://github.com/ultralytics/yolov5/releases/download/v1.0/coco128.zip\n", + "\n", + "# train and val data as 1) directory: path/images/, 2) file: path/images.txt, or 3) list: [path1/images/, path2/images/]\n", + "train: ../coco128/images/train2017/ # 128 images\n", + "val: ../coco128/images/train2017/ # 128 images\n", + "\n", + "# number of classes\n", + "nc: 80\n", + "\n", + "# class names\n", + "names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light',\n", + " 'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow',\n", + " 'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee',\n", + " 'skis', 'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove', 'skateboard', 'surfboard',\n", + " 'tennis racket', 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple',\n", + " 'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch',\n", + " 'potted plant', 'bed', 'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone',\n", + " 'microwave', 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase', 'scissors', 'teddy bear',\n", + " 'hair drier', 'toothbrush']\n" + ] + } + ], + "source": [ + "!cat data/coco128.yaml" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "3ZhFRyFhkqdw" + }, + "source": [ + "# Train " 
+ ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "ZW5xCzsGkqdx" + }, + "source": [ + "We download the pretrained model from [Model_Zoo](https://github.com/kneron/Model_Zoo/tree/main/detection/yolov5/yolov5s-noupsample). Suppose we would like to finetune a pretrained model for 520 and just run 2 epochs. Execute commands in the folder `yolov5`:" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "TyV4OW7VbCLW", + "outputId": "d8a59898-a13a-45dd-a5d2-f0dea3d27433" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "--2021-06-23 23:45:37-- https://raw.githubusercontent.com/kneron/Model_Zoo/main/detection/yolov5/yolov5s-noupsample/best.pt\n", + "Resolving raw.githubusercontent.com (raw.githubusercontent.com)... 185.199.111.133, 185.199.108.133, 185.199.109.133, ...\n", + "Connecting to raw.githubusercontent.com (raw.githubusercontent.com)|185.199.111.133|:443... connected.\n", + "HTTP request sent, awaiting response... 200 OK\n", + "Length: 13758885 (13M) [application/octet-stream]\n", + "Saving to: ‘best.pt.1’\n", + "\n", + "best.pt.1 100%[===================>] 13.12M 46.7MB/s in 0.3s \n", + "\n", + "2021-06-23 23:45:38 (46.7 MB/s) - ‘best.pt.1’ saved [13758885/13758885]\n", + "\n" + ] + } + ], + "source": [ + "!wget https://raw.githubusercontent.com/kneron/Model_Zoo/main/detection/yolov5/yolov5s-noupsample/best.pt" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "eDEPFnyekqdx", + "outputId": "a42966f7-1a0a-4bb9-c225-99602e12cefd" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Using torch 1.9.0+cu102 CUDA:0 (Tesla T4, 15109MB)\n", + "\n", + "Namespace(adam=False, batch_size=8, bucket='', cache_images=False, cfg='./models/yolov5s-noupsample.yaml', data='./data/coco128.yaml', device='', epochs=2, evolve=False, exist_ok=False, global_rank=-1, hyp='data/hyp.scratch.yaml', image_weights=False, img_size=[640, 640], local_rank=-1, log_imgs=16, multi_scale=False, name='exp', noautoanchor=False, nosave=False, notest=False, project='runs/train', rect=False, resume=False, save_dir='runs/train/exp', single_cls=False, sync_bn=False, total_batch_size=8, weights='best.pt', workers=8, world_size=1)\n", + "Start Tensorboard with \"tensorboard --logdir runs/train\", view at http://localhost:6006/\n", + "2021-06-23 23:45:57.157154: I tensorflow/stream_executor/platform/default/dso_loader.cc:53] Successfully opened dynamic library libcudart.so.11.0\n", + "Hyperparameters {'lr0': 0.01, 'lrf': 0.2, 'momentum': 0.937, 'weight_decay': 0.0005, 'warmup_epochs': 3.0, 'warmup_momentum': 0.8, 'warmup_bias_lr': 0.1, 'box': 0.05, 'cls': 0.5, 'cls_pw': 1.0, 'obj': 1.0, 'obj_pw': 1.0, 'iou_t': 0.2, 'anchor_t': 4.0, 'fl_gamma': 0.0, 'hsv_h': 0.015, 'hsv_s': 0.7, 'hsv_v': 0.4, 'degrees': 0.0, 'translate': 0.1, 'scale': 0.5, 'shear': 0.0, 'perspective': 0.0, 'flipud': 0.0, 'fliplr': 0.5, 'mosaic': 1.0, 'mixup': 0.0}\n", + "\n", + " from n params module arguments \n", + " 0 -1 1 3520 models.common.Focus [3, 32, 3] \n", + " 1 -1 1 18560 models.common.Conv [32, 64, 3, 2] \n", + " 2 -1 1 19904 models.common.BottleneckCSP [64, 64, 1] \n", + " 3 -1 1 73984 models.common.Conv [64, 128, 3, 2] \n", + " 4 -1 1 161152 models.common.BottleneckCSP [128, 128, 3] \n", + " 5 -1 1 295424 models.common.Conv [128, 256, 3, 2] \n", + " 6 -1 1 641792 models.common.BottleneckCSP 
[256, 256, 3] \n", + " 7 -1 1 1180672 models.common.Conv [256, 512, 3, 2] \n", + " 8 -1 1 656896 models.common.SPP [512, 512, [5, 9, 13]] \n", + " 9 -1 1 1248768 models.common.BottleneckCSP [512, 512, 1, False] \n", + " 10 4 1 147712 models.common.Conv [128, 128, 3, 1] \n", + " 11 6 1 590336 models.common.Conv [256, 256, 3, 1] \n", + " 12 [7, 9] 1 0 models.common.Concat [1] \n", + " 13 -1 1 1510912 models.common.BottleneckCSP [1024, 512, 1, False] \n", + " 14 [10, 11, 13] 1 229245 models.yolo.Detect [80, [[10, 13, 16, 30, 33, 23], [30, 61, 62, 45, 59, 119], [116, 90, 156, 198, 373, 326]], [128, 256, 512]]\n", + "/usr/local/lib/python3.7/dist-packages/torch/nn/functional.py:718: UserWarning: Named tensors and all their associated APIs are an experimental feature and subject to change. Please do not use them for anything important until they are released as stable. (Triggered internally at /pytorch/c10/core/TensorImpl.h:1156.)\n", + " return torch.max_pool2d(input, kernel_size, stride, padding, dilation, ceil_mode)\n", + "Model Summary: 201 layers, 6778877 parameters, 6778877 gradients, 16.9 GFLOPS\n", + "\n", + "Transferred 263/265 items from best.pt\n", + "Optimizer groups: 45 .bias, 50 conv.weight, 42 other\n", + "***cache_path ../coco128/labels/train2017.cache\n", + "Scanning labels ../coco128/labels/train2017.cache (126 found, 0 missing, 2 empty, 0 duplicate, for 128 images): 128it [00:00, 9721.70it/s]\n", + "***cache_path ../coco128/labels/train2017.cache\n", + "Scanning labels ../coco128/labels/train2017.cache (126 found, 0 missing, 2 empty, 0 duplicate, for 128 images): 128it [00:00, 12998.67it/s]\n", + "[W pthreadpool-cpp.cc:90] Warning: Leaking Caffe2 thread-pool after fork. (function pthreadpool)\n", + "[W pthreadpool-cpp.cc:90] Warning: Leaking Caffe2 thread-pool after fork. (function pthreadpool)\n", + "NumExpr defaulting to 2 threads.\n", + "\n", + "Analyzing anchors... anchors/target = 4.27, Best Possible Recall (BPR) = 0.9946\n", + "Image sizes 640 train, 640 test\n", + "Using 2 dataloader workers\n", + "Logging results to runs/train/exp\n", + "Starting training for 2 epochs...\n", + "\n", + " Epoch gpu_mem box obj cls total targets img_size\n", + " 0/1 5.45G 0.04987 0.07427 0.03102 0.1552 210 640: 100% 16/16 [00:40<00:00, 2.50s/it]\n", + " Class Images Targets P R mAP@.5 mAP@.5:.95: 100% 16/16 [00:05<00:00, 2.70it/s]\n", + " all 128 929 0.312 0.528 0.494 0.321\n", + "\n", + " Epoch gpu_mem box obj cls total targets img_size\n", + " 1/1 5.45G 0.05341 0.0778 0.02946 0.1607 143 640: 100% 16/16 [00:04<00:00, 3.59it/s]\n", + " Class Images Targets P R mAP@.5 mAP@.5:.95: 100% 16/16 [00:03<00:00, 4.73it/s]\n", + " all 128 929 0.318 0.541 0.487 0.317\n", + "Optimizer stripped from runs/train/exp/weights/last.pt, 13.8MB\n", + "Optimizer stripped from runs/train/exp/weights/best.pt, 13.8MB\n", + "2 epochs completed in 0.016 hours.\n", + "\n" + ] + } + ], + "source": [ + "!CUDA_VISIBLE_DEVICES='0' python train.py --data coco128.yaml --cfg yolov5s-noupsample.yaml --weights 'best.pt' --batch-size 8 --epoch 2" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "W_hjI69Qxv8O" + }, + "source": [ + "The trained model weights are saved in `runs/train/exp/weights/best.pt`\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "fBqrdx3okqdx" + }, + "source": [ + "# Generating .npy for different model input\n", + "We can generating `.npy` for different model input by using `yolov5_generate_npy.py`. 
Execute commands in the folder `generate_npy`:" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "l3PFtRDoJFHI", + "outputId": "76154d90-25f5-44ac-f70f-c9bef96ae46d" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "/content/drive/My Drive/ai_training/detection/yolov5/generate_npy\n" + ] + } + ], + "source": [ + "cd ../generate_npy" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "id": "hYhx9IVPkqdy" + }, + "outputs": [], + "source": [ + "!python yolov5_generate_npy.py" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "_o3hPSRYkqdy" + }, + "source": [ + "We could get `*.npy` files." + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "3bOcDLlQkqdy", + "outputId": "8da0894a-bf80-46b1-c870-bcb83acfa57e" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "20_640x640.npy\t40_640x640.npy\t80_640x640.npy\tyolov5_generate_npy.py\n" + ] + } + ], + "source": [ + "!ls ../generate_npy" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "7GSVvnW0ygST" + }, + "source": [ + "# Configure the paths yaml file\n", + "You are expected to create a yaml file which stores all the paths related to the trained models. You can check and modify the `pretrained_paths_520.yaml` and `pretrained_paths_720.yaml` under `/yolov5/data/`. Here is the config for our model trained on COCO128 `model_paths_520_coco128.yaml`:" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "BSzd3WjaKNiI", + "outputId": "9c8671f1-4a7e-47cd-c5b1-e9339ccb3100" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "/content/drive/My Drive/ai_training/detection/yolov5/yolov5\n" + ] + } + ], + "source": [ + "cd ../yolov5/" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "EiNccC_bzg0z", + "outputId": "e1894348-41b4-4bc0-8d2c-abfe22599436" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "grid_dir: ../generate_npy/\n", + "grid20_path: ../generate_npy/20_640x640.npy\n", + "grid40_path: ../generate_npy/40_640x640.npy\n", + "grid80_path: ../generate_npy/80_640x640.npy\n", + "\n", + "yolov5_dir: ./\n", + "path: ./runs/train/exp/weights/best.pt\n", + "yaml_path: ./models/yolov5s-noupsample.yaml\n", + "pt_path: ./yolov5s-noupsample-coco128.pt # pytorch 1.4\n", + "onnx_export_file: ./yolov5s-noupsample-coco128.onnx\n", + "\n", + "input_w: 640\n", + "input_h: 640\n", + "# number of classes\n", + "nc: 80\n", + "# class names\n", + "names: ['person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light',\n", + " 'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow',\n", + " 'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee',\n", + " 'skis', 'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove', 'skateboard', 'surfboard',\n", + " 'tennis racket', 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple',\n", + " 'sandwich', 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch',\n", + " 'potted 
plant', 'bed', 'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone',\n", + " 'microwave', 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase', 'scissors', 'teddy bear',\n", + " 'hair drier', 'toothbrush']" + ] + } + ], + "source": [ + "!cat data/model_paths_520_coco128.yaml" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "Z30earr7kqdz" + }, + "source": [ + "# Save and Convert to ONNX\n", + "For now, we have trained the YOLOv5 model. This section will walk you through how to save the trained model for onnx converter supported format and convert to ONNX. \n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "LxChed6vkqd1" + }, + "source": [ + "## Exporting onnx model in the pytorch1.7 environment\n", + "We can convert the model to onnx by using `yolov5_export.py`. Execute commands in the folder `exporting`:" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "fOBPycDDkqd1", + "outputId": "ba477fe7-4dc2-4428-9de5-b39b9b1943ea" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "./runs/train/exp/weights/best.pt\n", + "['epoch', 'best_fitness', 'training_results', 'model', 'optimizer', 'wandb_id']\n", + "/usr/local/lib/python3.7/dist-packages/torch/nn/functional.py:718: UserWarning: Named tensors and all their associated APIs are an experimental feature and subject to change. Please do not use them for anything important until they are released as stable. (Triggered internally at /pytorch/c10/core/TensorImpl.h:1156.)\n", + " return torch.max_pool2d(input, kernel_size, stride, padding, dilation, ceil_mode)\n", + "self.vanish_point 0.0\n", + "\n", + "Starting ONNX export with onnx 1.6.0...\n", + "****onnx file**** ./yolov5s-noupsample-coco128.onnx\n", + "graph torch-jit-export (\n", + " %images[FLOAT, 1x3x640x640]\n", + ") optional inputs with matching initializers (\n", + " %model.2.cv2.weight[FLOAT, 32x64x1x1]\n", + " %model.2.cv3.weight[FLOAT, 32x32x1x1]\n", + " %model.2.bn.weight[FLOAT, 64]\n", + " %model.2.bn.bias[FLOAT, 64]\n", + " %model.2.bn.running_mean[FLOAT, 64]\n", + " %model.2.bn.running_var[FLOAT, 64]\n", + " %model.4.cv2.weight[FLOAT, 64x128x1x1]\n", + " %model.4.cv3.weight[FLOAT, 64x64x1x1]\n", + " %model.4.bn.weight[FLOAT, 128]\n", + " %model.4.bn.bias[FLOAT, 128]\n", + " %model.4.bn.running_mean[FLOAT, 128]\n", + " %model.4.bn.running_var[FLOAT, 128]\n", + " %model.6.cv2.weight[FLOAT, 128x256x1x1]\n", + " %model.6.cv3.weight[FLOAT, 128x128x1x1]\n", + " %model.6.bn.weight[FLOAT, 256]\n", + " %model.6.bn.bias[FLOAT, 256]\n", + " %model.6.bn.running_mean[FLOAT, 256]\n", + " %model.6.bn.running_var[FLOAT, 256]\n", + " %model.9.cv2.weight[FLOAT, 256x512x1x1]\n", + " %model.9.cv3.weight[FLOAT, 256x256x1x1]\n", + " %model.9.bn.weight[FLOAT, 512]\n", + " %model.9.bn.bias[FLOAT, 512]\n", + " %model.9.bn.running_mean[FLOAT, 512]\n", + " %model.9.bn.running_var[FLOAT, 512]\n", + " %model.13.cv2.weight[FLOAT, 256x1024x1x1]\n", + " %model.13.cv3.weight[FLOAT, 256x256x1x1]\n", + " %model.13.bn.weight[FLOAT, 512]\n", + " %model.13.bn.bias[FLOAT, 512]\n", + " %model.13.bn.running_mean[FLOAT, 512]\n", + " %model.13.bn.running_var[FLOAT, 512]\n", + " %model.14.m.0.weight[FLOAT, 255x128x1x1]\n", + " %model.14.m.0.bias[FLOAT, 255]\n", + " %model.14.m.1.weight[FLOAT, 255x256x1x1]\n", + " %model.14.m.1.bias[FLOAT, 255]\n", + " %model.14.m.2.weight[FLOAT, 255x512x1x1]\n", + " 
%model.14.m.2.bias[FLOAT, 255]\n", + " %462[FLOAT, 32x12x3x3]\n", + " %463[FLOAT, 32]\n", + " %465[FLOAT, 64x32x3x3]\n", + " %466[FLOAT, 64]\n", + " %468[FLOAT, 32x64x1x1]\n", + " %469[FLOAT, 32]\n", + " %471[FLOAT, 32x32x1x1]\n", + " %472[FLOAT, 32]\n", + " %474[FLOAT, 32x32x3x3]\n", + " %475[FLOAT, 32]\n", + " %477[FLOAT, 64x64x1x1]\n", + " %478[FLOAT, 64]\n", + " %480[FLOAT, 128x64x3x3]\n", + " %481[FLOAT, 128]\n", + " %483[FLOAT, 64x128x1x1]\n", + " %484[FLOAT, 64]\n", + " %486[FLOAT, 64x64x1x1]\n", + " %487[FLOAT, 64]\n", + " %489[FLOAT, 64x64x3x3]\n", + " %490[FLOAT, 64]\n", + " %492[FLOAT, 64x64x1x1]\n", + " %493[FLOAT, 64]\n", + " %495[FLOAT, 64x64x3x3]\n", + " %496[FLOAT, 64]\n", + " %498[FLOAT, 64x64x1x1]\n", + " %499[FLOAT, 64]\n", + " %501[FLOAT, 64x64x3x3]\n", + " %502[FLOAT, 64]\n", + " %504[FLOAT, 128x128x1x1]\n", + " %505[FLOAT, 128]\n", + " %507[FLOAT, 256x128x3x3]\n", + " %508[FLOAT, 256]\n", + " %510[FLOAT, 128x256x1x1]\n", + " %511[FLOAT, 128]\n", + " %513[FLOAT, 128x128x1x1]\n", + " %514[FLOAT, 128]\n", + " %516[FLOAT, 128x128x3x3]\n", + " %517[FLOAT, 128]\n", + " %519[FLOAT, 128x128x1x1]\n", + " %520[FLOAT, 128]\n", + " %522[FLOAT, 128x128x3x3]\n", + " %523[FLOAT, 128]\n", + " %525[FLOAT, 128x128x1x1]\n", + " %526[FLOAT, 128]\n", + " %528[FLOAT, 128x128x3x3]\n", + " %529[FLOAT, 128]\n", + " %531[FLOAT, 256x256x1x1]\n", + " %532[FLOAT, 256]\n", + " %534[FLOAT, 512x256x3x3]\n", + " %535[FLOAT, 512]\n", + " %537[FLOAT, 256x512x1x1]\n", + " %538[FLOAT, 256]\n", + " %540[FLOAT, 512x1024x1x1]\n", + " %541[FLOAT, 512]\n", + " %543[FLOAT, 256x512x1x1]\n", + " %544[FLOAT, 256]\n", + " %546[FLOAT, 256x256x1x1]\n", + " %547[FLOAT, 256]\n", + " %549[FLOAT, 256x256x3x3]\n", + " %550[FLOAT, 256]\n", + " %552[FLOAT, 512x512x1x1]\n", + " %553[FLOAT, 512]\n", + " %555[FLOAT, 128x128x3x3]\n", + " %556[FLOAT, 128]\n", + " %558[FLOAT, 256x256x3x3]\n", + " %559[FLOAT, 256]\n", + " %561[FLOAT, 256x1024x1x1]\n", + " %562[FLOAT, 256]\n", + " %564[FLOAT, 256x256x1x1]\n", + " %565[FLOAT, 256]\n", + " %567[FLOAT, 256x256x3x3]\n", + " %568[FLOAT, 256]\n", + " %570[FLOAT, 512x512x1x1]\n", + " %571[FLOAT, 512]\n", + ") {\n", + " %266 = Constant[value = ]()\n", + " %267 = Constant[value = ]()\n", + " %268 = Constant[value = ]()\n", + " %269 = Constant[value = ]()\n", + " %270 = Slice(%images, %267, %268, %266, %269)\n", + " %271 = Constant[value = ]()\n", + " %272 = Constant[value = ]()\n", + " %273 = Constant[value = ]()\n", + " %274 = Constant[value = ]()\n", + " %275 = Slice(%270, %272, %273, %271, %274)\n", + " %276 = Constant[value = ]()\n", + " %277 = Constant[value = ]()\n", + " %278 = Constant[value = ]()\n", + " %279 = Constant[value = ]()\n", + " %280 = Slice(%images, %277, %278, %276, %279)\n", + " %281 = Constant[value = ]()\n", + " %282 = Constant[value = ]()\n", + " %283 = Constant[value = ]()\n", + " %284 = Constant[value = ]()\n", + " %285 = Slice(%280, %282, %283, %281, %284)\n", + " %286 = Constant[value = ]()\n", + " %287 = Constant[value = ]()\n", + " %288 = Constant[value = ]()\n", + " %289 = Constant[value = ]()\n", + " %290 = Slice(%images, %287, %288, %286, %289)\n", + " %291 = Constant[value = ]()\n", + " %292 = Constant[value = ]()\n", + " %293 = Constant[value = ]()\n", + " %294 = Constant[value = ]()\n", + " %295 = Slice(%290, %292, %293, %291, %294)\n", + " %296 = Constant[value = ]()\n", + " %297 = Constant[value = ]()\n", + " %298 = Constant[value = ]()\n", + " %299 = Constant[value = ]()\n", + " %300 = Slice(%images, %297, %298, %296, %299)\n", + " %301 = 
Constant[value = ]()\n", + " %302 = Constant[value = ]()\n", + " %303 = Constant[value = ]()\n", + " %304 = Constant[value = ]()\n", + " %305 = Slice(%300, %302, %303, %301, %304)\n", + " %306 = Concat[axis = 1](%275, %285, %295, %305)\n", + " %461 = Conv[dilations = [1, 1], group = 1, kernel_shape = [3, 3], pads = [1, 1, 1, 1], strides = [1, 1]](%306, %462, %463)\n", + " %309 = LeakyRelu[alpha = 0.100000001490116](%461)\n", + " %464 = Conv[dilations = [1, 1], group = 1, kernel_shape = [3, 3], pads = [1, 1, 1, 1], strides = [2, 2]](%309, %465, %466)\n", + " %312 = LeakyRelu[alpha = 0.100000001490116](%464)\n", + " %467 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%312, %468, %469)\n", + " %315 = LeakyRelu[alpha = 0.100000001490116](%467)\n", + " %470 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%315, %471, %472)\n", + " %318 = LeakyRelu[alpha = 0.100000001490116](%470)\n", + " %473 = Conv[dilations = [1, 1], group = 1, kernel_shape = [3, 3], pads = [1, 1, 1, 1], strides = [1, 1]](%318, %474, %475)\n", + " %321 = LeakyRelu[alpha = 0.100000001490116](%473)\n", + " %322 = Add(%315, %321)\n", + " %323 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%322, %model.2.cv3.weight)\n", + " %324 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%312, %model.2.cv2.weight)\n", + " %325 = Concat[axis = 1](%323, %324)\n", + " %326 = BatchNormalization[epsilon = 0.00100000004749745, momentum = 0.970000028610229](%325, %model.2.bn.weight, %model.2.bn.bias, %model.2.bn.running_mean, %model.2.bn.running_var)\n", + " %327 = LeakyRelu[alpha = 0.100000001490116](%326)\n", + " %476 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%327, %477, %478)\n", + " %330 = LeakyRelu[alpha = 0.100000001490116](%476)\n", + " %479 = Conv[dilations = [1, 1], group = 1, kernel_shape = [3, 3], pads = [1, 1, 1, 1], strides = [2, 2]](%330, %480, %481)\n", + " %333 = LeakyRelu[alpha = 0.100000001490116](%479)\n", + " %482 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%333, %483, %484)\n", + " %336 = LeakyRelu[alpha = 0.100000001490116](%482)\n", + " %485 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%336, %486, %487)\n", + " %339 = LeakyRelu[alpha = 0.100000001490116](%485)\n", + " %488 = Conv[dilations = [1, 1], group = 1, kernel_shape = [3, 3], pads = [1, 1, 1, 1], strides = [1, 1]](%339, %489, %490)\n", + " %342 = LeakyRelu[alpha = 0.100000001490116](%488)\n", + " %343 = Add(%336, %342)\n", + " %491 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%343, %492, %493)\n", + " %346 = LeakyRelu[alpha = 0.100000001490116](%491)\n", + " %494 = Conv[dilations = [1, 1], group = 1, kernel_shape = [3, 3], pads = [1, 1, 1, 1], strides = [1, 1]](%346, %495, %496)\n", + " %349 = LeakyRelu[alpha = 0.100000001490116](%494)\n", + " %350 = Add(%343, %349)\n", + " %497 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%350, %498, %499)\n", + " %353 = LeakyRelu[alpha = 0.100000001490116](%497)\n", + " %500 = Conv[dilations = [1, 1], group = 1, kernel_shape = [3, 3], pads = [1, 1, 1, 1], strides = [1, 1]](%353, %501, %502)\n", + " %356 = LeakyRelu[alpha = 0.100000001490116](%500)\n", + " 
%357 = Add(%350, %356)\n", + " %358 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%357, %model.4.cv3.weight)\n", + " %359 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%333, %model.4.cv2.weight)\n", + " %360 = Concat[axis = 1](%358, %359)\n", + " %361 = BatchNormalization[epsilon = 0.00100000004749745, momentum = 0.970000028610229](%360, %model.4.bn.weight, %model.4.bn.bias, %model.4.bn.running_mean, %model.4.bn.running_var)\n", + " %362 = LeakyRelu[alpha = 0.100000001490116](%361)\n", + " %503 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%362, %504, %505)\n", + " %365 = LeakyRelu[alpha = 0.100000001490116](%503)\n", + " %506 = Conv[dilations = [1, 1], group = 1, kernel_shape = [3, 3], pads = [1, 1, 1, 1], strides = [2, 2]](%365, %507, %508)\n", + " %368 = LeakyRelu[alpha = 0.100000001490116](%506)\n", + " %509 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%368, %510, %511)\n", + " %371 = LeakyRelu[alpha = 0.100000001490116](%509)\n", + " %512 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%371, %513, %514)\n", + " %374 = LeakyRelu[alpha = 0.100000001490116](%512)\n", + " %515 = Conv[dilations = [1, 1], group = 1, kernel_shape = [3, 3], pads = [1, 1, 1, 1], strides = [1, 1]](%374, %516, %517)\n", + " %377 = LeakyRelu[alpha = 0.100000001490116](%515)\n", + " %378 = Add(%371, %377)\n", + " %518 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%378, %519, %520)\n", + " %381 = LeakyRelu[alpha = 0.100000001490116](%518)\n", + " %521 = Conv[dilations = [1, 1], group = 1, kernel_shape = [3, 3], pads = [1, 1, 1, 1], strides = [1, 1]](%381, %522, %523)\n", + " %384 = LeakyRelu[alpha = 0.100000001490116](%521)\n", + " %385 = Add(%378, %384)\n", + " %524 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%385, %525, %526)\n", + " %388 = LeakyRelu[alpha = 0.100000001490116](%524)\n", + " %527 = Conv[dilations = [1, 1], group = 1, kernel_shape = [3, 3], pads = [1, 1, 1, 1], strides = [1, 1]](%388, %528, %529)\n", + " %391 = LeakyRelu[alpha = 0.100000001490116](%527)\n", + " %392 = Add(%385, %391)\n", + " %393 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%392, %model.6.cv3.weight)\n", + " %394 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%368, %model.6.cv2.weight)\n", + " %395 = Concat[axis = 1](%393, %394)\n", + " %396 = BatchNormalization[epsilon = 0.00100000004749745, momentum = 0.970000028610229](%395, %model.6.bn.weight, %model.6.bn.bias, %model.6.bn.running_mean, %model.6.bn.running_var)\n", + " %397 = LeakyRelu[alpha = 0.100000001490116](%396)\n", + " %530 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%397, %531, %532)\n", + " %400 = LeakyRelu[alpha = 0.100000001490116](%530)\n", + " %533 = Conv[dilations = [1, 1], group = 1, kernel_shape = [3, 3], pads = [1, 1, 1, 1], strides = [2, 2]](%400, %534, %535)\n", + " %403 = LeakyRelu[alpha = 0.100000001490116](%533)\n", + " %536 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%403, %537, %538)\n", + " %406 = LeakyRelu[alpha = 0.100000001490116](%536)\n", + " %407 = 
MaxPool[ceil_mode = 0, kernel_shape = [5, 5], pads = [2, 2, 2, 2], strides = [1, 1]](%406)\n", + " %408 = MaxPool[ceil_mode = 0, kernel_shape = [9, 9], pads = [4, 4, 4, 4], strides = [1, 1]](%406)\n", + " %409 = MaxPool[ceil_mode = 0, kernel_shape = [13, 13], pads = [6, 6, 6, 6], strides = [1, 1]](%406)\n", + " %410 = Concat[axis = 1](%406, %407, %408, %409)\n", + " %539 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%410, %540, %541)\n", + " %413 = LeakyRelu[alpha = 0.100000001490116](%539)\n", + " %542 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%413, %543, %544)\n", + " %416 = LeakyRelu[alpha = 0.100000001490116](%542)\n", + " %545 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%416, %546, %547)\n", + " %419 = LeakyRelu[alpha = 0.100000001490116](%545)\n", + " %548 = Conv[dilations = [1, 1], group = 1, kernel_shape = [3, 3], pads = [1, 1, 1, 1], strides = [1, 1]](%419, %549, %550)\n", + " %422 = LeakyRelu[alpha = 0.100000001490116](%548)\n", + " %423 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%422, %model.9.cv3.weight)\n", + " %424 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%413, %model.9.cv2.weight)\n", + " %425 = Concat[axis = 1](%423, %424)\n", + " %426 = BatchNormalization[epsilon = 0.00100000004749745, momentum = 0.970000028610229](%425, %model.9.bn.weight, %model.9.bn.bias, %model.9.bn.running_mean, %model.9.bn.running_var)\n", + " %427 = LeakyRelu[alpha = 0.100000001490116](%426)\n", + " %551 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%427, %552, %553)\n", + " %430 = LeakyRelu[alpha = 0.100000001490116](%551)\n", + " %554 = Conv[dilations = [1, 1], group = 1, kernel_shape = [3, 3], pads = [1, 1, 1, 1], strides = [1, 1]](%365, %555, %556)\n", + " %433 = LeakyRelu[alpha = 0.100000001490116](%554)\n", + " %557 = Conv[dilations = [1, 1], group = 1, kernel_shape = [3, 3], pads = [1, 1, 1, 1], strides = [1, 1]](%400, %558, %559)\n", + " %436 = LeakyRelu[alpha = 0.100000001490116](%557)\n", + " %437 = Concat[axis = 1](%403, %430)\n", + " %560 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%437, %561, %562)\n", + " %440 = LeakyRelu[alpha = 0.100000001490116](%560)\n", + " %563 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%440, %564, %565)\n", + " %443 = LeakyRelu[alpha = 0.100000001490116](%563)\n", + " %566 = Conv[dilations = [1, 1], group = 1, kernel_shape = [3, 3], pads = [1, 1, 1, 1], strides = [1, 1]](%443, %567, %568)\n", + " %446 = LeakyRelu[alpha = 0.100000001490116](%566)\n", + " %447 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%446, %model.13.cv3.weight)\n", + " %448 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%437, %model.13.cv2.weight)\n", + " %449 = Concat[axis = 1](%447, %448)\n", + " %450 = BatchNormalization[epsilon = 0.00100000004749745, momentum = 0.970000028610229](%449, %model.13.bn.weight, %model.13.bn.bias, %model.13.bn.running_mean, %model.13.bn.running_var)\n", + " %451 = LeakyRelu[alpha = 0.100000001490116](%450)\n", + " %569 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 
1]](%451, %570, %571)\n", + " %454 = LeakyRelu[alpha = 0.100000001490116](%569)\n", + " %455 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%433, %model.14.m.0.weight, %model.14.m.0.bias)\n", + " %output = Sigmoid(%455)\n", + " %457 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%436, %model.14.m.1.weight, %model.14.m.1.bias)\n", + " %458 = Sigmoid(%457)\n", + " %459 = Conv[dilations = [1, 1], group = 1, kernel_shape = [1, 1], pads = [0, 0, 0, 0], strides = [1, 1]](%454, %model.14.m.2.weight, %model.14.m.2.bias)\n", + " %460 = Sigmoid(%459)\n", + " return %output, %458, %460\n", + "}\n", + "ONNX export success, saved as ./yolov5s-noupsample-coco128.onnx\n" + ] + } + ], + "source": [ + "!python ../exporting/yolov5_export.py --data ../yolov5/data/model_paths_520_coco128.yaml" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "4FkYYu4skqd2" + }, + "source": [ + "We should now have `yolov5s-noupsample.onnx` or `yolov5s.onnx` under the folder `yolov5`." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "O8qMEaYLkqd2" + }, + "source": [ + "## Converting the ONNX model with the toolchain\n", + "Pull the latest [ONNX converter](https://github.com/kneron/ONNX_Convertor/tree/master/optimizer_scripts) from GitHub and execute the commands in the folder `ONNX_Convertor/optimizer_scripts`\n", + "(reference: https://github.com/kneron/ONNX_Convertor/tree/master/optimizer_scripts):" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "mWipxhnUsy9H", + "outputId": "c5dac415-77e7-47e6-f4dd-e7602b7b7345" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "fatal: destination path 'ONNX_Convertor' already exists and is not an empty directory.\n" + ] + } + ], + "source": [ + "!git clone https://github.com/kneron/ONNX_Convertor.git" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "uXzCud9wkqd2", + "outputId": "011efcf7-f014-4746-8bf2-d9c5ea376b9d" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Simplifying...\n", + "Checking 0/3...\n", + "Checking 1/3...\n", + "Checking 2/3...\n", + "Ok!\n" + ] + } + ], + "source": [ + "!python -m onnxsim yolov5s-noupsample-coco128.onnx yolov5s-noupsample-coco128.onnx\n" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "id": "gxIp6xLmR40Q" + }, + "outputs": [], + "source": [ + "!python ONNX_Convertor/optimizer_scripts/pytorch2onnx.py yolov5s-noupsample-coco128.onnx yolov5s-noupsample-coco128_convert.onnx\n" + ] + },
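+ { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As an optional sanity check, we can load the converted file with the `onnx` Python package and confirm that the graph is well formed and still exposes the `images` input and the three sigmoid output heads. This is a minimal sketch, assuming the file `yolov5s-noupsample-coco128_convert.onnx` produced by `pytorch2onnx.py` above:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Optional sanity check on the converted model (a sketch; assumes the\n", + "# file name produced by pytorch2onnx.py in the previous cell).\n", + "import onnx\n", + "\n", + "model = onnx.load('yolov5s-noupsample-coco128_convert.onnx')\n", + "onnx.checker.check_model(model)               # raises if the graph is malformed\n", + "print([i.name for i in model.graph.input])    # expect ['images']\n", + "print([o.name for o in model.graph.output])   # the three detection heads" + ] + },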
+ { + "cell_type": "markdown", + "metadata": { + "id": "bA_ZjFn2kqd3" + }, + "source": [ + "# Inference\n", + "\n", + "In this section, we go through an example of using the trained network for inference: we pass an image into the network and detect and classify the objects in it. We use the script `inference.py`, which takes an image and a model and returns the detection information. The output format is a list of lists, [[l,t,w,h,score,class_id], [l,t,w,h,score,class_id], ...]. It can also draw the bounding boxes on the image if a save path is given. The preprocessing and postprocessing code can be found under the folder `exporting/yolov5/`.\n", + "\n", + "In this tutorial, we choose to run our YOLOv5 model on the Kneron 520. First, we save the model path information in a yaml file, called `pretrained_paths_520.yaml`, under the folder `data`. Here, we can reuse the yaml file that was created when we converted the PyTorch model to ONNX.\n", + "\n", + "To run inference on a single image, execute the command in the folder `yolov5`:" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "M9iiPIcIkqd3", + "outputId": "bd501838-c90c-4cbc-ccef-644a51fab57c" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "self.vanish_point 0.0\n", + "/usr/local/lib/python3.7/dist-packages/torch/nn/functional.py:718: UserWarning: Named tensors and all their associated APIs are an experimental feature and subject to change. Please do not use them for anything important until they are released as stable. (Triggered internally at /pytorch/c10/core/TensorImpl.h:1156.)\n", + " return torch.max_pool2d(input, kernel_size, stride, padding, dilation, ceil_mode)\n", + "[[934.0, 183.0, 285.0, 754.0, 0.8881659507751465, 0.0], [673.0, 220.0, 221.0, 710.0, 0.8578410148620605, 0.0]]\n" + ] + } + ], + "source": [ + "!python inference.py --data data/model_paths_520_coco128.yaml --conf_thres 0.6 --img-path tutorial/demo/yolo_demo.jpg --save-path tutorial/demo/out.jpg" + ] + },
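+ { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The detections printed above follow the [l, t, w, h, score, class_id] convention, with (l, t) the top-left corner in pixels. As a minimal sketch, the list can be consumed directly, for example to draw the boxes with OpenCV; the values below are copied from the output above, and `out_manual.jpg` is a hypothetical output name:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Sketch: consume the [[l, t, w, h, score, class_id], ...] list returned\n", + "# by inference.py (detection values copied from the cell output above).\n", + "import cv2\n", + "\n", + "detections = [[934.0, 183.0, 285.0, 754.0, 0.888, 0.0],\n", + "              [673.0, 220.0, 221.0, 710.0, 0.858, 0.0]]\n", + "img = cv2.imread('tutorial/demo/yolo_demo.jpg')\n", + "for l, t, w, h, score, cls in detections:\n", + "    # (l, t) is the top-left corner; (l + w, t + h) the bottom-right.\n", + "    cv2.rectangle(img, (int(l), int(t)), (int(l + w), int(t + h)), (0, 255, 0), 2)\n", + "    cv2.putText(img, '%d: %.2f' % (cls, score), (int(l), int(t) - 4),\n", + "                cv2.FONT_HERSHEY_SIMPLEX, 0.6, (0, 255, 0), 2)\n", + "cv2.imwrite('tutorial/demo/out_manual.jpg', img)" + ] + },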
"iVBORw0KGgoAAAANSUhEUgAABIoAAAKWCAYAAAAm1XIvAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOy9W69tS3Im9EXmmHOtvc+psqtc3S6bstvdprpBIKFWIy4SqHlpCSEkJMRDww/oJ35A/wzgsR94BfEDWuKBB/xA0XLTIOx2+VIGl11lu8pV5cO57MuaIzN4iEtG5hhjzrkue591jkecs/acc1wyIzMj45aRkcTM2GGHHXbYYYcddthhhx122GGHHXbYYYf0eSOwww477LDDDjvssMMOO+ywww477LDD84DdUbTDDjvssMMOO+ywww477LDDDjvssAOA3VG0ww477LDDDjvssMMOO+ywww477LCDwu4o2mGHHXbYYYcddthhhx122GGHHXbYAcDuKNphhx122GGHHXbYYYcddthhhx122EFhdxTtsMMOO+ywww477LDDDjvssMMOO+wA4HNwFBHRf0xEv0dE3yOif/y+699hhx122GGHHXbYYYcddthhhx122GEdiJnfX2VEGcDvA/gHAH4A4DcB/JfM/DvvDYkddthhhx122GGHHXbYYYcddthhhx1W4X1HFP07AL7HzP8PM98B+B8B/GfvGYcddthhhx122GGHHXbYYYcddthhhx1WYHrP9f0rAP4k/P4BgH83PkBE/wjAPwKA29vbv/etb30LtFrU2lXGIj6KWa/W9gTLN3vY3mJu91jfBRjM/sQVTQzYEQ1Yst4jrYH8OnNVlFifa/i1Qlr9MRBM8GZv29A8fVZLrtWxABOIEogSQIREWX+T4q54svaf17HSD+uDdAVw96EdJzhA8Es5I6eMnCakLDimJHhK2+WzlopSZ9Q6o9YifUpeJFov2DftJMe9NULqT0Oz2ngtf50DWlTB3d3lt41izj61isf1F7dLpf7neSw2arNh5vCu0yijJwDSehue0oPq12YZVBnzglorUs6Y8kFoGQAo4BcR2MSx0QWBwfWE0/wKtc5CR4sRo+G3Ykntu/xPQ/ljxet8LH4jIuQ84XA4yJw0vnS2RSvo9UXf/90tuKJM5orT6Q6lzBijWI0mhN9Ind6PG2NHA17MFbUWf4XGh7Tgw+GI4+Go97jxRmbHy/gFgbQIGuqk4bPDbBW/Vbjqoe2alneWHOmaKuJwaLeslgU6126ftWdhMS5nn7wG+RGlNQyuIfprx+KJJsS9YF1a3A9GXWSthsCH+UwrKN4Zn6J7NJ9XvvVDua538cavEelIkVZSUJBW6mm8lYOuZKUlmAxiBvI0IedJ9BFKgOpOhNT0KOVrXneHdaiP43WpQERD1I/W+U2sCyQyiCgp/1KcXJ/Sv9hFaFPSZ33fbb2OTOj4JlDBdcY83+Hu7i3m+Q7zPIPBPt+51qBHD/0fEIh833TNrdEegQGUUlG5IqekrbGxSLg53uD29qX/JhBSSqI3JBu7gYuRdEYpBZVrvHgJnfboFTNvUe6j4cEK+dPDKEpML/K5Eh+6Bi700SbvWnKZvuqFMNn4vYRn1Nv3gqeTVFeWdOax++PyQOy3Xludfves48IOrbPmmPKapYnF688/Kb8Avvt7v/0TZv5ra/fet6PoIjDzPwHwTwDg29/+Nv93/81/G4yvBmvXorIvQkmMh1ruwLhDRUFFAZcKFHV8MKNyQakFtcwu0EotaiQWlHJCqTMABhOrz2mdNTCqC+IEwmGakMlwK0gJ6uCoABLExzOjnO5QygmVT0AtYBRtgxnM8Q+otaJWNZbrjFoK5nkWgVYBrkDVZs4VKLWAUfH27R1KAcAZ4AnT4SWm6QVSvsWL2w8x5Zc4HI5I+QYpZdR6AvMJtbwG6l3XFxGftfEYxys+08aq+m9AHDs5T8jpCEoH3Bxf4MWHP4evfuUX8OHLr+GDD76Gm5uXuLm9weFwQDlV0CnjdDrhs88+w8ef/ASfvvoJXr/5BPP8BpQq8kSgLGNTcBK6YBH8hasrCCllGVdiEN1gyjeu4iXO0mcACBWUqitX29s3EzgoZ7GfqrZVysurz6z14bn7a3gsjXJG5XkDX6CNp+luOsadsKfwtx2UaGMsDh2gVgLXhFomMCvtY0blilorwAnAATllUDoBdAJQRAHHhIQjwAdBhxLm8haffPIxXr16jQ8//Dn8wte/iWl6ASAhpUkVe5nfQmfGG2T+UVKXApPwBS4gZiR+g9PpR/jRj/8FPv3sL3CaX4MhfSYzO4OR9VcC7DtNyGkC0YSUsiukYAKjbHQSRfNsMWa1MqbpgJ//+tfxzW/+EqZ8wHx3AlBRalX+EAs0JzTAFIYrDG83YhwUpvaIFXVxXgu+9pLxKivXBFnFmzev8OMf/wn+8qOfihHBDCJSXkVAzSBkFCLQlDBNk+BUGKgVBG1rFsQSJaTEOlcZb99+hs9efQrh0YK3OZMBgEsC8QG/+Ne/hV/91V9DSgm1VuWXFXenNyjlDgAjJ0KmhClnHCajI0LKOkdZxpqQ40SRLgtzNKXmtFwTGSNP9OvcjMux3MWzlMAU56Ogk/z+Bt2F59kHkN1BLnMRIkioqtE5qSM2eZ9EZcUd9/Ji+Gv4ERFIhKC2ccshlKR/zwDH4r3OcB9qbNJ8WZPn5gTYgmt4NDODFo7l7fLOouTysYZuNjx7JfHyPM06ZsBaZ8hC1Un5sTguCqLeG+pKFUQBt6DpCl9npeHzUFEar0IzwgkEYgJX0X8qqlKJLKjZn8kloQN17vhin+GjjgwUgCuY5bPW6vQj3ytKPaHyjMqiS93dvcVcZm1HxpRvAT6KHAPwC9/4Jr761W/gcPwAabrFlG+R0hGH6QUO0w2O0y0SjiBMEL4kiw5cjTJPqFWc55Wr6qsVXAvABUy19UsN85sR+M4Rh8NLUDoi5QNSmjBNRxyPt8jpBofpBofDDXI+YMqT8LTDQXkjIyUGZUZKQCZdGKtQeR306Sq4lVpRElC0v7jcAfUN3r76KX784z/GD37wPfzFT3+In/70Ryh8QkqyqPf27VucTqdOJ3BOQQBleS7nrHKhqpwQXU3EC68aYs5pmPHRJ5/izd0rvHz5EjndAvUGh+kljodb/Nqv/W38nb/9b+Hm+BUcphvkdMQHtx/i61//Jm5fvJR+IRKtjRmgAhBQSsEnn3yC169fS/8HPkcbul8jQRL+vDkZwgIFpUFfG8seNYUlmJPwKV0X1+gAW2D6hy0CMQOghJwOmKYDpnyQBWCsy0IvJ/TLpXQpHY0FezCCO0yT0L5c656ALaRfs/GGfIVl3R7qFkifEVyTeuapnrHntp69fxqc6t/W7ByDcfS26un1l/Ysm6wALg9hLR1PWC0PQO0iPZR21E9QzVYgDjZY+5DACLtUL+PEUPmBxkdX4N/++7/+/a0i3rej6IcAfiX8/pZee8dAw5+CDdDGG9vFURu7sdN1YEWmsUQ7JNFmiFWwmPGyqJ
H2G4A+qA4rSrUdaqCF6QU2OXCaGGT+SJb/LiVpkTGOD2IuXkGju9k8nPdXPeela26tqe9PCrmchwJ21ZFB9KW3BW3+mHmQCZP6O196X9i5Assc76DXzZQfSu3ui7n5oWO6bp+0oYiIsCzeRZtZKYJLLFdD72Y0Y5N/KwCJfB2aG6ObwYbh7nm4YgwpKELh9hcBcfIiyPsIhMEqKvWkRSK8KN2L7XdR1kTDS01z/uTizkzNShqa/8zudNsVGi2ZeG1YuN2qS2YlTveG0DO+fQ6lVDrys6N49eoe/vo+zf3L5p4cCCMIUc1R4yf3Uj6vpxKUhijazxuBaVgYIMWuZdFks4IEXdUIUpsmLPXKaHmSCE92zgt4NAitms/jdMbWL+j9yfKVCJx5E7VdbiIcvPvacEO7j8IDBX4WfJBDCJjCTGLNgmPZD5vLIwNkCNeyHtVQFO7E+gEEZigKY9pEXvFa/8gTffqc+Hrr1FD2NeGhVS5yd94j2ktDc1YyTCASeXAMGGRScQ7gP1fDcbCcfCFfW4za5Ey7eENN9GT3iqOu2mOjW02EFcqdRGiKjrSXgwjNhwP0YYYiS9Y9OtA3hJFIwsiYglJPZvV0RRP1n0B0BuQEwhlMHcwKeBQubaZkul+AntjOIZOZl20KHzkChCFBXXGzaZ4UuGmHl1mcuQdNl/s+o1IRj2Kd9VRZX0niPELnOUOHkpcDKms+++xzvHv3TsthU9JU5gxIGtBOMRi3leMauubSxcdWjWbKx42fmfF+b5fwl4z9lwfN96mXK6gnjBRejirf1EgG9G1gWzsanzA2Qt+UpsamHgFMDBIu7vRmzGDWHDzDSrB7j23+LZ2Y9aP08WrSbI4leVx4niFpG+JzIKDF5ni4wkCBpdwjL2lNckuSzSEDxCMZ7zStkuDvyli++7XStACov6cqGtjAx7YHxM5zvWIhWai7CIOhXqqQBtlWvP/+Az58/2A2XTYvOpuH+Jn0fSXDqciI3bj3fOtjt1syOLuWa+6G6DgMIg0VHcNIS4C7uzucz3cgXvTwAhb2YU+b+Hd8lv+fCi0ouAijiR/cgWny8mQidOPXeZDj2KVZeLol2u/zuImM58cYGVWWuZdr2B6DM9reG9qXeZ6sY1WUvmoxAFfQq40kMWLSqr/Djg+gSZnGTOdB+t3mnY1/KObTzBJil9Lr+inXuEVpwr/X3H3shrgyJ7dZZe5R7f6xTLrVn4/d4qCyrOPP8JrdOyuf+j02yh8/riu5t8Vp5XDhzXtPCG10gBidTmCs4H7RYgGshlKYf6QKiTmXlYfPAgJeVK9pp6E4fJxA4zPI+gW2p3fYHu+AD38G6r8D828A/g7q4bZYQnvNJ6fhZq4DDSx6PADBwMqEFRw8jaEyofpv7rf/p9meMXIQ3JUtrz4ajAA/V/haSoVPcBYN936s9gdhKMpEegVGhZCykCTLm8HhQmohXkOrkDU+WZnfxZQBS5IbgCjV4vDUifU3wWiAIpRDE+TsyToGWyhE9t8NP4ozGxqrt5OeFm5ovaNzU1do+8esuQuIyCrMoOQyqH0qPX6JJopRKJWN8tPmWCtjqNulJlmzt9BrvZZe31Q5k4MNPgowVmboypOpHCV7f0Cz0kdLTLnTHPV7fzfF357zQY1zphNhBspVWa2GMzcgJLA0OqIRTkZM0CSvnXFZga1rEt2qrLph5NiDBpGotTY3fO0gbii6qYCZULRrGjiAxZXBxkCcn0LsAboaRRyI6wl9ALgdjiiwNL7UfWwV4XIkCW1jz1guL69KZyGSvvcApKHVEweb55IrPPWESOekxp5YGIJ5GqR+mIkG9XlVYcDh727MiVCQq3ugSkQAZIG72PucVGANN7K5AUpgCrYm1z0vWi1HvYwQoFUEUV67l/0MAXoHZHDwTs2/kiW/57XQUEGiBY3vwHQPhhqEGhMgT4hyyRB46CtxKrmzNxs0EXRtvpj72RRCZEANAtp5C16tQVVQ5if61UGvTld131CGrF2FGJjADSORbZrGC+7v77VqktFJrieSDorBUCQpcAJsV/tdyr45au5xWkKcXG+4Uvxe5tmht/oDYjNobwcsAXPXymjMQO8D62peZua51jfdO8Nd7Qf08EFMTjayxPoD2EhPaOOQwXakR8qQ55qoe74cSFgXx8i1HxFGQkE3vicEA20Q7u5Ps1I7NMrbT8dlJA90nuTpmmBkGRX39lNc+hW8UEpwbt0ytk6jj/zeCENi/fNe/38YrxwI1u1gbuZMnm+rm7cJ4enhgvVy0YppLgdN/jrbmmVYjsk51o5q8PtqxwdSLv3MSEmLYULl/X0MXJ5WnE4N9/dvLIm1hu3Phugi5+ONjCyIkAcHeqBWDhJC9vt6cnnGMKwwKzR6n1diJM29CZeTeWF6e/nzNWzH3695/LJSJ1uIN7t8DJxk4WHFKPiD5h5FB5FiNKv8uTxXoB6K4h7Z5LQkeWnZ377H40CwGkBe2d/JS/7gnmEKduyxoH9/h8RqHs5AfPH7UhAlujK1wxw/8SV+f/39edrzowlmikkOT5aFlM3PPUegh6sEoNsBKltmE09MPYKWh3maEjJtir9zALQFLyeCJrfm9yDqOLX3aOczBE8YD9+C5ANIOgR3tns3EDqYCJsANJxOLVcvBKN1bAQIFnA7RSXgCQ4Er1f+fyPQ/SO0itl+5O27A5JoQojYSdlfX59xcP+RKPsxXQzZW+XwL7zHXnzdx1nbT9tQBM154IqNK8uKwwUYYqe3s2BlTnAo3CAWjw+cNPcM2z8sqsDYYT8TNCdORd5lot1UVHUYFd/F84VbVKTR0toOxlS4MzeNE20LTssJvXe0vmDhBctyQu8btq2H1wBBE5ZybHCH9HmSpIBYog+vaRVfTrdQ9QgwJhLAmuAK4JUhxu8IA9qB/nPUD/Ex+Yy6klncgQUGgklDGxygWFyxVsLLlznoVeOa04ePAa55TFAwTxXEmPwO8dffXRBQgrZUjEeAQRIFrAr8GWiCRdSg2XuG4rjBw0HeUdME0EBQnTP66BUVw9F138NjQqVT0EuBojrnobPs+mGDquFmqhcdG7YqXiE77dSl1LxdIqnY1/6q0cVLcZs3oEhJVj7Teir+DelRlEqP7z0CmaeSlyuH8gXfCeYB4tXm9PlOj/WEiGDxMSkfIreQ9i8P3szDgYxeyfuv4GJKwhj35rrm1Lt7PmPbBuQEyFAvjtHFqjzm/AwRbJZzScS9Bas0Haj8MkNA7TSXG050ArOAaWC9XAAM0DY8wwDG2IwP+54pv1/R345GLKQ2ysI6rxDA839MXpQYun/s1LnOky7CzKenNvGu/KfGBgsOtNLYuReOQjKcJvxvzbfEbMUTRPun92VCer838+Fdr+/Nfj8buqsqXyhYlTX5KGTex7dkhENFtpkm+9R2DVLNFJUPbqm3inpqmLG5tUnsY4vxuvFlEIG6s98RxiA3VmCwVukggFiyshupcqfy2bw5bSyjO2vycKNKC3qdDGho+I5mZIiFWgKej70eSoUSCUCYYWnobFhFWdPNuVs7+9WNVoWVa
H4wu2x4NwsnLkq89yke6Mvtxh5CCTkymWYHF15lb2wD0jXstK+aRFtk6AEMJH/GEHKPeXBmIZayWe2DV8j5n9qqcQ9InOF9cJjg/898duphuW0d69bx7t0d7u7egNsS8mI4Twi+X2Vbu8IxKqO4/HQDc/Ekz10IPywc0jW3D+V+yaVNL8o0KGV4moakUZHv/rl7M2oKg7zeZaMeoLhhfA4t9Odme0nvmM4by3sqz0uDi8r7yYBPNswuSMMGZV6vI1p6hr0fXZxYrtDL8Wh2f2UY3NTlG+8+wrj7nGo/xhj3YiNCzdmSk3rzhrzuIyqyKf9/X+2lsTgNjBS9rgdU4wl8r3mY9F6GAh1NMQjZocO7z7Ge7yDbN+CYW0SYZdJQlc9+neMFNzAD4AtAK5geIPfvIU8DWBuAE4gatDqv5/nTnF40BIIOaTYwUbnKIqBBOJ3fop3OYLB5naaXbtoUf/71u85d9Kq79MfN/eXP3Mufo/fsxyjP/vm65gzO+1lDVD/eHntVP8Q92naff8S1/aQNRSA/aYkP/GPd2BE25BsdunFgu6CbMkQEGVvUeAnHYbaYcIFWYwqhV0CH47MpqZr2JYCaqMLRydzSZQTwVMHJcM8ZJqCRqGCkhhM3DF6wUcNCDZ1OODctxTwMwarRy4nPlNYJjMAI9QcQaNXnQjVAUU4okrxOdC/1muN2y4j0zB3l4fUz/8dgUW8RGQQa6r1Vv89cA1XRvw698mETk4WW6NoPqNFRh8txta/7lTFjJyDnfEs5/ghNI4OkUe69JiTPMCeOCkKlz1MCzATNTl+gAvzggOEG4hJLSH7w7f6eyQNr6tD1OKevTUgKRS9jz3qoqIZAFWFQvBgIhLacsCwO5jOOfOpfjVBy4yTyLHCyTYiGhDYs6Numay3zPHhIgIan+Ps85Wrmi8iHV1DqU2NGgQLYRWjebgXARGiW3TuNL9F25lGwqkhjCGQQtm2YZ5GfxKr7sVgeKgklygUqgRvh3CwJvnW/hloQCcB3AN3h/ffA+fwBgg3EXQ1VrWtYZWeAPGTAOuojcR5N6XY9AzOa5lN5sM9l9bupM5c04s8O3mXGglmhzO/JaIyLFkAEq7KmvRtlvv2Ztc9pJMqwFjIjZYRX+coZfxa7kcr9+YLdsOq7fFKmC1IVJgMIUyLFSoeuge4MEEcygsT7mH4HwZ/EwzoNlI4OWgaIFLgKOpgXwIz1xAJugmURk19k9nyy0K0OkHsJjuirG5F9gAesW3M+SCqeIkDf0vOoejumh5F5GUHFsmwCsqTjfQz0dVhxBqMCCyHcz1NkluojHPinUCyrpseNre9lzZyNOK71fGIHaxJGCrdQAXogJjW8O2nel8sN8elVpftNQ1GHVi9cVzX60gpPekxF2k1yw/avG7+zg3viwU9o14r0D8Iw5TnhH+JzbF5fHmK79Y4hwOnuzoqdmDeR85p93qWwAkjhK/ou3X1pIHKDjB8yuGepkn1iBTaDTWtNPTzdEER13/mW3eMWOzzzeYv/23hL7kt9pKTHre/s3Vz/0KUrVDt9poYt/dz7Lq4tI8fvXqgeSpYGV9G1cFlex/2KTlaDfN64p6PdHrt6igTWuDXyeE7BK8E5BAf37kfzc7TXPt97eojoftH2yxiXHJupfhblTAHk2DOsV/l+bnvP06VSaaDTGUSEhVYMAd78g3+I789/gl89/C14dLA0PcSu/PKKdxKAUoHXvbLdbU5YcdvyCOI7YJz0vtbzGiumwKKVSgdp/5rhHpKBZejh13L/DrSc4XI89lft3h9SC75o/IYMP08gXnaknXx1+uij9cn21e9vO1l7ZlAfYb990oYiAqG1s4HhBi8HLEPUI8gVzyEFVKv3kQpvXb/eV91MzYBjA9Q7ZYVAwG1J04CDTX+YzMLKQV4IdQOXrrCKdH1WwxVIJ1IFuTUdnZbHVU+pftoU0EUSRtbTWoy4xhmaK0MSiq4rZz9EaPh462wr4BBTfqnky5n0YxyFi+U4f0i7fb1PvCpgwxlheBxYHhXSeF1X1BTAmXeJH9fW9Sz5l+LQ0QECZTghgJ2RKMFIniAea3pqfIKF29QcE6V/5MDEkb55ncgM6BR/kOETozup80YB1Pbwbd8n/yoTNT7TRoL2vNfnIxVW7c61suq4KfG3g2DVmpSe51eGW7+5y6vgtDHemu+C79UosvP+gYJmtw/4Wmqy0HKndXialcjW6NVkbA6eUWZqupE9TpAbv+XI5vE5rfkgfW5772Y8BvS0zKq2MGM5qYfL5spinDS6LxBCyYlqcr4lwvsJ4AXgRlgaMPoFpyfg/cMGkRUDC3hrENbwBwrV2UnEFYRJggMThVYua+8lV/gs1r/Mh5/Yp+HJ9/ZAGoq9D3sla9dccYNWOBwi6u0y9oZDo5cIz8mVqp5/6jHkymOOzUHnfHqX2VAiD9rRXqzkvhtBvfo5fhv6zIRVr6/ncpIaLzaewyCwGNAUNQyJl+6lgdOpWfUbLQwgpOE957slcorpe22epaN5mCYojBrzP+edEn0mInQv+x7hZpp3y41EaXeuxh57Dg30Trhc1sj703tH3/wajz/z6kx1HkUTiHbCQlqhSkO7rL/QvScQnO/OOJ0XROW2soSqE6THRbJVN6QjvnMjhRptd3vFnjfpIwZ2hi26VmRRd6n1SfDw9ICn9YJ1u4BoA0g9i8C6/9VbytbEuMR1cuQymI+k7F0bN3/YfeExad0KPd5Cct0gpIn/tdqohnibwnbLG7EY6qpxbu6zh6AlxkjDkfNdmR/JpJ7GYDQwOlXsWjx0pnc6Le0PHBBGF6Dkb4xosEozzivnJ/y4VZT4WUa3e9f8ty6N5T8L+pHpOqmPKW/6oTqYTP3yp78C89TeuJifDumyN44d01v3qJeUPz6mIlm8GPKz17xjP/7fu3b7E9szE3st9HFNo9d7O7+330iPQ4QbWDoIG3pj0J/+e/j2s38f3333/+IL6ZDAqnt5Pnt7RFhkdCUT3oeRg5Zy32Y3LmrwgvIP9kMWguWIVJmmsGTgCYC8fQtqixUONI9ngnkk7bHnz912wvBV18te8Sz9pflSoOyL+pw9x/yYja7+ct0Zcit09edqt8b4Or73Uvu0DUVE4HanYIbUhVjGsLKHrhSuELKNGgp+UcTUsgQQWTJYBV3DXQ8JgAgGbQAxGpEqP85AIkEBaRWKUNKNcC1WUmDKJzL0SqysOnsJUhPenldhiOYkOp3O6GND7xvGMjC6AMIYrHlE+hiRg8kVbZuh6eezCR9DYZzUDLsvPxE3FFk4gRJ/BYYfm/z3/bk2BkTKif37yeWjA8UEKbMrNJu3giosupE9XDBZCREBDPDgeP6sWFUBczQOpz/Y++fwHlUSMlGyGy7S86l6KiVjhFeWmoQb4vf4O6bmecZwqJheXWTz5fCuGNBcGRY7Dd2/cYJ/VJJACwqoilnf/e4PcUV7DzxrMswyXCo5RSTT8vr3bsCo4Qq+9wED2VCPBTf0tVJFzSG6G+SuAfI8/uPPxIcG5V8x1AKnKWdnZySixua1gFBGloUxuiqu
rTUsJ6+e2KZ1iNMyf77L4Jib3XrYNrm/u8dnb9+i90fcLWf0doKMFZ0XYDSrPOkKqnv5ODAxJdoq87i320xLGS44z2bSXBifzIOI/Q06SRD2ZPdpXLrmIoAb/qYdbXOvbqsRWKfPKqfGSurOZySeR4B5ttoobGJTtacpH0athnV7H5bFKVfPCsu1h8BVIzL39Gu5qF87P6kGa6dLpRSVbz7jGv4lowNNABpoDViWZhUCG9rpDd7cN5zudp0jDw1rOW6dvCgL6x6Hno8vx2rfDdHqTvoB8oDG18HDcpyWPVddB9A0t9Im6LSZPJTpHT7qZp4h6fSWPKvbgY2MoWFrYrkQbb+oJ3BR5Egil5uD8zr3ExubpmzHYSaDSpVBYnKa3M4bIe8AtPw5CR4uj1jHio4OLZasiYUJgFA5bYYr84518Ok3o9c8LHFsx0bSmiDajZVdJIyjMKyTeyH/7V6QfxVPoBkX5Z6sObUESB4gAMxLb1CD+hCkbNc+UPDpVwz84Hf3gpXgYZWV7CTtj27axxoyRtkTM+ynnGNoyGPtjO9WyqXAvEemg7sb/RCR4pubY6x/yXwDqhp3GzdXvmn/nqGLm+2ja4zG46504D+Ezfrx2ozDnrtq93vsiYo6juYu38AiaNCD+w0nyOd/geUf/mP87jd/jbfbI84ovnZRTXCntbj3EFS/01dwuUJAjSF8gvCA4NEg6pLdFe9/kV9+IM4MyIoxOj7cNciXX4BOJ7jrwyzlDKPInqY/oXaFgSqq+1hc7GO0uR8v0uQfhFCd2ydtKAIxluUeIsDg2ZUcMDdresDo687rZnbndvVLk+gOaC4JNSw1EQi7t0CDxEm7KzDpTqx6SWEuIflrKJGDNFNqiAFqGFYeWuPINSxkaQNyUsV0eN4AcyHets3GtGHbNvUikA6RTT2JxN3tXTl+YS4d3Oyn2MCOKuUWN2uhAWxu+A4IfFQfXe4920xRLmAdtpa+zk4POv9zCBrHOtJUTdTXMyeugG+ar6vA5qpvB3/7NJPRhZ9sM7Odhs/Cq3rS1DwH2r2qUMv03XHjZ76z9oICMBkOE8rNBiqoochTsd4GcfqU9H5zZU89qhI4muvvUKPDGAI2z7u9sUgVyoGs0kfhHeOhVmocrlXWKJ5FprSi7FP3FgnTqBsSPf9KdRWyz6+MWJQ/8wTTQHBV8l1pOFiDDMczijerlefCaEvD+U6NGFn2XhWiOLFW7Q+puDiv0EcPS+B73Afls6MbP+0drTXNp3a6w7reofcVC58gfALGiGpCsLLqqQQXBQxmIIgpujaKKg8SOC/N2PZr6mJqIOqauJUXZOLxF7hTea/p8WYcUaNW7B+yQTl7t7+rYqPJ1pNuCCj0TLHs1SghZQ5udnGXP6F8Ez/lxrzkMA8UxMnYUNaJqKyVG7hLsn0bj3vujDHM+7VHeFk+F3agM/PSzB8YM5Xbg7k4duxkt/GqfLfRsoVaLuwyQDDx6iIb+1BjVA1R8Xe0xfpV9uyYnoeQfcMSZauxTA2K7Lkmgh6Mp1HNK4jCJqToeC7bVS8D7QAAIABJREFUjpXVSUEOy27Og+eaI4h5/xJ6B9Z1AEO0RLN0PF4uuPQLOg0MEsAqFeoBgK6DO3AJ3BPQ1/QXlvY7Gi3fPNsXEfckhyVxVoNXBPbaAZF6taHw6Bqqnnsmeaj3y/8H5NzU9/t+3H+XfL+oZyBklc557OqhGcYiIyY//PA+UPxPjKfNeFccMznxueHqVjdf06YDKld4443lGio6neObXrBxznPcWfauGF+unvzPUeE+9IxiAcs1QFRZrO+rHmHZKcm53T9kjyEn/PhL7BWXlYSsIId5sv7I25Tb8eqz2uq63vpu/j7wdp1nJrSxoaFjZULHCbj7Ep/9B/8ZHv7Pf4L+9G9B/cHoWWzfIXSnfIVjeFs7ADLlCDN+0AZ46VphcyyAmLGHtrhO/HKodxCRoSYCugx8f2acv/wcfYi7i2qfAvPovWkw/VTpJ/fVXHTg+vvjPXiLVx+956e3ePKzOtovK1M/RvukDUUEwtLOeiKEgS49wXxUUjipIzxtGNQjfAtACFsnIMEAZDMDk55uau4SUQ8PVmHMI914XRgy5CqMxnvpgkUMgOjHkrolW/UL0TC5OF2UBcsCBXyLKs7LcsL5fId1u6ixqHdcLhes6wr1LmL0vkEVOq0edStJ6VXbCTk9TXdhmSFbJHAfRjOouKKQJ3Zi98xJJWvOGjrYD76REyAj5ux48wY7J6iBjUbZg/M5kq93VbMSaitYzFNi/cXzVjl+oVjkW6Cw/n2b4RDBksj5iZuvkQPEkpQyvLcqcMznHL87x+w/w0jyQnsdS7x+ULwrCRugXPOc97CmoI7Pv1KFa7OrzLOKdF/7GCrsdSVR96Dn4EhFlRbCspzQmmcSUc+EZm796Q3kXof+jhKu4vvXfldrihm4BkOkAeLhcgVtk46zhkSq8XWP3VzYORCpqzHPtUA9OTz9KEHzWrDZP7ntvcjI2JT43eVJrmTv/l29UZ8lIlpOWjrGUP7TRzfDzIKlncF8sn8Lhmx2Wi5wD0TySQydwX53r4speb178eR4GMl7E8hop5UXFA8kYrSF0dgTQ1L0w+8jA3BeKpFYq2m2dsLAqvTRzbIh/nzNSTWHggg811UkoYs1YLhVI0qtI41HRPXvOvvPgZZbiucxbzrYsSAaE1gNSDXhQ576GfmWTIHS7auEM8YAU0fvK9Z1xeVyQWuWBwEC2QiXJ4EM934znhHyxHvhxhi2w1XfT3PuqRipyQnejdnzBe7hrqb303c0ppLTzn94f8TmKWmGY/3K7hQNSQtfRXKemxe01sCNIC2NRJWuc1RGCeJf6Zcy/NmCddvCKDfvAYpKhjkGQlM3Q6wXwboKSBhjrOj9Eb/5+ius2xMGNsBkqFgVQ4puFAMpGHEwI4Is9/yRm0/NM0YyX5cqD/yHToEZUUZH70NzjjU9nDNy0oqj3LCNgQg5MwMGBgXvSvr3wwanD+9TGt6VN2SPBNh5PjvvyC4TEZiargOT/ht2HUONezWpcwmNJpNVwYes1Dyx2B7PngQPhmVeY/Oo9k6T0+JeCpTfJqzotxaPKaP9YTSuXmkmaS0M030s0jTGRX7P66vTWr0yXodS6lxL8A2dozn0mTWBeTG0uZyOcZUE0VP+xJhXywlXBGjZKcf9cgj0EjAjQF5j7CmJ6/f3V/D3Mg6UyQNrfpTEt/G2g+d5JcDswPHT4LJXRtDv/I9212MGT8b7bZIQaQCcFTimq7whto5hRqVW64tj7XIwdNDt7KHpmzIg1ECs1Wbv/uo/RPuP/gt8+Kf/HGc8YOkrhAWrLDgNLXjSmdBoBL7VRNZd+0sCYYqw9zZIeUAboHuGPBLw1LSiGXcM6ha2CpAsAK0ACQY3m2KB0ILBwPj8M9z91V9iI6j3KDxG20ZNbkI3vPgCwbicnSZ+t+SJoZ97Vj3Yf+4qoOZ8PAYuM+3RwTAEsAik+v59/xwfOf0c0KO/e8qBJPN1YihI9ijlenRE8xxcOy58rObA44fx0337pA1
FgCrbwUCDXob9LgAvqpAzq/dN7yDqSD6fgkMFUYeHbwmrUYkZ4CYAGoT1RJQMJESCYSOQeRmdCaZ49vf6vhIiy2VQxbJVUPFawIuBCG5ofUFrF9DC4O2CsQKw6jq9r+Ch3hN6eqb/3JMKuC2Qkgj3G0wNF1EdAw5GKgMtAFucCxM8186cD8Jn3ZnxvhV0VYRw/b6G++TZm4AxNBWckFnRR8IR8ZEVhSS8Ew4SRMcbBHv5XPQIxLRdXXErJ9SeEbmiWYEeIXMq+Xrk52ngVCZOB0D9+N2vcFd/5lQ0LzlYk+kefw9rAr+rFhIcrng4Taj/Sbf94MIzkxO7R8K+f8pINVeQlLw8rlyxed0ELYrPnlWLgBtx7P7oJpmAkZAXJEprboiSMRuZ9Bl57yBBm05SKcLLct4S4OUOzDHK/nsyLwfFNRpWtjQsJwYvCdh9+3hlKU8SXSC3GSttXqsSHK/3frtRbmhozejYties24ouAmoL+HQHWh9BywKC/ROAxhbGtzS2+pvMg4xjguO9JBrq6rHdHALX5+IahgcVmpFy2L36HZf5tXfY/OqWk5hP9fDzQ4FyEki2D5HG9KTDsja+UB5W51ca6J/XWE8TaTKsv7Rbj/e4j8oT3NYm5TrtC189JrfXflc7r7Ok3zavAcODn6ohcV0veHh8AFHD+WyGEjDGOGFIM9ntL00ZwTaJk7GTCEHssV8oDOB+YayFz70J5YncAKt8qkY7HoDcqCbpm5786f67HYqEdwPUG6NBcwGJ8RoHoURmrGxJIyDlJWKh5vUkt/IEsc3psmzrHQ+PT3o/+07Q/kTRg8ZBw+5Ns20D69oxuuY37L3j4fF7fP/hW/TxhDFWqIdr19OwUAY1Y45EXiWTvALAvJ+fkxdXM3ogm47DHhM7VMPBPrm3vt0MljZ/ZJXvxA7fMi+l5miTCC0bus99TcnlgJ3Ku+Kfb7TP5sqFeQjovAIl4XpdcARte4rH6uVO7h1qXqBgMbKnpBujN/HKld6zOKhTgKrGb11DDl4gZpCy55PjRPdWdnBb8dpurcqapcHEfyp/VqOYeW0RAaTzzk2LUIQMNK8KmvhhlYe7l/qfvr9N+yNROZvMaxaAWq2YC/34uP16/S7CVyNVpc+HYfEoue1ephkOG8bDIhGcT9DUp/3QpFRtvdGKbH7+ouvX7L1HR7+WmYdPuqE8Sn1XvuXqTz+Ymo9mj5pxVz8QIJjB9qCPHkY7YUDDjkVncJmefTYpVfdyyA6XW0U5j/QEeQhUf06YUAhCWsSAhLCMAYwV/c3nuPtP/nN8/6/+B/Df/DN8+fA1IA8YxEAHiAc876g0wsbKTpncJ4iQFdCgdMgCtAG5WyBnAp4GCKtzMggYTSzHECdmBS46R7xATifgT/4M97/+dyHUwCQQKzrCcL6gWMRn71n+Ltc7bkIahUderYPzjZoP9KaJsjZOukbu6at9X2h4dlrIz8ZEF0f7Itd8Es17DyYamA8OfGz1UQSvRntTG5Oh4eGYabXylI/bXljfV7RP2lAUJzCcp8wiat3VxeiQbgLBFWwibCNDs+DKUxCxNdFYdcKADA1dIxlA85MDT6rb4CkHmBoGDS3Tiz1V7fpeaGgiXqc5O/WamBMTuDO4E6g39aZaOk6nE7btjG27YNs0IWXvPcLTNLnteJHAPOGqW7YJDYpszZMogA6MmTtqKWDtFQRHeUz5o1ue5NvzroTaNaOyTub9IXDql6/pWGWL9Z7bYKDeW8Euee4bEzyeiNXn6HlvMPdA4BSyv0g7EAuBUZz9Hfd33hM7pu0/3eCSKmgaOUJwoxjWoKEsDrZEFXVmFXq9a2imC7RQPobY4bECRWCe5zi5tX6q4cAqJAlCARniIEPKv/24y9YRF1o/hUWbokBecW1gOZG5O2oYK9z4TAmsU9gowB7dqqKFPuT3SSTWVcObDivyoaGj9yes/YLL9ojLdkHvK7btCSLq0UhEaIspPWD0YZXWaB6zAwuig7kwZSI3rgOCVHzqU/w3V/gk8g75f7J7nv5gVwoKmAngWEEpuYGoGDKC/eTe1Wpn/m+XS8mfVYB/TTp7MAHXn75wAlS9O4+fljz76PtpyCUn0LENK+ctdsDo6LThcnlEaw1jdK1S2Bb1/CUB6ITF8giNbt5IzOCmXllc5nSSM+SqgH4+jaD8EUA++EUBjxSe+Zp3TnbPkN1A694p8+bAMBSXkG3Ov61LTtthu0tfVe1Ws7y4yUMEiRMAMzrZeO7u78HNQxvtSaI8sMVnyQ87CMIazj4AbNIxZEXf3mNb3+Pp8gDnYdO0x/hGWfz87lDi/DTMWR8f+cY8d5f2zX1A7bp4HwHFyO9buveOrWuONmpqqZPYx03D8pgw1mF5KveDORrlbt+EARNxvdrVEqOkRzOi+hggVghX4hp2D5+CAeJfKDWS3XC+7XuEAM/BOAsZk59x7cxHAku9Ake8DmnoQWGW+jYc6WHAZmUND1s7wJnec6Tc+Uh2stoY/42+qK4QV5vXAhdPRq8IOPaYK2TjcXsNtv6p7bVP/0hb7/fYlC7mkL2jVr+XMBDfbkeeIj+tOUYniB0eAhiEkzQ0qIdP//O/xPKf/tf47X+/YHn4Z/hs/A0iCx+vKuPkDIGl84BAiLERq00orN+iTgoAGgboJMAZkDZAW6ak0A4NSHvCYAHJCcvoAK0QGtgIeDy9BX3xJdr9F2piOgKh9tmPmbLUAfY32ziKjNu/8IZEecVLP8bauqw7wFv+/4k9Vl0GePUuLQauF5mLXykfn34/ZvukDUUAgNbgSTAi2zuZ1W6wghkRO/1v6KOBRlYTc4VPXPuzFn9buBp55vre4V5EzIsqaTy0HKEl1RYmMyIVEAcX9Po3cIM+ihLs1VeGu+kzxYkheENrAzQ6+qmbknZC73dqMOobLpcnbFvmMLplMJrynljFDc3HpCXno+xJCTfzU7G83/9+WVRFbO6LF778OL2EcyOXf9WLaphSLxEe5EPOHBZRiOTFtgPiV78/g1cAqDU8E7QmQEugWHN71PCWIwUx783PDpndj2T8t5vPtdP1PC9RWvjqntKCT8oEiie1kyq95DhzLgwUSy9YXexkBujrqiXv7aRQla4Rp5geFjbnPsl+kCcUFPW+YTIDy+g5AMfkewWzjDONFQbrRVDcCI5mZ/ekFEoiGqLHonnKPjx+h4en77D2D7hsBMai3jjmmab03wsQtipTXUMyRjdFE5rTJ/c0bM+ol9e2dVWmaODp6QMul0dc1gsulwu2bcXT0wPW7QFDLhhjC8pg1hO/Q/qTUIttHVwB9/HSfDGAOTXpXlBr+E3sKymUKn5C77aAvJeR6xifC6wqCHY0mHsxw5DrnvWDhOSV/v6c1zoJZCeTffrs9SrCvh0oruWbbNcef9dcfMyyMXtcPvP/LBSKADUmrnh8+oDL9oTWtDjDclmwbXcQeQvgzpIrS8hkHozWAGkWyhHrIa59xxtVRppHRgxgNuI5b/HfR9fxuPx3OXs1QP/TlVU3MO0PJig53kSvvr33D/evJfnmMLpUEcs7HK3PbI
0sNAdY/ISTJLriSaq1WqogEjtZ+NQYF6zbE7ato/cN2/YBT+t3eHj6Bpf1AUIDQla1TnwtLYWFjOSTON6ZOd8fr/nh1G0porLBDXIe7u7f9bEq/sFA40WvjrW0VSOAW4s8To3LoYFAcZHtz+gVJcaoHoBEafDxV7Sd0Th4Es80lCfQdhDpXxB275XoX2IcDW2V8LiwtZccp+LWxXDefpcXKVPmJXnHfgVu8KXpMldg6/tUHiU2mClJeeTxo59rqZxmJ/aHbBEpAD9cSYVV941VKhylCpV9fhOMhuz6BNpH3nufbtsZGMI477oFylwYE/4Zl4iogyCRk5OHxigMdDyeP8Pb//i/wvrVCX/3P73H5cPX+KwPnIQxuEFoQxsdvBHY8SBpxVjt/gDVxPNEehC4NMiJMFrXc0Eh8DBvHNKCBIIGHjYfZnimBrw/EfD5lwDf67QUY7KIswv66XN2S3872uTx2Y956U/rqLz6vSn58hgu0dAPabMnqr/+Rh8+YQORt0/cUKQnQs1wFYZ5AIm5gBGHyzaThp9Rb2g8sPUNnbyMdE1+C1RlxJVYDctgQDY9EbGcJMwaWqG5gCymXxjM+n7FE04JpvxWxWU/ItusTopEZLlV1GCk4Vwcxi6SVQ1AraG1Bb1vWuJ1W8HUsLYLmDWfERGFscjbZFAQV85jdqNPPi+eESHhYp6UAXFmvxvTrU1Ur/zxUk6hB0f6Ry6nvxAoDZhnVZQsDhDhp3uzYekFS8+uv0cqGN18hBTukPAIIGpmgLSqZ8PxbCqme6XmlpX52L3/FcN6dduBvFB+KgC9Vjl3PYqfc/x99U1Q91yxAYQQq88NsFaB+tE7pPyscslc/ilBITGAvjNAWEhB7wNPlyeM0eG2UQ1tkBC2lQDZdUP7zMeQ+39ew6NwqhlKaxsiGBgYsuHp6T3+9d/8S4yx4t3bz7G0MwgLRlfm6Al4+1Avw8Z3cK9KIkRoGoHROOnQQ/p8zjSEZcW6PuLx6QGX9RHrtpr3olZmFGwg6vqTdS2JgIUaxqCbNHvc6ho436r79eBZjnGcLMv7PIwjefL+dbUc+4jVIFL+Ir52tP/nc6lepoQMd9RS2HMYmEQIKUKh135V2vf1Tvnx2qY0VpJjXnPl3bO9X/V++4wq5RnA8f6UUMUBQZeBJhp6RsIYDDw+XdQYaQcErS14+/YtLpfPcDqfYw59zokWzRG1aPLn9JBRzzmYvO4yIkG0FoFIhbA1m/tFQxhI1LPk++++w+PjI8YQ9K7y+v7+flLAEfLDvNJE87iooauhLU2V/1Y9FGx/iaRxMNbCvcq0b6PnfPc+0NeuCaa3DiI16GooacmZQhqeIgxNcMpkoZ+bVkQdPa7d1i1D0IjAdMK2rXj/4T0+fPiA9XLBGB3r9oDHx9/h4fF7CHVwkwhDFRJQMx/hwtPUCUTcPJOQ+WdQUl/zyKjwVu+jrED79LRiXTvaibFwU7oIPyUb2BAz5pAZ00X3emE9jnBscmKDVJqrYQfJ48l4AYWRyH+vI83DIn/nXnbO+38v38ny9+2xiJTv88BDn51eSnqliIfpTv4Jew6hBu0brCgMe1U3t6EUf8N84tUcS8yz+DweKJxpVKuYDdcXYvdZeKTGqCesWP0TAydM791/sn9+GdoPkXMvXEqvuAaCmyl1/nDaEa6uzVbM5plQ12kkvUqu70sT91P08Ay3JgxirAQsGFgGoYHRacFje4cv//F/icfzt/jq/3jC5bf/Ev8/e2/3a9m23Af9qsZca3X3+by+x871R2I7IUF2EiciUYAgQcIDD0GCJ5S/EV6RIiQEL/BAUIQIGEgsoggcY8cX+/j6fHTvveYYlYf6HHPOtffuPt3n9vE946jP3nutOcdnjapf1aiq0V59gRMBQicMZvAQQDpGy1BM30geikUguxnT8O2iqfXE5BsA40EdEEJzwygxhE6gwRCc8NXpY1w++5HmYCz5gGSz0arX7uuWybAc+By+OAcETVtGc7sE66j1fINFfGJxHSQYHMpQvlGpTG6jc3xb5YDXPea1Xst7byhqaHHlvO4istMYgZ8YiAgGWfwls14VbcmBuyXBdsyXYBXwSzX9RgkZjpQ0eS3LCh4LuJ3BWKBeOKoU6OmwtkecQTQBQEOQzsprgoamRg0Aer2TIzWAcMKJGsCCIQTmgTGaGc1cyXNFjw1M34eh4cizKL0b6m0nmgwRNpeppOvPylimeP26QlTDLh4j/jfbcrQBbbThb9XVUY1ksxufOP1s6pPof35OtA2vSyXnaWOqCiAjciAYInLX7KTfzdubU7Jsewsq32GpQ9gIhFQ+36wcMqedAFHhGY+ShAGQIZDR03tmDDBLMbaaQUecItLgEK79cOHsXl8EQBMUMgvW+yvu7l6h9x63qcGVpk0/c4uU3EMC+CV1KSKeKiAqbXri747r+gpffvkn+H+vX6O1BYwT1NV/AZOfIvdicziHgZfCU1E9MVtrcGrMfau9HENwvd7h7v4lrusd+npV7ywbHBPQGgGs8yy23zzEj+F5ooCtd980c1L2cQ4bEAme5Eb5NCBRrKOfkrXGUOBkyl/hU/7OpKSFETn/MaWB6Di0Nvek80gu4bpunIoVltLmll4CYh2hpu1nR3st1+rhfVi/q9qF7H7P0BDv7zaPktOk7puODpErxqpeKGvv6KvmB1x7B4jw6vocX778E/gNnRAJQxIx43Q6hQfSeVlwOi1mjFNF/n69Q7c9LsZ35iSYLrNynPf392oouV5z5H4L4HZ2yLyCbV+cTxf1hnIDjiX/dZnelgZiQreErC5rNYSuRW4cIsK4pkwcY2C9drz86mus13v09R73dy8h0Bts1Lir/IyYsMqqSaaZ0ceKu/uXWNcr1tWNS9bf80XlCBFaO6H3jrv7V1jXFdLVm0LDMFf0cdUErBDQidCEg3z0YEsZlfg7T0b036w8KbyS1Ftl60/SSHEeAWZsbHrbWdlfBBjNWSZG2oR9jdIPqfhGom1MrZLl5PN8UWYcYgKbIdMxmRuFt4oxUc69P0co2CNovYzXmG+EbqVghB/gZQt7S8Lew1yNvnoDoVVZxNMtD5rZq88qCqPM0FuaaEDsGnFxnGn8kRzfuuF8Iz8fLDtS2e7rI4/3ogrLZDKKdx5NrEsFEVN6tr1OedLzT6z3W1Yv30ExGUgpy1OHCPCCGrbtpEbi7xvG/5ZmYxDQhAsW6RbkQmgQDBL86fPn+OSv/8fAi+f48T/5r0E//qf4bB24XFUHuLYVsgysLFikYxl+aYKhQ8fawnFxAxYCnwhyJ/CLud0wJjiD7GBFGqkuJwN38gxff/QjfPbn/y30bX5C2+hmO8/yxpCewgAxN7LFGG9S+baed1tGaSumg7aT9w1K4dmZjF0O5u8dldrOaxiIvLzXhiIVoOz+Bqan+g1iNYO9fj4w7Dp0zRimCnlXkSQrmMnydahSJfC/zU2VNHyCQBbK1sHc0UQBLLHl9WGCDAOJwmhoMffDgXYklfWRFEkc/CGVF0C9lMbQjPTBPGWxz9XAMIYbidQTiYjBVzK35
4b7+3vNpWRu/n0MjK6eEWJCWxW/bgpS19veSOIml1Rrq1Jja0J+ws5Il13/zn9zE9PRRntzI8P23TgJhWudfmq/ea2gm9yvGSoyxcJHVfUzP0080mz349F+mfFMqMi8gKxwItgaweZOG6GEQbTWI9P7Ty17F/M3KUfKxENM9ZYksn0tfpOhfiab9aohXA7ihyhdSxgxUrDUcKisT0JhhSlH1ZChnjHalnrUXIMHxFpW3dnrqyOT+ed2aZ6mfs2AVlXzgYEOwRXXtWPtCiggaijy02RNUmtMZ5wjiavn09FqXTlxbUVb8b6lR9Ed1q7hZQSyfCkAGgGDQlGqoTESg+R5v0x0u52Rhz5yj5mar0g3PZGojyGRhtVtDONppDEgFl5Evu4GUiXVrehx8Lgtv/FDipYhwv6+3WA08QUHgkUR2vLTPV8kF3xIAVKen3iBE+XA4VzuJ3RjAKrP1L76HNdkoP6WGTZ8XQQYvWu4I+tNg3103F1XvLo3UDoG+trTwAdgaWooImo4n844nU7wG4WIBNdxxRDLeTVSqQCcXxhPKF4TYjLPc5HsFdDqaerhrKrc35EaiTTvmdJLF0Hvq1144cmH3Shgnq3FUNTaAqaGvhpdmPY9xsD1/h7r9Q7r9RXW9ZXmD+prhI47Bumx1y18Buodva4qw93Pp9kNTsx6eBXh1ZbUt7H+Y24a7tZ0/DySEt37OTDURGNvQ06UKg/k1GMeGWE4MTkK/7WekosaipbG4TXk+0a9X4zTubGCgvPV/wHKRW27pdE/poL04NENg4Q0OoUXkRsOueCmItPcyBz/0YxX3Lg8GbZ3k1Lns2Ce4GI+MXMd7iGWMjLpwKd3igY+LDNdpKI+Jnp1mTDfOpbdl31V++cODP6PldvklILZacJxwJw36Vg5rZ7ofgjyWiXZ3oOFn1DvNsPWd6/IU5YSW7mYYqvIqc183TrQTT3hzYoQQAOWgQtg6RAPr5R7CN3j60WwfPjz+PDf/k/wy5/8CD/+x/8V7n/3t/Gj8QXO43PLP3sCh2f68CtW9P9x2ZFA04GQGorOBA0zg1qs7OK0FcCpA36bGeMenQVfXD7B+ou/huUXfw1XqZS/H1P95s25vXM6xyE3Z/Hprezk9rdYTN8g1EPU6NTrVzcRn+EVQHEOfkqj3J3SPl7ea0MRSEFPcZ615RLAQHo9/SGL5XHPIpIRwh3U0Pswo4vm/RDRG5GGWofQRwo/kY4MUwBYBMRdbwfxkA0mjTWVJQCmn5rABK8SQxWuKLxyr4ww26n4pNiInVZ2yFjAfFXPobZqHUwag2//PNF1XztoDLUs9zUIRAzEciTmToalt1FxgBmxUztMICZp/+nxn29WbpNzbVd/D6DlMO8pm+FJu/WI3QbcPKwwTp98skTpII3IFegdtXWr3W1b76A8pHMmcsftydv228K+HMy6u53s11A3h4eJzt4EVcHXN1RZHGNoXhIUuBxgW6Z3glZYFHXZCQs8DMnyGBBU2XSPnmmckwBJ5l/d/KtH4aMn59Nv8VbwuvDkEDPuwvNtCexsGK740bB8G7FHSW/acA8JcUFot+9AFS+BxHLoPF8BXEFkOeHYf6pyTESgcngdBtuYFgKE4W7Vu2vNt8rpPLmTIjSDPyk/JBKHV17qoWdu/gmlzHnztJR2SuiCu3Rqb1QhxC2RFiKMWjelQStuPbM6kk9ulZ89wCpqavk390OSwHQMNKZx1bl8DYhWXqvrYLRNore/SAdE3d9DPjnLRcfSkHtmCGRccXd9aeGcdvmC5V1o7YxPPv4ULz740AybKhzFDCZwfikpm8n2FDNOTV3sAAAgAElEQVRS9gPwHLq+p/1GzhzTxgTr8hBAl6vteR1I5PcaA6OvWLvVwQQeHFPE5dILv2JdBpvLv/dX+9f7PYbcA3yFjHvQImgs4EYYoifPC6n8H6Kn1kIDJ1ngyVBFJD2RBZY30fcTJ98TDdPX/UGImzPdydXeD+XKJ4Zkswl+SoA92tc9N+/7LKfTCUM6WjNjbYAvlTMyCnYsdK1Vx6CNN9g+ik/9aUvWTHYzLC2W7ygx0dRdbLG44yY3oJfPmUHjuI7cezoCNypNBi8zXjsnI1R69zbTUzI5gvXTpuyNkER2dA+1NspeNbB4T2a6e90e7FTfzWcbeeP/uMjjkB3uATt3Q3b9d1p6Z8jrz3CpMrQeMN/QHxxrEkwPqfjw3fe2doTEw5s1zQAD6EToZN4/rWMsDXdoWPAJlj//7+FHn/4Kfv+f/Df4vd/5H/HpF7+Dj65f47IyaKwAmwxogprni2TA83AKQ7XzZ4LOK7ifAWIMw39CHjZPgHQMGvjTZxf8+NOfw4d/9d+BfPpzWNsmTci3Mm++0+p6f2uNH5b01Hy6LBPj0/OB42uWSed4P8qT1N2D8l4bigjQOMsIJxrFRdZdD11YiSV7NOVg6Ak7GdCIMAsB+ujovel1seRhaGq1zfw+qaSMoRuYxa/oNAWLzFA0BmQ5Rb/ZPI/ciBU3EpErky4xycAa4iYGBRWsII8IIkv0gcFA63a6z2jDFLZGoHYPXhqW0ylOKe/v9XY06gy5iuZAM5CoQJssh7VatNWqrZ5YnkRUezRCba2l5gF6enlMzL4uGXsfE0B52WEXerz20E+nz16Xyany6W7zAc8kAckEnPx6nghERkzq6+V7eUJfX5tLbF4IC0QB5YcVJwBwMDu6v8HxreJJ/22+WayPHjcj+VXBoKS5BP4bhdDmQA3Aw/gAm73OgWEmu8w5M68UUgNRVfYUoKcSJeX/0WxVQysIvjWlNH+0fzzzuqjRqpvCSHb5jeicyYBI5nAhAw+pIglkpAHV+aaYguKeBW5w0Tc0FxE4b5gjFgut8MS3hRzMiJLkoOtJDmaQ7ed+mHlHeitqXXPiWqc3U4Brn9P6mk1DzLsUYcip/GHyPkqtCnMImZR3sh9V/ffbNqs3kefDgp1MbT0v55Xer7wQhWFgZkgVVI/4WEyIHKWvGBvVUFdjJsBtuOshzyFvaSMfrR8Se9dlroU4N4L0O9xd/xQvX77E/f195BCCNFwuL8BtgJvgdL7ktbHlOuL5LEL5JAEYHZZ3JulR+15DEyqv2k594gUhRNic08CQ1chaDYkeHjDicg0P1GoQVkORpqLoMR8SHqSADN1TA1f14nVFaGEscHA64BGAXQZWB6nlp4fFAebJIgxu3kYP2gjDkbTCr2SPFg9lhsSPapTMV75N4L+Ht25Ifvbsgj4GQKvdLoZ5jQnqAU6e4UQ2PCUEycS6VDTod2S8Tb0IzUDssocSSxW7U4ihaCl42wjjTeVL8dJupPlJ8iZJkbnha0cyJPY5MpTSay0i9LXLlFDb9sKgzJenuHvj6TmR0tzw7Nktx3zodm9KnZUTzmtcZ0g2f28L+XzN1qN3Vh6t+tvccu+62Cl64rgD+Vg+j/xbeL2AszQSvGER4590h04MlgYSUjonwiKM1glnOqGNFVcauJ5fQH746/jB3/+HePXZr+B3/+k/wuUPfwef0T2ejVd4Jl/jMgaorxAChjlDLDRA0BtD
hQVyAugEyDIwVt1PHYwOwWkMQE4YxADucc8v8K8/+HNY/8rfwS/91r+LV6cGTBdnbCfhHRHT1MzbbeN1daE3LrRBZa8xDBXTRrPv2IniTUsM5zU8i95rQ5ECIc8BJOlhLwkBFTghjBxioJ6I0AfAgzTmH6YUDd1wTIy1CwTdDDUlJ09iJDhqU2XLk5pZRwigrokVF5GSHyPDjeYgLAmsJri9RtU91oVU5mCh7HN3Tqu3eozesa4dS1vRl46lnXBdV6zXKxo39OuKta8I1zp3J7Kxiud/Cr3JkyMWgiIXoG9SDpSiAMHepoGHw012xOyOnimA+GYPUuCkwcEV56qc3Sr5/FEjCcc8ifLxjCm5SFiwpczvm7DZ1xMHt1aRNt9vlHUX4JLzP59Yzu+EazwZuAwA4GBQJorSk/2uihUDcYly6kdpwIWg8RKnyQG+gUlB9xvxMn+XJbUmI7XphhQzzlj97AA9/zfNlI0ulIPtAxk6l3Mz5fZO+G+/5omx7sutgnNAT57cP6Sc52DydUnPS/2es03H0uWqVpAm3tU0bBQ/3TNpH4qJ0pZNBDGI8oY17Td2xceZoRjmuRSeMpXuUG6zo9zqhaZi/sxLw70tiDI8hQo/0/e1XaE09x0qw27oNxYlsQ9y3UKLozojmX9kM2qvOOtAUUSBrD8lPCYvC+RjuxWJNmVutj6z+SjCYWL7z/kjxJ3mVaBao2YYIXO9oaT55cy4PD/h65dfYB13WMcdRh9o7Yy2CICBgRWCBimB5q5EUOQ5qjQigAx0C40Mb74wGEvZ4znH7j3sMtaTBLtplEB2aQbF5yAEf0bIJ+Uvyvc6xii3P0neBqitMtwb0sN0Iqm5G+fq+miD0/r7nnI6r/vIDZZuTPdnnC+BMswsDbvet00RXcu4Yv2GjHgKaJ+3z4YyJX95uCqy3ElzxeINkIaeCektj2rbkcBsYICbraXdEOTknf1y7r3vqXvBulxhcmORrgU7z8VUabDiqO/gUG1Ojj3PT/KKShfpZ0o2ECVHpyEc1LEZ0dFcOx49+Gp6ZhZhSFkpJeQTdrNYbbsOGpYsve4nr/LNUOU0qGnMhS972/FX8rOZULd9nanhuK1Hiu+/hx/aYITjcoSXj/fig6updT1R8X6KUfjhPtQ8RPp54L4i01TN2B53PN72/vkqk+vnzk/3fc/+0fRZoCcCBtmBnDAG2TOiYajP1kVvJeOBK64YPDBefIoXf+3vgX7wGX78v/23+PIP/hk+6f8/fvjFFT94KVhWhNEYrBKUIZq/pNntIwuBFoAaYXQ2PZU1Xe9gCDd0OuPL5WO8+tFv4pf+7j8Af/ojy+e7YlbxK46fZuBRY5rLwwfL7lBv10zM7dspBLcF+N+7un2LT3u+9s/XexM256Qi82w9NAfx3e5gcF/vThDXz+K7h+fpG83i1M7j5T03FAHqvWNJZ8mTPhuTAeLwQMYIIaBDt5ulmDAG24k7QE1ArestQGCAWPP+COm12GShaMIBRka5bU0TXc8MR/qwvD+JDJgtznS73v5IsXRPorQoGk7SlU8zN3h4GLihYQEWQhsN0gS9dYzecV1XNFrQ6IqVFtxTwx3doXU1KGV/WGE/qas/U4fmdQIAD6+w0DfWkDS/Qan2dV+ewuB9Ylb72SDSTAGzvA3IE3mZ2qqbHgaaUjkBoLktbPI03xSZMrK9/c4phxE3cN3sq4uOggQPigNt0fgmhG5TDBRe5yAKgcGgsuBuyHhbjLWO4BYjqvehbEuZ262gD7Bq1BweLhQ8SRNlCAJFWvJan/1GeoWoCsGOMa7qUUQLXDEO75ox0AcB0tDaCY3tBEVWkJzhuRK8fWBgyBVjXCEC9c6RNcGJeDq7bv9WAKLx6LRaCBoZrjSFRNRrkFgAVmNf3IwWXjcjZ47I7M4JTIzY4eFgOYNkec6o0MCAyGpGMcYQDkWQqJW517OnWckwBYnIbmjy0D5XYMjkrhrEiQca52m1XxGfa5lrm3iwGSDvRuzmVk16I1ZfrU9koIgJcWpPDsZU+YozAV254K1kA1EQp5fUit8cQh2iSwHCyRTCFv8aFpyWCxoxaEgoLAJoKAmFShHFvWMmpSK8vBy0e+4LN2rF29PvoRgKBT2nQS/n1I1W+eo29l9nJhNSugTZ5whYJuBtixV1D/v/MR8TSCTWDN7rPNZEG3hOGKs05bdOKQ9oTDifn+F8OuP+/g7n8wn391c8f37Ghx89w8cfP8PlwhZG5T4PxVDh+3hw0Kl7zbCHjjoUdD7kV0puQGvwXctJ6ABRPU5GGmh9cquIAZSmpyrFZsrDYmublmxUjEeYQi0mDASqWERNYuMX7Y8Yf6s4IQ3alusCzj889NaUJFIvalUsV/hFFhHC4bnFdiSTMlkmRa488VgC4HjO5/y2nKyeIzUUzL/TJK0OwR0TudcZbP8OdFnh+4BHByypPjOpoYgFjQmX0xkLN+OBZDzKh+1jZ/PqU0xgCQysfgZEbzzK/5RXczHk1T0mNmdD9MY7NUma/CGBEKMLYaCZF6tiFPeBUlepxASI2fAwWIBEn+4YllNTn9PLHjj5tohGtzADxWg6efoUY2gW/UwMhw0ZINEcdixdUVMfWIghfWh4jqDgKeVrQ9QrXyAYVS5BgI2RYJsD8EEU5MJD/J/Ri68rucHPPQ/V47jZxQkizreqt9ew25CDKuIWqUCDT4BmI+b0gWKyahqS1NFbqzJX9aZGordZHjMS+TMZGp44rmKWyN95o/vZjssXIDmDyavymU684VQL8XqKMbL2VcsKkYYmgOYLYstZJJrWAwDhCk1Np2vI0iBdcP/sA1x+/W/hs8/+Ej7/3d/G7//j/xKXLz7HJ7SinzsGDzQADQNgQSegXRf1pm0rqC3gEyB395D1BBbBggUDJxANEBb8yenn8P/96m/i03/wD9F+42/g5fmMjhUDrqht1yTXZf/7cemPPLOngSP/Zm/r7WTa0tyF2U52ofRlADRmeXVIXhamHqhDQaHJm0fmh4pMJPfmffh5GK6qm5nc2STGd32M6UG+yV5/Db3y/TYUEYHMMKIgybOFDyUAsvASgSavNqCmoSYOVDkYk5ahVlkCFmg4lrBe9+unIlpn5pPRK21doFSgpJ91QREuyozacgovIBsMgAxVqNb1KAEIKX/amF3geVgGAGhaJALRKa447tQhbcQtII0bVr5qWzywrgMrUeRP0hRtfn+P5VywT+a+Wxc3xDXn8XBUfYt4jz6X/L/N9f5x3n6A3EGp7KZB2/sim/5VIGqQM5SAnNdZtbjRd5pPIY8erZ4knlyVPFlqtJHeDggwmpoJWR1xuxTNjcy5d16HaWxGSEB6CO3ruRWWIjtB5ERsLDYy/Ou+FKmz654JKEY0rzdzEmVIHsENwX3kKaa2o9dSa0J7MRaa4/BbAHX/Svyetk49BZ2SIvsm8SS7SLUwx3gwp25ACHLbg708fZjXMGOic0ancrhE5TQsvtsqyRlG5aGAU0iA8STmBMqZ+4fDyLRV5mofKmnm1JBrAjkN5SQjD1GO95Maqbj0xXZJeZR
QdQxfez2fc9aRxhfPbzN7ulVlYg45Oy5BQ6kyYBpk8QjRcJMWhu95jxNgBq/IV/AQCy2jbrv+0UxLqHNK2FsFnN/tuV3QEyEUyqhm02ZRmzSfw0R/AJHecnZ5dsH99Q5EhGVZ8MGLD/Dhhx/g2fNnaLxoT4Yb3RJLaX9yjanswinU2BUR0dE7PpTIkFW0PmT9empoXsey4f9U92flb9qp6lFW852E55AMu9EMse+HzAC+8hM3gDjeCUNWKIlU3vU1NqBS1yHkA0W71UiEoF/7KDwM5xDft3FGEcr6jXLLK0JfMyPxJDf8Wz1oGGLGYktNMPwWS58WM4qNMPBmTjJnmLJr2fdnyrEhgga/jZOCXrS41/pDG9fn1F/yEGznyZaHEsmjx3AckeOfQnT9d6ZoonpCPFQEjpkTiUxZvCZcZe1PMkNrkUKSnsep1sPR34PpmPj4cX9ve83kmzcGONEyypyNISbrk5f7GF0e1fbVa23fzuuirgfL0fz8GSp7j2gJ43jlrTdx9ebbivH89wn3OZSSo7efWCb9oNRS5MIWs3nDAtVPOwTPP/oYz/7K38YPPvwMX/0v/z3+1b/47/CDu3+Oc/sCbQHaGGhD8+KOZQCiXkPEQLsQ+ldXNFzBawfTirWtGLjg8+WCf/Wrv4qP//P/FJe/8Vv4mhuk63UIpEn8DgzxtSQOe1vl1hy/ucfgw63VFo4+e2zNxXXB0r9KjU/eiI57nyo0q1FIZP+3N3+7499aeb8NRVBDEcRRFoe89ASs7KebRcnVlJjDTo2MEbGfJOg1ggCjk4YayFgt8XU3wIgEGeJhMb0omwpQgsRYGYIrIO6wo8+kt4jGtmsCNN7szP3JhFNdZUL++wApZAFkYOEThJsaE4QxpGNp2h5zQ2O2uNcBxormCoVAT1FET8yW1tCoRaJWz+s0TGEfARyS+by+oHyY8mPe4aqGA94qaOafcw6R8rN4GG2nd8s086SJLOzlsVE8RZtL4KjWb/3n4BDEZjhSjwcGW0epKCIAZBZMs8KTdA/c6JHMf0zKx8RItwKvvCXbvwVb8JaA0r0npEhq3b9jkAF2gl83qgqL0q4ak/QMNxTxiSgyhEexHcXvNARoquzM61OV9+ysj4HstF1zouzHdXtCXVGDOyjkd9bn+fRvM1kbWUA+//HOns7dSwNhXFQlikB6a0oYXBJOkSufZG2YQKrGogrGq1EwvCeL4Tja9V6LrTdNIzEe4v12o0HUsp/Wo482RjaySsKAEJ5jqPYnmxajA2KX4FhX9RRri94cie4TPisGb1Jy3jd0Z4cHfkua02w8Q/qMr+VD+7AW2QkN5zeFiKZH0sNt7t++sWkOWOWMw/G6jbi0pWqm58MpdRPQuOF8Ptu17vr95dlFD1MkQ0C3+VpmICuT4hfB4vVZydc1Z+EWPNZ1VhrfU/h+LpwXURhQ5p+Rh0iUl/gNUO4Vo3PXtQ5OwwOqcbyMz9UfuOE6jAU51/64e3DVdZxZph16BW8q/Sb/NA+/oh+HRbA3OB6VzA82y6ptXbpgu9ArVAPGrOx4j2VYUnUMwBKfe61OBMPXLujXDaBJK1v5ONNc+Vn3tujfRGRHbfocbwzeNaSFQEH7MV7f78ZThRnoB/yh9NHr2P4f9bEDQ7fqsxs+7Dk996xEv97QeW3FnEHjs7BDOtYVIA5tQ27PTfhH9YBityTfoFRaVm8VOwQZiWf3vcqfs8etr4sT2tvs6c9OyVAv57+5JqD8/m2Vxw2Nb15vlqyfal5SaugmO5blOT7+C38NH//CX8LLf/ab+Pz/+EdYfvy/48P+R/iIvwL1l2Dp6HyvoczcQBcCXnSMrwT0ioB+wpCGnzy/4E8++ARf/uXfwmf/2X8B+o2/ia/ogiaa2643i3nYGefeHN98X55Q3gGdvQ/lvTYUEQDiBTCjkBpzjDkzAWNEImA/rBXFViDYGY+5grnHAIEgfrxHfuW8hjkJp1eRYjgzkAwBVgKk660vfhIBVVTEvI3UG0lPK3rvlgCW0VqLRIjqUGLgeJNnQjZj999mKG7vkV4PLawCTKQDTcDcdAzcwX2161r1lJdZcKWGlVa4R8UYBBlqHGqsPzUvlJ8KOdNWcOVw68ZqPbaaN76btGMDH0UZlaN3HZWYHkh5Pa3SjSqBY2gIjRssuAAWisbSiBeGjUeZ6QMeRTCmTKn46G17qVRQ5K3QcCt4SAIkD/AjVEUmsJydPwLscvhnqm2idPwIwOHJGJBjmo0I+S/GHAqCJSMegj7EEic3QBrcfV+7oPOg8z1fWw54/ZYU2ZSoCkgBAwGj9meEASgNjDSNQ8rkxEm7DMjoN0DFXEcUmv9wkBDGkaK41qqCzPy9uh6lqS3lp/u2fTPcb8RPpI0fWEU2Q/o7YQMS/ELojfJgymz1Ctq+W40i2pUMjyDjFeQMeVIWfRybKTw6rd1kpg1FS3ziCjiLel3RyzUOYxK5qmj8+okhNEdlRyE+B77mG1rx087NC2Ucvg46j49jOcFksZnyuEwUM/eW9kYFpgy39W05XdUsiH1UeyqYl1b1ptmbQezgRQ8s9NDiKnqhBDNjWRZTEu12Jk8KHkPg0iId0sk8K/ZsgGKtT+qaFNpRPGAyGYVcMVDeMBlbPZO37SoP88xGCL7oBqMBYrEb1LoZqdLLMXldHkT1PtB78bokNw7Pyjt7CGquYv7uijdl+NHkkVLmTfFLt0OvLeat8/mw7NC6Z+/nrKPy4Y4x/Do5XxuCK4t+UDdQtrw/6aF7ZnwTXBWrmbc4yMNkGdfrFeu65nwVubGV4cljsr0MpHRUkt56bPPpKcsJeiNlbM/Nlhd4yF/lnw0Rfm+5Nj0ctAal5rw9MPcHz0/feshljNw7Wn0zkrZqqZ64h43CwvlippwGKu3cePV2p99C8Q7Tg6RbjXpbyet4f/LENVl0czq+L48UmX8Gb5kNO+/WsHED171uLc6+ErIDIAzTMVWurrjKPfDBcyx/8z/Cp3/xL+OL//N/wI//5f+Mzz//HXzafh8frD/BsjLGuABLA50E8uwVrhfgfjnjrn2Eu2cv8Pmv/3k8/1t/G7/4d/8+1h/+BdzJcxAIK68QIrSheXjdC+5dGMq+Lz875b02FOkRjSrvVBiyKiIDeR0xTalVPPksF4auOqm5lQoA6C1imndHr7MVrCAaAYb6MCMQdY08pQxrAzWI3Szkp3Ue4tZ7xoZTY8ASuupFYgYUOHMvpevilmltGNikgBSvDSpingaIBlpbwLygjY7W9PpYZkGjFVda0yA2CGMsICxo3Ew8cqiuCSorSJzhSAXPe6b+VCa8VaK2v5sxZ2pHAhO7oaj2NUEKZxJeOCCAgWiHR1WJmPt8ZImPMMOd14hM34/IL8IFPHsfFI2P3iGjI72aTNF1A0ZtOwCs6DXVmzJ5hmymNdrmquxU1S8H0qXvAOFsKMr396eOmQdAbWBD84uggfmE1k7wEE1M/wAUmvY69bkxtTcsT1F4u/k7xVjk4aNRT/SvToo/5wqd31iUe+rBYlPnJ6FEG+ot2NTXg6b5e7zyNAR5Dg
W/gcfClDx3C4o3UR4ZB2+qfYL34RCEETxJcXowzeNJRYOLB17Lcdq8EqnXZg312rW13Vfx9wxyqlqbfx0ox/43qW+H+pf6gYEpxRPNHEzB0ZzU4ow/9k1RimmvfgavovpZ+UmEmq/pcb75UHS6bH6bGBTCU7XWVf6k8qZ3zT1mRjGyJj3XdWqYPVIIGe7TTDYCSzvhcn4G5hOqN1V4qxFCvqWh12nRvRDNCCIzX025kOuy5YWTC4XThBlIXM66J948FzlWke0K2F6D8740Sq7rPb5++RW+evkV7u/v0BqXfZjjdNzAzLhe73F3dwdmPWiq7af3BQGdUWVFhl7X8aqHMDNpQnc7wMo18lDe+Zaqo8L8wPfGC5vxp5yvA6+hGl6clU99UmMMx+FBylcdp8Bug+S75PuWw80NRfd3V1yvVzBO2idWGpu9esoaRweLgVwKd90cBnhfMvfedlJmOZ4emWb4M/qefdvc+8/n4YEJRyCbR9jG8bpN3Mb52uRed1uZ1sfd8znnKnN11bml+P/Ewcu6PlgeTQi036/xqs+N7ZtHKNw6WA3MkiwFniOLprO6MBLgce79s1bcGF7le+y/WDM/CN/rGO+oV3DvcwnZ86alynFzwjVs416497bHz13Q6YLrD38dz/7DX8T5r/89fP5//zb+n3/xP2H58f+FH3z1Y2B5geXMIKy49p/gyxcdd6dfwvoLv4aPfuM38ct/598H/8qv4OvTC8i6oA3G2oC1DfAgNEma/b58R8sOX/z0yvttKAJy85JAQ6SQILAkoqsCl+y6QREB2E8K07tA/+k1zxDzRLFkdu6SKiKgoR5G7OFIrMn4eJDqQaIMpnc71TKlGAZYhnS9SnEMtDawNIGmsDcvEvJcQ5xgJBTLW0K9ggFjBi6loa7/sCS6rZlC2btdcU04LSuu1xXruqL3gdE12TdkwdIaPHxOmzGRvlEiFEJrAmDt7+wZVZ/8Zow+AUaG723bAMJDxT8LAFcFUvF4cEWMvO5H05Ud9y4MJKnrbk9pZ6U356MaVzzBcoQ4lPXMnBF+aqzfuYfRfPIyGyBu4arZUORteR9N8QIfgreqHB0ZtKpXkRc1SKon0WmBGTqyntkuYHVF2w6WTexZaFoqt674Wp6KCPMYJdyvzOUuMXC2eXQN9DFgKQaZ1ywVJMvmm31ttPmNwkjUeFE+FwpjeoDJ/Kr+URMQx6fHBKJGbVciZ+MsBV9IBUXtVQSM4qnpio8rs0Q4IKdpFqYZpqN5d0XYlbMkk7lqo0khpCHQnyWEUnboqfi0QuT/m40yoTgGp/T+bEdJWY+/XU6yHz9FPTAGZO/0CZnr9/HP6y4GZufe+XdE5llGuq/DV42yTu+1+v2qt6xDVE9SqzR1wml5hsYncGu4nD/AabmAKK8dJ78sggD3X81cZiU8j5CL7zRJxrvUxTRZyW5m/DeZqguaN2KpRnetdyA8luD8dutZOfPl0QfWfo+vvvoSn//kj/GnX/wEr159nfYQn+kNDxURXNcr1vVqbbmnWf5T4yuDx2ljKBoT/RCReh7D9y/MAMIHcjL3ybGslQP5MRfFMjX0zGlxq4jNNCzk1Jnzx8QaWj9sXct3ZPx+yIo+XmGYd5HdgGDzpakLxrqiLRfzmG42vrYxaHhIds6Vyx793NTYctNZxWoqN31MFCTopFrXLoz9wy9P8IOnnOuZZ81yfp7D8tcDQCb4QeCLG/xv+9GOd1Wel3vQv77dh5nXezfyUM++K/uuPv8YSwyDnS9/8A9vS1eWiUsi7Tn/pR/K5Bz4GlPwkuRtmG2MJma/L08rfsgqcXgBBEHE4cC30A947MTj+WxulmJMhstMOBZTvrXICdQvaEIQWQEZeMkXyIe/gme/8SN8+Bf/A7SXfwz84f+KV18R/vTV1xC5h/SX+PD8CX755/8qxsc/j/Xjj3F3ueDaGFdpELZDaCGcV0uUYpjYx/N9uNl3sTyyZoKHGf5bLO+5oYhAtDh0ANEoh6EDNNJIJKKggSzvid5OooBpuFIpnoMEpmx6+M8IhWLAbtkwI4gmUvWwJru1YajVdowOET2dG4M1aSXUeKS3qqlkGUPzYm9K71IAACAASURBVMB0l4UBSEPmINLEY35aOovjm1I7Z4nyOQlmxTZvQ72XCCAeYF5AtIJpxWgDY8D63tQDiVroK+Y8DD/VY7sBxOWxwJlj6efE3x9jTo+NLZncLNBT2dLvTMmIGysSyPnfebpJANi8zqhWdbNvyWR1cGxXd+s+lc33tQZPEOphP+kB5GBoDFHvnbF36dabUaS0sQGMVTnwdzZW6CPgQvz4qXEFRqVDIVIhKHXMP7fhGWpA7VBj5Hnbiv4knpdgOhW3MAOxe108VFT0di11tBEzFNkp83BlKb20amhaHVh6LOWp/tS74pmgvIUQm+QJZV413zvYO3aAds8m6iW7UIHtBkLNawVAeUc1lKICL/2bIk9PHdktGphSkQbtuZGIQhGzPrN/7iexnjfEFDtYOCs9dcaMtjbdo4O/tmaPeRT5M2L1ScM6qkL2eF+0FnIAu9s7WXfyxLpfKd+fEtZXTx6vk/G0k82DEJAt5UzEdNSWj89zP+1biTyw9r9wwvAPi6WukVLOKLzMjbkEwrOL3n7mNV7OFzCblxy7F8Xcr1T2Cs2Z1ijCuZ9dWk6bSpWOmg+QtsMvCaSrgUsMK9ScRL62yRslZWwEZUvMtf42cL1e8fXXX+OLL77Al19+gfv7O2TeouSlte3qRTJ7bNK0VkSMNk5lYC4vyhCJIC2NYBHO60Zcm5lqNDouqgnTI9qw792wR0zKSvXyTJ7rawqg8F8dH6NpmoGQ4SYbLNl3lwHBKw0lJTGjmMpCxgmn8wXPlme4nBYQnQAsihnE5bMZdcJA6X1hSBjTGELNbkObZiS67vbJMI6VJ3zOZm/BxGz7vfw47nuT4iHdYR09aFZmxvFYjaV3tn6Ro+uJNTxBiSXeGi7frB7EIUOl99isCMwYz85rNOEmp+16wJTw6Mmy7menHM1ISmmqAKHKlm9jJgnwA4k3aY82uFsCv5L7OODUCdwZ98sAjRWXKwBasPIKXAbG+QOsH/8Q7Rd+HpfxHJfht0/fgxj4Uj5Ax4C0hnuTFh0axboIq5GIGCurPuvtfl++L9+0vNeGIiLCwid1h64ASATpWqTKN3miac8JgAHCyNvQHLyJmCPSMIXSW7NNHcJGDS1EA2OQJcb08BlLymvGIgcHCmRUCR2kIFFCyRWLx1e5QkMwLKeI3uqmACcBtgmo2OjHgt0BTpx0Ujxt/7OkrSKAdEAEdFrUUOS3vAmHx4cbW/xUTU9h9IrjQaqQkoVRVSxReoAAs48usM/7ww+F4Eb1DKheTFUBKx4Q02++tn6rm4NCMkCQ/ZAx934HQArI2rnR1se8/0DM5+Rx5IYecU+ivF0vwuMK/Uynxsi7fBCKjucI2MzgdorjzleZaaqA+rEDerNHSCjyh0rznBhZRCAjE+k64ApaqUbAXf/9HTfw9DDs+q1CkTmLhv6TgTHWUFK1K/WmoTqPQJ3TO
j6dbl9ff30Lomn6NeHBdvbmFmL7lIcLlU9vOaD1U2jmBcSLghNJsEvxjiA9pypfK8xBcEAYEwnovhi1/Y33gdMyubnGs3lYWLCvJZd8VwG8Z2U25kmwMXDM8xfzEQozlU7HwOJPN3R3kUyLFTU+zdU81758MHG6HaeJ/3ufZg+pzSk2jfBsOMqPcbtfWxo+2Pu7T/a07r9Pa298aTI4zE/q3nZy8yENRmOJeVcFTzBGw/l02RhyvNdsombmjVsZM8lAAuKAKAw6ziMLHYAwyNqo8sq+ltgnfsOYehSHYu88OIxR6vUh5OGqAI0VPa9Y0+u/xRIsG2/KlG96ZbsQmQdj8SI1vughkeTydyMz0gADiPRIwJ/0Ztd6wz2+CEM0yTgR2c1yW/kNuPKsFT9AfwdemXu5VpXqPV9X8WHGf/vdjUDh+eJvS/MXbF7cqNbjtjPNVWT8P95tlkNoQWeVQaMDowta4yKTkx+pSKs8tSRZpvx7IkQbf+KUioRQvE5mj6IaGv1tFr/jY7vChjzwOCbb1YjgdwSEx810WFF/ljfrQvuevNnKnOeNJlnymJHTazC8501K/pzDI/1iDZS9ljSoP3naC4HdH+nFz2KZlrnIftoKmOmleOPgy6eXJ4U1fuOyBdIVaai3j5Bg8D2E7yGyYsUzSL+g44RBSfjj9BFWWkDU0QYDuGCljrGYc8G4okPxHwlhEWAZAEiUzxGwmA7TvyfGb7dswOJPgb2/k/JeG4oAAskChp8UqZVAgQ4grCEyACAkFj4mCp7sXEOLuvcGgAmJRmb0cSVH8wiFUi55Kl4VHc0jQAAaeu9Y1/uNwFORpqaqvI3DTyzHGFhFwDziRNVBKHnfsjIEBJw2vQtJTMIzFHWjV2IChEHSQKNZfgHBsphHi/V3jIY+AMaifxNZ/qJuYNnUH0ICJT/hpPw+f9sq429YitI6j1G/VEHueS0c6JZ/Lrwp8xLM1217YscU+rSNC9h2qXhYPAhSrI2tjpmu1W7YrAP1VzNURMemSkfma6HIvzJN95TgVj/bwjU/TS6fTO3X0+365gz8LQl1KCR7z6rqDTWH+xUX33Ji5/kjUmFQQ66Dd/Fk025wJWCIehj1saKvVzAaWhsxv35NsiNjNf4Wypy2jj5f+65asCk6QV8jlIxd2X00A+GHYHiFFhQx5k5rHD/zRpmyF/07uI/DyJDWGF5R4GzPzqGLvsfzc2a908eNbgGy4wYvH6HxMXJeOnJv2VH7bOjMKT/cOnXmthNGB8/sHqE4H/RPxPb7HHL2sGI07xMApN6VraWZVg3xGXQCsqw4Zb8csa+qTs5KVVFkynjmsh399g3nb9tkwtuxylbC2Rhc2at9qbRiSqW/GCf0XhHBzYdkcpXsUgqUPnHQxdZjwNRW2o5R958SntglFk7P7qGS81G9Cql87m1k8NA13o0npgnZhlcXjZGchlWGMCRyp+n3AmKKcDMlwYGaeD1ZePbJZxHAxF+3npzDDsti3apia7WPUWu1NsUwhw9g0orrfNdROy7KQoaJ/PdZMZPNfis9GHaU5rJQnO96fjR7J07W69uGCe0wTunR/7PVifl13uS53VoaamJd0z9MCxfG5DzD0lVv56by2GAnztjmWvPwoTCxzVwCsf2S/qPiGxzvFiPcFoO0IV3qcj+pzE+6F4gfHDi+2R+s+Fwd8TKvOQj3ZuvTuyF/Hh+8saiD+nO/HGG4PR7P3Fm1G97I5IH5TcrTF+TN339SJ7/JSLbI89b3tpBBi1VuuifnFjfI9HNfs+Q+nN6b+ftuRxVPoMBJT1qLkiTXt7c46nC6UCeGZyvjKmesDAh/hU6MTicwBs5yBa7PcCWgU1esIYwhDetKYB5YRsPzTliZ0NkQH3WsrNEwTRgrW066p3T9kfLtGNreZXm8/1tKrbLSMcyTi8vpqYEdo396fVNPfjrlPTcUAXEbiiTQDE+BWEIB0E15EUA0UTSjQ2Q1DOneFn6C1+HbWB1uDAiQA9EBxgkiQGOtn6RraFoDEKd4HYQzxuhgspNF0bh5kgEM884QfQejB5AZwhjCCtWEQJ1CMVNQ48eQgcbLzEjMgV4U4yqHE7UKbsUl+ilTw6BhB/wap99A0GvLBdQV1Il4Mraht3HFnOscaVJmu6kqAPEI2ncX7kflQxTPbaLzqWRpt2Ohg9sA8RXM92C6guhs32sDw9eLBaAOchBeUBCJgiK2G7Q0v4FH8ToIb4fde2hPOyg6YkYJ7DPHhgDqOcaqzIsZKYlqPo44k0NcNx0AcyP4pqYP4UvMUwQzxM0n9Vkq2FVsXWs9gqqgVG8nXzv33tt3sAGRJP4K0IJht6D56bAQIHYaMmjAw6paGI06BGveTCg2f7RC6Iou91jHPWg0gO6wLMov4JcXi9NDGjTGaCUsRcwopfTDTUVsF8YYCyIEhgCnT6ejSos0eZsVMS11DfrRUmIUY4HrvmKbUYhBfAIti4bZQcBieUssoTRbLDyRQQSqCrJ5NQjZCT2Fwa10zxR9yt99bIHzC7RibOioB7fiUM6tDWEMNIg07ffGm4kFaqdl49PkBvTbPI9gocglca0q1Mo7WFAMHQMnJpzaAukr1ruXIFlBltsK0tSYjoQGNmCti3UMEAJxU/7JC4RPCvLoZGMm9Vh15an2Xlw5d/ALy6fiTeWhwQwyaJ6GaT5K2aGcHSfDcUVjejYkSejJSazZhBk0GGBqICw6P83mu+gAYvQ4jMft/FEmPUz/YDEFuxpFHHxTzpGUxMp6TfxGEwBKqORMS26cGzJAdnug5ifU5zV5ss6P7sWUsTVsikTNLQOCBkKXxcaxAqIOdUtjLNzQSA2YbfgY0tgxoGaLYbI2jj0M/8StmeSrphMyuJuXKJCMY15RhnoDe92w/gIu9aapwQ2Cm7atT2dVJNI45XkbbTNskTiUXIaM+fZBNnyGrmsEhl4c4mOvyo/LbEHHALDEGomFCA8Gehf0QZqPsRNkMHg5wXs6RCykrIQ/bsLX8ycC70QfhG03MNzPl5liGmWowV0GWWJ/0TyXCtHgN6b5ejDpc2NQ3j5olREZVkRVYUj5ESnVhOMLwWg8PRYV/xHAdmOb1a0HoxQ4VaaQSzfi6ZiZyZzTF0DuwDLAw8nOjLjSAHSAVwAD1FbltzQgQ418nr5gEMDCEMN8aepTA/Nwr9YtrcX/9Jd9XqPEzzEXZJ5n0jGwQnAF6ArQautPxkcAlfFD0y5Y6om1j2zbaWAP/fas+I3KA8Dzobfk8Td9ht9OH45qOtjwtVZqkaTfsd6MN2E8rB6iPNKLjQFQyijzu6MUFt4OXKrBUY3z9VtFo1WqRKMIaZ1H4zgZWMGZ4w8Ak4BlBUTQQRC7AbEJAFoxDMMsllJFCGpkggTeHlVXKCP8mS8kwCM59QBnr4Qefwsg5QAGcSXGI4XzRkuBglq2n2+lbOqZTr3fXXmvDUUEAnPtYp7S+emTgig/4Re7taOG4PgppqMnVxip/DN2YAYmsVPIsBk502ABIq7dFVy2Gz66Mr+hV77S0LqIM/nuMCHjJ63KF4cRltjJ
nJgngM1BOakJ19aqnG1otzqg28MmYPWaRm6cU4D0yPHTtzEidWi4dZMoOGiN0TuHR4pWsTkBPbKm7hd298fkGVNAfRphnNFKnT3MQACPGG5sxcnnyVYiFGMHWTLN35G6Vcfqa7ctkweO1yJ1frTTmpBTQ6iEFUQ5aKunbEVfOywxtgpkJyXPDaHLTZXR+9Y974bkZ75vXNdQz73NvAS9YTO3niw2T2TjOReaQZiJjkfsd08ka16DHmpqJ9BDVozR7drtbn3zUEUuFyRS6WzuKSkfCkrdh1M+e5BluMaNSa35g8Sp6zaa83XUefP9bz8N6DRa4IaFCI9g5xPFgws5v7kcnvA+58KVJmD2CgmPoBtDiyHG/11VUg2J3MtIGNQWM3g1QLoZTms4hu9srcO9QRhbEOjjsU0fakTppdjc2ZoQCK15qK+E0h0rX/jrdqRzeKka3JfTCefzxa531wS5GQqIUKiNO6VW44AhwrqqNHC+p4InL3Lw/9V+zfukfudbXrv7lDNFKimt9uB6Dm307udzavBtcK+L6K5YfWagVc9g4wtHfIxKGyTwyxLqeNPJw0C25OgjLGs3Ovs/5V+zPIFeeiHQ/gdfkdwvAGjiTaZUGA6RzmoAEEIoyxZ2pxdJwG4eNY9W84Syldb/W5t6nGDzOKx+p2cbiyszYSCdvGd8mPmeHODUrbdhIofNXtpN5paHivE133PVUwhID1bkxWDkEmr2ehOX4bB5FOXHU5eINj1Tvqgh/JR7H4bXZMPnp5fLRgUKbrS9SCh0sJ2/ENNwWa4eyxvPOuaoK+acEEwiLmeArTqxpSHwSooMNz4Pm6OyYWKHPn6wtXmAkqrr6sssHB8oElhFDrAQMYHD2J04z2XGbjV9T4cMzGacW3pf/QunvQwBpHgxZGPJc6SXXfTAFRlaqp1zPcNvAszbACvPnYL/30G5BSgeLltJcavmJ9U18cw3qemhMeReML+YR56fccCt37Nbe+Ph1G7gsIm5RH2TnnXYmTdYd6rv0Ez70Z5sZqCiUwT0mT7b4NqnU83TDXFvqxwdoL3bBh97YMtR5CYZTkfxpd8p3yrPRsL8UtdbGy6ZCHmI4ePtzO97bSgCCEwLEhRzOcVKxu0gX2Q4jnCVEtVbBVCRku7cLlA8C6GdJgqCbU1GB17UAwcO3iyHEQ0N66KBTiuIVFkdY2DAhJEMkPQURUN7V/PS6Ml1unDbL5OiomFTeJj4C4MLkO95iqBTMaxtFcrqiUUkaNLikmufT0O1aK3ZlboS/yQEKDArVAkEHi7+TvHLz4+RAN+LiZMqCMJgWAXbvKHF6o6rcX2Mu+clPr/pIj19d2wk8rHUULHsDxBniAawksFTtBHjdbBvRoZbRmSa2uFiNMq6Qpl4kLm40ypX1HjYVpz81n1iv6ZflJgySQD0Fhr47Ua2U81NL+qYQGAAVj8lUiMWeT4ysVxFQ89jQnGhBHzZNcFWQFf1MviHlHd30MsXwMBopdWbhcpv86lpfi5RrwNuwqykqEqkijmRGnxqIuvo1tRviXFNibhbK/XW2cm2fK4ECOMyyrPOJCnarEUAWkEgLMtFTT6CVGwpT9+Mm1g/qxGF9qQ60Vp6XSjooTQQ+ZwIQMyWyJ8t2XLOTxiJCMUjbEPvYSQicGMsyxmn0xmtndRQZF4FKbwR61KBghsXPHRvnrP0aKhXi88rlCtV+e6U38VlIp5WZoWzgp9U0Kn0TVdq5EDNWCnE02lqGTEckD+eR8R3Y6VEKb+mLOfSbyVt2bKpaZTxM9bXvSUs1w10S6QBep6P6J8ARHog5EdUYDs8Ge4JAzUSqZXTvCAHiFWOoi0xWoago2vIWjGAY3j/NoC05CTU6XR/lI2sCfZE6qkDzIm9TaZ4mKQacR5bHwpvk5hFp7UyX/o5BX8Q73tZCzepIXCN1+u5HSnX22R7xRZk7xJg9lf3CoJtaNF9JIzmsqwapcOA9ThGqfKCoB4mxCVsr/CffA7TuBOT5Vgqn/M7BeMz8e9gObFiEoI3V9nkuY+IyC4P8D7VQ4GZf7ux0Ts3S6TXKVJkZhm9uzcFC6Ddc7dw1nErx5/N0sDmYvu3zYe4p3GVq2V6nLbGGIZvdU+OkjR/DrV/ByXIsvLB70KpK3SEKnZQ8uA9f77w/deeg1tGove/HB9Ny/wnvltUEWWDIfOzgwd/agOsdPt9Ad53QxERiBvqLUqZmLrmDEgAUgGHRlqI3ijFnkzagbo/4af0roD6X6IIiFOpYKLC5Go4lgpjEQHTgsFreDiINA2R6SsGsQoei80ffS2AFKjgVWNzm4IoU0YZrG64rugdMNEJqFTpRwSMFo+TSGE4rkyNtFCIu+A2+6fuuq2pZ1JU2S1kR2YD19OF6FYQUn7sil8xnyefMZgjZjQ4UCZDmDu44rzFBKWfkbtkehnYV1q77O+Xt7b8z+a4gpUcodPdANECn18yN/sgLTioNeAbysC+qFs+I8OkMrxiEtmPLg1ZqCZ2RsDtPMwn7wkEzQ0kATuMlkTDVJg0VGWXQyfIoRp0fQ+aQcj5gdcvmqg6jS8DQ7oaJkg9BNRA4sqVJzT30So9xMkumWE15jWbO5yranxxmnWg7gaErdCxhZhE0kEDIoISXZN1R8CC5+ey+kLxsImU6GUOxnJ6HTnDxpx430aF2/oElxxe4hsylMeynyMz9RXcTuirAMONP2Yss/5KeY88HxSlp0s1hJQZs/G4ArAH1ZMiVnI2zfqM70gxvrGrBjVpOLuHVVlzXc5t3qbSRGw8o9G0audTwdyK2lNkzLY/CNOAV0eoCpI+McpTx6V6n01anXuDRf/qjsl+khl//Ua+qZtF+Sev/0EA72fLtidL35NekjfQ5CpDJtc2c3U4h25IEE0+PZQf+9L7lIShJugIpjjaaKz5xgtA5kvk+XpYc+K01tBYeR7zCRSwi2xaBQv8cg09YOp9tVu8RlFyfU4tVIcAPVySXfzYbAilidySn9n6CQVGiuna/OY0Dgi6rNvGdE5I10W3x5J2xFpLEe8EstBi9cIZjovEvSaVp49hCaqdv5k2XzkOqCOMJ6FgK21GXrfYrhS4QIynzBRZacX5e+4TMlyq9ba8sc9wWsbGUW55k8f1gIBIDXfNMAhYw808tCzmllzUWV9cTNb1nAxVyYAqj/KRhYcaYmi78lQle3p394pNhozYJwxGHxmi/JSyRfnb7x6qR6AefXUPaT4tMtpaA094OgqIG8il8M8Sahp78UhQvMXinuXfGWPRAQY4/PvovVnu4dBQ8PAcVKz6XVXxj0f4pqOp771P9POUvsh3h+x/Bsr7bSiChatU12Rx5o5QJNwrw13DiYAxCjNnF4xUcHph/i64TYEZdu1u6NvkN5gQmFJYiMisfAzNr2KZQixMS4EfmDXxbl8xugKtIbBr0XNDD1jcqXRY0g79J4AmxdQNRJFkNoG1YoLZlZeIAGbNhcOtfFOicQU2iRpf6WNT0NcgUEMG0QlDOqgj2gINyDqKZ4Aj0qr45Oj2ZStQMP0tsT718eIFUMY5GTPqKSy7kli9lqi
AK+QaSjUAHpdQxipg2Aknh9WeMybfbcTorOCQaTEalsjhMWO0BKyzUXFf3KwXJ8vk/UihW0Hitrc5Gj/Jdtf/g/ETNH9V8QhJ9ImptjGGKkl0RlvOemMXPI9H6WPpu7gBKOLXt587gMt/7rnnN6NpxDfbjWtixp88pQ8DFA3ksaobaoGakGO/JmagqYgdSRsFqcMNsbuZFN2F9XH3CNG6U7tL05D+N50oU9IIvDvh2UEA6Wlo9NIUzEMwFvLZ6qVqsHOar/k7/FfZXGxEUE8xNcANAJoSyfehxPgCHrrRhwCi9ALIU2ia3ouhWJ+qZwxRemX5I3mCb2uOsVnYGZlQnVqnC2TzBLL8FQ3onsuJcpkL9nXWM9Ah3Ve90K+HmQQ9uSfAvmQTyvv3RlzaPD//PfNN5Nx5n0qS5UkVu6kg+biLdNkwqjzYITwWr+97xa8iD5tHEkrsqU0vjtn2FGpen/acKwyRprwsPFGSbqqhl0QwxNaGVeb0ATOoaj6T4bxJLFcWn3A+XfD82QeIwynbnx5y7HvUb2wdvWPtK9b1Hnd3d1jXK9a1xxzG/je5V423Pt9qDOGyf3wvb9ZmVA8qwkwuzpiSQ6uXd3wceGpsvIuP5Ggq2tpntnBQD1UTAUYfibGAcjOt1SZpVnY5zOyrVPAMYInnF7RTuSQBdjPdZiYEyPOEHY2z8lRtEOGlY7nU9CKGZBg+V3lIVFoppBgrSdr4ZOi1eXLO4/wnDyasRqKpTx56NYViJTtXw0kMOJjZXtC/bomBINeeCJozbgXsQgpfG3+kssuHynat4udjL25qUU95vUlPjUbK81wnoLpAN6pPvOgLeQCUaqtv6oG0MxZ900X6aZSKT47mwBl5HWu+F/g/fkh8nXKmYLXv5By9/SLl//XXd+oJ90h5StuyE/g/rfIGfXgfuv0OynttKFJBUhQVE5Se7C9DUtiSL7vCaDlrsFiOkwG2MA3Xvzy3iYep6Oq68tnNwAKTIZa/iAcInnMImJRN6zARGfhzxZYsZwqZkNYrDIkBaqSAcF2tn4B7yABAHx3AyW5I0Vw27EqqDDAvARBd1vuNawAlbh8upFI5JCSTJapjQYALnUvgdGpgOtkeOOsNaesKXgmrHaSN0c1N1w1NlMzf1zI8t+ZVHqN6BFH5lwpxKHrcbLBc6hyFCVaXa+9Dfqdj104XGBfrGbc53SiTQarO0/SSjj9yK8j8rruGDwwFbYyk8fDQyTFUEs12ZqVNu+GO9xvhWbw9ZMzA+9YYWySzPv7eRhmgXfvuipX/gTCEEjUwTmA+g9DshHFrhJAcK4YmWPfkl/C6K2gogCHGNzQptiVJZVLaUG8Pz/+EmFuvRSwfytbg6Hsj+Uq+44qJ9tOXpIJ4MnB8VF8Fm2aUBWELcjxUJMA/Cu2FEdkAeXRK8nk3xLhhxJQakRunbrZlmMhCe8QUwUK/8DCrosi45xr58lBWRqzwO7z51IAeig2AYWPQdjW4bu8dae1JAm/9vEVdY3Tz7kBEFDNrHrlq1Ne1Vm9T7WbmfVFe6+s5z0uMz8go84+QJgmPXhqVRCTMkVpq47DKyGRdOuEE40BQXAmvmj2BSlcrz9l4+Pj3UqquSV8n0G4yMSZgAvxj+rsaQm95I8SJvvg81r6nYk5MlvDWdlhtduJ/rlhMrcT4khZvqaL+AAGTkQhmfHD6Lrxt6KUJIt08lrWeIavJPoDphNF1TZnOaMy4nAWfftzw4YsO9wj0vomHz5IaiYgEvXf0fsU67vHV11/iqy+/xMuXX+G6rjpGNAgYbVksnK3SthsxyMItKcIttX/FLOiDc7aaDLgw85xXYsLgwjeRuCzaANQAtJlt59FK45Lvknvpef4wrYvcUNx4Ik/l80lrzAym5AfOTWFgb/SrrWsxFDudyzRE24f+DCWuOsAtMI9VwMPQPJeV74GcgdwSabAL400YesjaPzLW7Q0NA1Lsy7J5L/ml7zWRYsyPHGlutMdMAw+UB78PbO18hOA40r9LD2mdGALmcP5Cd9M8UrLEaWoe6eO2uyrrt5jNm555KsX8znVPhlmyoEGS/WUe36AkzsSMd97bQpufWSo/vv224Y1p3yTek+D3w7Z2WeOQW/nJ96ViDv/5lJyFP92y9XCWAKDfdh+AvZA4ekaLy74/i5T3XhuKgCI84DkJqJC+KaOk4EKJqZwswT2AgDHsZikB/MRU8yqkocdznbiBwvRJu55VGZSGgaUnUghFo2Vm8xQSxmCLbx5sYLDpLWeDMdoA947OHcQNvXsuI8EQPTkUu+2hB6gw12Qq3jGmJHHhsQ5CyRirkGbL33Lq2YvGQEXT+YnNKnpDB1s8vrqId/SlYb0yeGW01uzEcw1J20plEgAAIABJREFULtPaOUqwFS2A3w1LE3jYCBxiN/pwAYNIg99AGtrGxsARyokmRmZXciO8hQMIEHkI0+uUzZyGwgnrr6X2FUHDgkYdTB2NAGnulSZobYGHL4ZhwfvoUPWRjhFEQzF2yekqaFIl5GYdDjZlgSsbPkqfY1WYDKzX+doZzUbsDfaEt2Yk6hvgmOObPfYKYsSxZ8A8zGq81Vv7+qTgqxFlZvwpmOzuCGacWjNFsQBf5BpMwVJk9FRAedbpJ/pb69zG8CQeTlsGU35VxaKB/PQaDSxkBvBm206UNZGP09pHhiBqfTW3xtwrTzrKda+6LdZBu42NzQgxZGBQt21OuffJw4DV9HM5n7G0xRKOtxKCkLcy+fjSa6kWn+NUrMIoDVNUTSnh2NPq8cNtMaNR01uolhNaW9B7h6dmcSGf4UwUCpUY3bhBUw0JLXOBFMsSTf21dSGnHVcIxV1KlW4HbEywi9AoacsZlBsuzKAvcC+5jWI5GQKO90pyWH/fac+zDfve24J/Kp+PUgsBfu24l3JYEDqP723Mnq/xiojNaTcPG06HWkpvC7hBPfboPLadDc3mNPQJqnPTwSB06XAzgyYkzoMKNwgJdQjlhRliIWDEumf00KND0E0JVno7n19gWZ4ln5DoTsy75knS3GuamH/FOu7w7PJMd/HoINypoY0bmBa8+OBDUPNQcDJ6V1nP3NCWBdwWrNfV+kNonB5IzgrFJnGsa0n0XovtN2agUeQ88jXzn87T5ktI5vVNY6t5rcL3kt16NXxtLPSRJfenGaOGGbRExIzAS9CH91dFxsD1+gpfv/wS0mvqgC2kp/KZYMrPF+Mvf3n4r/Mh4ahFvz8c/s1SHz/yHK5OJc5qtiOY6qPEd1MrYZTO+jZWkCcoZZTvb8bgvFL3JnJ+KMP8oin4Pn0A2JjRaFq2t1IEnutw+lSS/2rDdhBg/8JgXGR3dExq3W+5vLVxf9vlAby2ezSNQrs6DNO8k7n9WSnfSRryffXTXPfv5MS91fL+G4pqcswAnX4dckq61lQZ691DR9L4MAR2YuUu4X59OuLkw91OBw0DI0hAH8mu3S3V3FU39aSMNSEy/CYrC0GDeQSxa15+lfsA0Wr5CVZgAMPyAKT3UhX+qjSqsUSBNUaCW9Xy2JScnKMHTSCR2DvBt845hWJEZ6
B5CN1Qb6ZlWfTkc12wriuu16uepg5JIKANGEiXWEc39HhYQigT5F43qRiKrYFeLW/P1NMFm4uKcf3kTpAdydPGWRlVGGM5g/A0tlStzjNERWAyNbikQsV0BkPXrGEADJxOJ7SmYJdD2eKol9hufiqD2wMVd1DPhItboKuntR1rf7UfjNsAXCiPC8hyZHmpBiY10PXw9qnJt3Mt640i2oexWmCGqGIPsetv2ZVpO2Mfosp+MbDEngwDB03YwYMpM8zC93U54YzwtHmF1fgFU9Scf3Ad/mRQPdBC4/ONzr5pyr/1uuualu/VYmFjViWK3UAseq1soxOWdgHRAjeSIfZNWQ4DxdF/u6ErvYAK7TLl6NwYQQxurly6d4JfFqCec0N6XkMe49QLw7mdcL+8xCcffYI//qM/xLCcKL7zwmglDIl5dw8rr+/Is0X73jg9ilyJ8bUjUz5JGEwnnE/PQCxY2kkNb8RhVMpTz1y07Rr5Oinva9PJsipPFLh2esf4Hkl6TmBoiBG6mcoao53OiD0EDx9CaYPgpwJdfE2zXykjnO/PNFwL1bkmKu85/5DpWZ8XJalhtAForrEFImzKPia9iex9NrA/ur9baN1nSZQGRx9qxPOr61222b4eZjiWMTCvz9Ybw7+tdOmTZfsCDeBunpZ2IYaFeKr8a0Gjg5rKdHTIWCF2hS7BPZIGBCuYml5yAaWvxs1ISuWylEV1nucXA0AGuA0s6DjJAhmC0/IlmBeI3KMxo7UzPv74U3z6gx+CTjUpPeWFDdwstIuxrh1EGshLNseeC2e42U4EkG4eklUpzn1B3CB2g5QYvdQbBJ3XNT4Vuk1DEsUnI3g3FZlHaBDRAzEImTy3EMHdwoqlQSOIX4bgBk7z4AYE6/oS0gdevbrLWRJYvktHR/68/RUhP97/sheQBidyY6EgaCSf32+8ymsPD35u7NUqGoKSD+b3SWXXvdys5GLyaTXtZkb7pTvIZe9hCK01MvGIJ7T1eH/2Mi2+c4MmlH/4+jO7cTU7Vw8gjstRjyu9/CyVW+N+yqoeVWfEWeTCjvi/Lz9j5XW40vflbZf33FBkBpGdAHRwnADFn2+thoL1ADEizULILCcA5rAxEQELo8vAYAc8rtD0BLYO9iDIcI4EViIC6QIXNgrUBSJ6JXSelGmcNA09QSQ0DOoY3DDGFb1TJo10UGunMqoUZv/EDSlNTy/J6kdJzikG0ufp3Si4oQRvTv3sKW6qrEIYQ9TjQsbAal5FnrTz/v4KWddSf57Ea4cybIFJjWIJSM1IVHM/gTB5AIFTiLsxcLouXSaaqQDBldMIjcnU5fnEgaKXU1bn6aHCoSCR2JXHJAAzehMAjEYAtYbL8gKXyzOcT8/Ms8i8eUypbM3CFKqhaAPP9C8PiyjrHF/aXMrAdb3DtswGEdjpK5XvCOv1Gjq0fiRY+2px/oh9lbm/NEH3GFftnXRc5R4iHWRjGTJ7FsgAhN1YtzGGObC0fZVTYAa1CaDaQMxQJfF+h0gvz/hc5t/kxobdHDn1+P/LvqE6h1v6oKDvWtcey+qck4X1eDsENXSz0QS5ovjBpzifnqPRAkBv89J8UISlKLfKfwb6WAHR/G1jaBhsTm1B7UAmAGXGaVlsPzYzxvptfdCwEAADK1rcKGheVBAQrQBd8WxZ8OM//Jdw889ktHaXDN/nZW5DgbHk12HIofSemEJwq/HXDGzq1bFgWS5Y2hm8EC6X5zgtF/R+Ra+GxYO19rMCV0I135PxKKbj1GuHhUCW7w3SAWHISmFUv5xOOLdLOY3XteamOeaYyaJdrN2GA683U/1N2ffQsyPFyT+v4ZY74yU0Gax6ppyK7u/mBSjN0oLRCefz2YeaHohuaBANUV7vryVMcpqe6FO/rrhe17wtLEKNtfSuMnnnHVl0Cvf2aq16087jdA/ZL19+gXXN8DGIecxVQxEynJvIDKUD0IMeHQgzNGcRcr8nf9O9Ia6IOk9GetaI5/VyngbRwwU6Qbq+z7zg2eU5Pvn4U3zw4kNgMSOUzad733Jb0JrmglN6MOV4ZAi/Uov3zcN8b8s2bg1iHktiTG2EUd4XAGagrQpkkVfkMtlz6nDOCzR/XXf5Mwg0ODBBi1D77CMB5nVkY6Sc9d6vkHGPu5cvcb3/Y7h8UE9XW1ULN08OkPxgJi0Jmc7u2ekew/WChEOPHKtbaOo7NnjCDcG39mvtTaWsrRLlh5a7XsjmHd+b2cM3K1QN9HuMpKLY6N0wW+x3zGs58+HjcdwsN551+sn1Hyr/CuZ0I3/S8dEaVD7y2Br9LJSjOfC5e3h+thh9JvnU6dLgSNjrgu+u7A9jv+OlWmW/A2W31re25PflnZf33FBkwvtAaSuYBKmwGVAAjNszBhMGWgiH4fkATJFFgAYAQxUAoTSQqLLLgCwF8XpOAe9CChyI6Kmq6csSuV4IjZomwpQB5qEAiwTMA8zqTTQGow+AG0BdjU6Zv8hBGOlp9FgVYAlhsBk8hCGmTKr+akYVitk0fYw2YEjSGCPJUahIbBKJm1YaMWhhPREXAqSBaAHTsBPCa/TbwaSMnnM2MQGecoWw5RPhZgoR3BBhCUcp+5QKpocfurLZzOpg17GLnaA6UCL1viFYzoyiuh7SGir4qYqHTM+leqmPLNA+EgHSNQ9OawDRBaAFbTnjwxcf4OOPfoDL+QNwO4Nh+bSMpho3XNerhSTeKAL4CXnRrssDZGBesK49wFDE/6LQgufuimnWsfeexj/3Rnj16hVevnqp34VCoKfj/m/I1ZKzXnGlvCo6QxU4lO/IU2RZ6qvxMoblNyxtErWm9xAKbdsujqSVfZqW4fWabz+hG1mp8cWvjPZEykfu/I553TOgfheL4xtvJ+3M608EeXWRGpd9PwANy3LG0k46J4Nw4jM+fPEJPnjxKRqdVNmx8DBmdjNl9kAk6UcQyuLWuOoG18bNroFfcLpcsLSme1E0vKaPAWZTHEUgssZaKc0uaI1ANCByjz/3c5/h9373n6OBsEZibc55hY+/rhospJ5iXjw02AdCRDhfLuGJ5+G7Ag3DVJVuAaGh0Rkff/jz+OFnP4evv3qJP/iD38PdVXlp0JbNm8hQtuHJTUV/BzoQBw7Oa9y44Ep+wWRFZ6QYWGnDKKaB8fzZh3jx7COICJbTAuaGZVFlXwTgRiq/aIBPjPPzE07nE1pjuLeg3pg1jKkx9iGPG+olMm/Gbdhj0rEnHW7LkrJ4EsBka1m9B6Twf5jnhRp25D69djG1ZGFLZMHTti36cENvcGL0VXOY5WGK98M9KwltscMM33Me3gfSSyEAcGO8evkS96/ucNfvlId0oF0WLLQgvImt+cYa5immFGt0lJicVNpUw+WqoZXScpZs/yjP8ng6C2mEe0d6/i9W/kkDtCygttgQLT8jAavLY6M5t2iSGXVh3jlqXHJl2HKxIUNsmrFYFscYRf573XAWRhhU5Z8dR5W11FU4yoXh67fhgn54Jg2NL2i44MwnLHzB+fQMl+U5luWEZVmwLAvckM+eignQ8HoIhHWPku3N6/UOX
339R/j8j/81fjIZVt1o57I/AJv2sRryyd9Tb2tygy0ZXiKAmoDdXWtr7qgVkcvHfE4kZ2aa/lJ8X7k5yi82qEaWTMhcJ7fWIVXUVekUvyj2Nb42hn0wDFbkPlDeN4xemxq3lobWFvO0dO9uChmKYiASzPiTwCUE2Hyj3Rjpnq/Bc/aDy5ylfmgUL9jY1QtXsb6FjlbOYxgQTgcgeM6wigzrbctu3PCDKJdkB1Mfzz9UpOC7hw4jHzVeJIO2iqcv7Dcph8e3+nWQxzD47GPF652kXxhfKubI8PttaChCViJkLcqBqRnWp/YCJOYOFPeCdcJnyy3o2E/gFsnME297czwcieG3cNdx757fGoe/UUlPz8dtyccPvCtj21PqjZQQ04e4MccSP7cHWFoqvdxo7+CtNy37um7XXPu1lanfrBy9/1jSWexo4XVp4D03FDkqpR0wddCSwibFXubkEAALiLsxJRfMBig9TCCUTFc0HMAPDXUS92LxFvpm2cVO11QYswHAf8PeuzXZlmXnQd+Yc+2deU7d1dVuqVvqNlKjdqjFzWBhywHY8oPMCzwQwZOf/AAR/Ase+RvwBgREGAIiwEQ4bOMwEQpJNu4Wct/kvkiq6uq6npOZe681x+BhXOaYa6+dmefUqapT1TWr8uTOvdaaa17H5ZvjAvIU1GoxpKcYBLUvEaBYQEwRSKlorG0rrCBSrUCzDCDMnKxmbCx8AZqA1+McFZB43AI/ZXPTbFcgcw/EexFXKAs6Yn9T/54Ac1MhG/GCwoxa2ISFxQJytmg7kyqZKlw4aCUoqEEoArQzwQKk32kIqmLvsvklFZl6ccbkJ0eEYi51DhL5LT7u7rqWVlx8GFebK4RV3WNidHwFjHU4aOhuTCrDqwvZNBVAdij1AvvdQ7z68qt47dUvYrd7AUQ7ELorgTOZpR2VwGaimvlwNPZuytkaBSXT0+xmoybKYNVXs1dpHClO7105EcZuusJuusL19ZUGXzdGIJYpULCA5QguDUAF4QjBMcAcvZsjG0uAgmZFEH0KkLJPTAaKOCn7a2LawTBYPQ4CO1AGc2O09VAE4kGfY+6j40GKTpm/1+qymgm53uQQQNZCXBeE1nWK9VPjcFRUB2mZcDw0PNgD+4udqg21YqpTtG3NDAJoljQe6VoGityijwpZWu8pgvvWKqjMdkpruyMxQz+9dUUONGGadnj5hVewrxOW5uNYhyg3QIkMPtpvmHsLbN2TuQP10Zl2O1zsp64kuIBt64tZrXPaovFivvALX8KXf/HL+P4Pvhvv6RWqtWS3zJGBILg1qqcxj/nxOmj8JtdMHrspKT6NnV4DZarYTRd4ePkCiCp2ux2m3Q67aWeWLbC4uQJUVmuifdFsTj7OnOawbay1vAZsXGOeB+B7RUMcWaDo6WaR1R9uAxQuPQJMXEAThU7OkoQWI/ljVibEXObCTIbLJX64anMtBLLzAgCO41nzxMBvtXCSJhZ6UBXBShMKaoABEaPIAter3uy8x11Cdb0UMhc8c0vyAYsU7ULmRqayBsPcbNkhC6XDHgsJVOz8IwZI4wRVQtlVMKrpOYKwcjHFuAfO1nUpaf8wnOYaLyDty+l8UtAwz/Y2DDU6T+qUcgsoGp8h+NypRVrBhFouUOQCBRfYlYd46eGreP21L+Li8tL2QgciPVak/thBGTGElpgX4YZ33t3jhz/6lwCSpZ5ZRjtcoIaBNhYm72SbTj8YKaWaa99I12Nv+iLOPBrJ4Z2cXjq5yPcBfoi53ocU0L+3rSigSu4e6WPi7+vFFUutqMuqca04re0iJYR0PZrc7BZY3lRlIX19oujYVLNiowKgea+7BWo0xJMk+BgH3c/javS5kLkKjn3x0XHQqS9SRFxL5wfdGk2i7bplXXawgfAxEgaowQlVyN3G0zPgEfsn/j1HIe8qfb476P4UddFGG9ZiYayXDXp/9qF48EkbdPcdRH18h/tXMv4gOq3bZ31JIFHAgbY+fBkXWyv9vhUv9xr9y60uyLmRO/1WNuXFJy/n9Ps1f8xhMZ6vcrrezoNEku7Yuuce++Me3b8XZrJRz8lzlH/dtqeebk5oYyHKHRlkb23KE5TnHCiyBb8lCObrqfhJgZ8OkAsR5EGq/T6OujUehqiCGEEq7dTBT2nEFjRN8BMXRgvG4QTGGZYKVWyndGXYyCpossVVAtzaSRkQgQqDGWikaV8daHHQRd3NOpUSQOPFpHc5WKaWFzWygqCEyALKAs2qJJA9mK0r7m5xRCKgaqfNtaJZG6kt6h6TgKJlWaA4Q3JVSqcJ581KSa2R0rWsxwHntt0obLmCtt6mXUmh1bNbataporTFqgY254LbytoCskMpO0zTBUp5EaW+iFJfAEStH8TaRrAsZTLBYceh37kB5MTkfBHqViCuXAhbXB4IHCgplULg8LmpiSgJAJYZtTB2O4Cwx3E+oi2L7r/CEFK3s8YFhRjc/CQ9KUTB3LVWh47IApmKie99MsS+T6dRDg5s9P98nAEJSn9yRwL4uiuTqUMBTEuM4XqAx1Vyfj6URjjoeI6a+7qpZmk3aXpqAa4eP0aRCe1Bw356gN20B7EK6+T9jpNTdS2JvUtAaNCgGFOnN94PYYqflhXzOBVEz5CW+sxs1ZPAw7cQqQsTzaagSRfkB5cxHxdKSvYwaoBH2N7t9pimyeJdkblajdKdu8vNxxnXVze4uT6iLYxlbhFvya0LOhA/zmFfbxL1iQFufoI9Zrdza8xR/ExcoIM2II1dxKLWLHUPtEmjZRGhVlvjFj+uTlUz+0HBjVEmJFQqkOqC8fn1l2lvH1s5uR6jbv9s0UR7ID2+OoxY02rXtaXv2axD9tZ0YbE/b/GOKhCuYhv7TCDgFq+Ke9RFs6FUtYg9HjX1PLnsQKpURrwldNCAyY9FBEzq7llKtfWhMoMCNOOYkinSPbuXHhypVTCHHBGtJH3G+Xlmjbo3lOfqYYln2/PDrxSvy8AucXdW4/l+8NNXoB3drIaRbWIcLEHpMlQo7KE0u8wg91OMPC4iCggTStmjYI+CCxRcotAlpvoCpukF7KYHqHUyl1jlF6UofyhGK+ysDyCONSHScDx+gP3uAVzmISAy5wbrMRkwJ9eI8SLdRz4GxVxe1TpmgwfEqrH5Z627KNPUJAQ6M2hp7Xu8pSEw/gY4S0Do9+nO6NtALeV0Pw+gyh0lW3/428ISBBWaAMRde3dBjwtVLLLAXTIH653VCD1JOetCqxdDBnCLMIq/k2UgXGTQVe/y6FA1dctUp/26X1sE6fZn7qVoforLNh/4ZEuXHXNxDptp76kUL8wQcxl9VgDO5+WjLGd01I+5FT/P5bkHino5syziBNkVoG4ppI/YiQsAgFHMxJUouZME3/QMSTWBRaoUdJ7mcQvUWkXIg1n2dobzDxUNflnsBFpKV3rBAFnMEDu1AZvpo/Q4Dw5oRSppauCkKIfwbsIZs8eOUKVfhVb9plBVt53ipvdaLyVlwk9JMyMcT5wBUFIwTVgqhWCBT+JkqpC6NTBzMG4HTdw6Ze0HvAUWqSEQjZw8
tqAmjJD1iYpkh+zv/OuCJJDkcfjFmulpA9o21jJVqagpJAniBxTDHbFirKyPSo6F0WW0Km88gIsPpeFxnQ6oPTRtHnij6tfR7mJJ32jDu7hYNSbWKN1/4jNWQGSmPRNzzVhaSH4bH3WflBKA/Gb7YsY9e+J7h8HwBFwDqwAwyn2A3ZonxQVihMGc5MrarrSZtDokdKWLUJly9UV89aMLWLpwshM19RnMN1R5iyQlFCAvJJJ/k4bBQPpqm3Z+XRx4TMAg5gaqA7jZYrxg1scmWeQZaHRo9sTeo6P9g1UjBBE9+pgsieAM9apSERHvo1PUZVbZWXmGboCcTZiiAwxPIWJSJQSoBEC3+qd0Mpg04eDlDTa84PsoUf9DouUyvtSVNIoNPqbNebkyfVsLhvqRS8PBLj9ZhyHcx5NOBrrdCHDJqdPyYHlVAlri5AmTgAQX7UF4wJ1wqtvquMoxpHYjxBgU0ywKiEA0Cw5nmzGUAa0o8amMW5f6Ccral663X6znIdmlDnvlAa1faQf7aa3Q2/fktOKJu3vG1s3ODMxxh7W+s8YVM8dtDa9c8ZrCKkiIGHVk4cnxui4LCDWBwbSNOAhRBSQmRCkjl2Vi2QEi7efQpffu15LLsO33zhNRwd3MHOvRmk6XF69RaA90FYgtNFtCQIu4wPr/e490yDo8UR/vDodRzdO0ZKQNvPwJhh1USktlN+1DWYx6BhTdRBM6c0EI642M4xv3IJ71z9PE6emqHhu2jiDJFaSDgBSCxfTsCSBcIJO30ACSFyQgwrcGwtCXZEiEBIPQgJLLuARHShh4A1f44QmrgDwR307RGEEzi1mKcWoZ+BJKDnHokSgjSajL3psQzALIZzxRjJ+BSyIjez9yYE8cycAPqWYPmAhnRAozunaKnIGeU++t5gR7gnSgoSEYEjo40Be8s5rt27hkUb0YcVWhGE2CCRgGiFgITOgEpkY0iACqjKnhsybXpx9vAQLFdLvP7Ga3j7xrfRpxX6uEKfesQkELB6EYlomLPxGg7KLgMBTSC0DaNpAoIlJ0pmhvcAqFceV3gWDeSk87TBWNa77VsA27Ur+vhZEUEfEwg9Aq8Q79zBzZs38f7772N3vou9+W7hSWUykXmI6SdZzhnIRTa9mX2JrXOuDclahjg9lHEol2v5Vnh0quSB63apHhPnjd7OijcPDNeB6kf536Cv0A0WoXJnNcIANPTQa3ATP4lsTEEznrrhXN6/zjclsny6x2Os4+CQPZWUixWtDCqWMhSj4c30Wq5Sfr8Yf/cw7BJKVo0xqIRsZj2dqsTug2rX2jg0/gufOXMECSg7VUONYfhgkd/fs3IGxn5/pQYM6znTv8tG7ThXjF+vn63zBY1sLpTbS86gCV2eHtbIljbU+nyJUhCAbeNpRCvbA2KflR+k8kgoIsvH7zE/2bJ8/wBFgzU/ZnzVfXVojhtu5M/4F4+ijQ9YqQvTx87AaPS70pPOe7L2BiJXEvzhOtxLTQJVB4YgkQskyneONDkQkIEbO3bbfosly/M35ATAmv9Yc8UFvSdwQk/RdoAjkkSIeAidvinGaGCFez6pBUBRjcASi2+7vzZXCZazKY/M0EAy87go2AxNZOrC13d0bOwE0HAGG1NXtPMOpPUdqIXkcG3kvA0bGBTZWhA7X10kIcYI5l7zEzWq5LEpcZJ3vMtu61k4yrpCrP9R5S4zZcjAdviKo44rdEXFLM+5au6UM9D+rX+Sv3Uak+QJds3jaGSjbC5jw9c/V8k3yY1AGrRs+vmxQVff722u6MossLEx6YZXdkqqrmtS2qoCt+Jg+blsbWVluQLS142QsVHjxoFpf1W9zAGS6vCzWkP0cDWAiNXgF0EQnU9OjJO9Q9x45lW8+YU3cXgl4aNrN9G/vEKfThCSh3QAbXQvlA+1xSRoZrfxr5p3IDGhe3kB6jUsSWhlSVMTQtLk7THcA0sHSnPEIBD0mPdzkER82Bzi7fa38Os7r2DOjJ3UYkVzEC2QuEeSPQQIZmmFjlokYrRR0LEaSq10EDAiCZrE4LiDjhNiswCbJdn2MxAarMISPTcQmWGWOsxkgSSt5gKiqKc68QpBAjrM0SZgnlZYBkHHDZptFJH61DC9UFZGxbfHi3cw63bf2fj0WKas071/H1lDgYOBoJEEkYAgDXa7GfYWLV6+/KP4c+nncbX7HIIQSBKCsCadRvSDs6x9lL0k/PXumZlBywkZ6/mDQIJ79w7xnXdv4PDwDvq0QhI7mQwM6W2tiiDYc3qoAqFlxiwwmIG2CXrSWdB3JknoIZCu05PPkp6eR6JSa51X1NNRDJsHVThrHpuEcm4l9xC5c/cu3n77bVy9fBV713dR+KVzTTL5OtQTCphU5LhAPb+0pWw5r2oeX1EGVRcHDV7vZ95/GRiIVld9qInlpCthanoX5YTPQ0ntKW8GnhxOT2I/rkJWsuIskZFntFKmpsKa/HoBuqrvJ+ToWllrxEASljWAmgYoV7xtbguRqp1UPue1ll9m4NpUHUhgcA5tFPE8lbZ5EH3T0eoeTlVuyFm8pyKP4QCcU9RTt/DF4SsfL7Aw8PwylkBy/++fmttCgxP20Wisin5ffzdGQCs9Pf9dvdfUvToNxaOwu7xfevpe4QtUeSnrfZPs/7PyWRmWhwnsrIHwTy4Bfp8ARTQadBp8Liyrlo6Y4IFTGe0fpDkKAQwvTSui59fzpBczHmtNCbWQUWGfqmF1m7MyC6uf+h4/04VHQ1E8kciMT893ggwSJQNRCkhABAT15QFRD2b1TGGOiLFHSowYWXf+TLNw9+Oc/0VEQ+tsZ5qSVizsOxZugFgnzPV+OGSVIguASE9nEdGwPSQNzyPWU0GSiCrYoqf9qJ5bvaMGPfKJNC78SoJmn691X5YxMEPw5NGa8DQNFUAq4MpwrkuN5QQzAsRP8xjN8UBRxuAzoVLEyBX3yvQQjTRhVEagZZSUUV15/buG6ehJpcv4CXe5/ZURO6WXDvo6BqI23bNuBZ1Z83QpczQEWdxY4XVeM+iA0eugrQLPK+Yx/Ln+2kyy9w3Nwk3tQwakNDF8QOAAQr/e46oyRsAqKGjaQCAU0GGGJnVgrPC7L/4+fvWn/jG+e/V9RAYS+QlqniSdwMJgCUiWbF04ANKbqaGnAkEiGu22gUsaDglpIGAIdWAkCApgxhLAkhApZgBY/QoZCQGEaOOlIY2EaM8rv0hmHAYRBcFguckk6GmP1NuasFO+RAGTvLahp8dBGqtLwIjoLfxKEHRNiBtlZ1PuxklYU/Lra4+nKOuQgSHk4poFIGH8+ur/wb3ZPfzld/5j7J9ewKIRUOgBWiIR20loHSipmuMgRe4dAQ6GK/VbGFM1bClFCCJS7HHv8C5Ojo4hKaLvVmAiNBQMnLdQVUpg1g2AWWiw0wbszGaYNw1Cw2gCKyBEGqJ2ulpCuhVIIoKfxoUCoGSAxQEhYCjXHqBkfrkGMgns3EdEA7wWywU++vhjHN47wrUrEbuzFsO8MmXIMqP232QhsHkCnS+xzSuvGboeouTjP86FPubtyqcAyknU/
XpSmSOAHnrhYSfQDZZk7XTgxv6bMvy3DfOpD0pSjjIs63xvLHef7KKG/bqYk41/lQGZ2F6orlZ6OVBCvwFkWQSljSmqLxshtPVcjWU8cjtGRsFadY+XD24srspsSHx8fpLbc6rfiiZr3XOqTMmRh1kqu6/Sb6dvpaF++1n5rGxTKvKi7EZ6xtq6T7l8Pxra97J8nwBFQL34B0a0KSzZZ7PSiSQbiyb9ZGgAT5ZtZnWCDw0iRs6479NbHKjwz1oKKFSHoHkYjSUphBvxpjZMCql1BUQNYs6eCswNOLWqUJgBBgpI0mndwmD2sIAGKaq3DFEEEyPGXhV4Ubd0SQKhpJ4mosetFl9rBVCyS2tOIuz/uRLut4+U+xoQqcbEj2wmUrOTSceGkruzE0ABg921zM2o1OOeQ674VDlzdJeMLAdSpWhM4FlalyccHmntDgKVjg8/uc3lJ6glRor+3FApq72AMBgmVUQkf7brfVQPI2u97u7W3lTrxYfq7ISIDgqKDsBIbzzrOcCBtCk35uIdoDvYKP2u+1U6bm2u58avVWFbBMBOLyI2bzri6pQ1f6yiybVPBgCZoeQKN4uCKGeP19R6rRVtXfdN08Bzj23UQUkQhLAiQQSpycpA4hYCzVnxzrPv4p3r7yKxAiKcImIVEpooIiGCJKl3RlAAqZHWwsj0tCwB0DOQiEESdZ0DYImARCSugyMBICGaEa+Ua/mJRNdpoq4F71sRAAAgAElEQVTqyMrCuRhBBMAKfXAJZJ6IdsRySAzxPDtphj5oOCwnB+2Shr2xwkJqeHcKWLFoyBIASECTGMIrdPmsZgatmauPumxz9K2XSbPPvnEjutzn1B9FQ5WPdhb4J8tfwy80fxEHcgmR2E7eFAha9CSYJ/dGQAVoqI6QuWM2IspayK3x95Ogjx1iXNnTCW1oEGaMwIQVekjsEAKhIUYbAvbnO9jb2cHe7hzztgEHAge2zYeEGBOIE6J0aHpBl5Ie555DgWhtrX5SxfIsLwgBshtNnxI4RQQ06GPCvXtH6PsEackAlXq21uUHw0GiobwgYaPoAiINZlf8v9Le+jTlrEFUsmid3Q8RG6ZQ8R5rsYVB6wmkllsRk1DBRsWt5q9jkKh8GDduxOsHN5dNmNzORw0i0UR/NxXfeNlEhCZDQNX81B5dfpMPWPZQ1ZyDLr+7rkeM0cL0CEBASp2F6A/lpdfKlcfreb1xfg4M+YvVVO7a4En0xOjtNg8DHc6/OscjbBsPmsfhZVPT+Xl6Ri6+cPICOvfsPgzn8NNgmn9WvudFNi31h7soMjt9gt3avk+AojK4RfiOjVqgBPhWBmneqTtrl3yt8nPuo4nbJp7bWkI/znJeoya+z5SeIH7keL7XbqHK2yUfz+0KAw3uK5+9mg0CG1DlIZlXkYFFIuapRAxKMO+fIow0LEB39ZkbSOqRmC0PSkCShJh6BZL6zoQx6dHAMEVFSmgYkSdXhinaroxz8eqpXGzzj5+KY54bBFgiYgOLKjSBSRVbQUBKCVFSNnQqb96B9irmxaDLwTyjgOydRKZg+Kla9bzWbuOlzWuDDz3pzj5X68lr055F+C60AnhTa/MMpcY/pDKPMZ8olKGi4c0TJDqsdJ1Gh2+sw9PWa3Flvig4jrS4MTalvlWmyMALS4avGXhRmIFYeefkI+RQPAOKwoVMU87TBiuWMLg/7xBnkNGBN1PbxoLLvcmqNbVeaM2VXIFN/exJJesdvlLNEg2AkBpIE0FYYSYBXbME0gwBu9Y+ABLgybzFQ1nIABzEDLbqWmAIgEAKIiWC0qLCQ3kOFbAOqE/0cz8USg0SBwg6sGjeHPVgaiAUcyp1NfMDQlKfSTGeEOIcQlHfrecylvEiqDFrIY+cgQLzNtK7KtkW7FqvQLIoSJbDbyFgEZT1uUXZFLvh4ULn5D1zIGeb1xDJ9L2DvHQoobQA3EuEiaF55QSILfq0g1XbI0DQxF0sWgFLgzYlcNK5WSPjzDuNdhxIWOuTG4yC5eIUx/fuIfUdZm2Lvf19hGYXfUe4d/cQq+UxiBPmaLE7n+PiwQH2d3awO59hNmsMgAa6vkOMEX2MiNIjUUSfOixjRAhATAAlDeqKE8ZevX7vt0wBEMNrpEm2q40NEcHh4T0sl0sc7O6Pa5yecVmfX+fTDMonlXluqJrXZhKHyV0DZcegU+YzACC9eSPB6kkKEIHAASA02gLrW2DSXIC22cPMOQ/OwFOThmM05FUbiow+5N2ydWO1ho2kfqdgyNM/YXEP6W12vDdKRqJK13PAxnSh6gfV72k93OsTy8FmdzHlenOuI+cHgrwWBrVJ9YYMHp7Xx+H3k0NCkyPw5JWJ7ir/d7rxNYz1G8/SucTzOX5S4jvLnpCymVlPwjavPM+zw3U401uHoJlL6fuQjZ+VH8hSmVNKow8Q6vn9UJ5soEitsTNvkWxku2ahwmXdpTkObGFPs6xfOqPaZvd1G+GR1ghK4ggvJ4CYt6purBSOdwq2I92x0Vs/Xbmi0/qOsMoYyYJ4rP4NGjFoq4c9VcrCYBuxKGQeB577KgBM4JV5qQ11KUPMDA4BnBiJg7Y12ck5BDBLPu68dlFn1pCvxBYmxQDFiADd6Y8U1QCT3hKUcm6LjwkREJGAlDIg4rmK1PhoVBE1YSUwI1yTQWg/RMNhhPT0rpzXqFaM8riR5rcAF+UsT1QVdoVkBqfPO1XDbbmTEqlBIhNeAIPE0AXc0XwvwXaFGxC1AILWkadZ7BlT/6lDoh7CAmJRSwh1WGAJbfO16D3XvEturnMxWquexhhtM7E+zWNE69XiZ8C8UDw0QUOdcjCREICIlDrzDiJAgtUdbCypAIAGvoDMACLOHElI57kQOtbbJUVBVl7mybmVhjTOjvWkPOYMGIjoSVTMCjmU3WhbT5WdYvq4vT5V/USZN/f8olCACwDB2SSVrCme/8iHlTKtoPpeBiQB6MlnzE02Gv39Pqe9PRdY8/gkECj0CpKhARk4k6jDTj/Hy6/+KL70tWcRb59gISegFCDQkC5Ax4aI0bYtLl++jKtXrmC5XOK777+Pk5NT9EnXG8zTiUJANl1TMj7iyqzWNZvt4Jlrn8cLL/wILl+6DqY5iBgBoUS0aPYxKJCkf3MKiLxC9vKzNWmzb4C2rRmG0kGpEO51J4jVO6LNg58wSXrMOaKGuwkhJQ0lcuU/hIA2zBCoBYmuXeUxlhcn7/ZrKGAB9ofycSwtzw39yIbHuqE2fsqPo3ePSQ03E/zB09/C/zr/Jdy89F2jT1F+LYSeBYl6EDQ8TVUBNqBNaoorDTJAV8AQDgMdATbeIAI1hNPlCZadJjufhQaXLhzg2rXrAALu3bmAe3dvY3F6jEYaHOzv4eL+LnZmDWazBqFtlD9IxKwX9JHQdYQYG6Q4xzJ0WFBEREISy9MCB3oFkigDDdpvP8raeEYGPb3pVPWjjDlVGxAMcVGZNzg0NxtrriIRpCjoVj1Ojk5wcnyKKxeBhgIoObiWIOhNbwhAKt7CuR3wdhJg+ZeKkWaCN5NHCUaH8/xk7nh5Bh1cMO1A
IoCdzGOUTyWQ9KZX9EgJCHYqKkPXuB5a4G3wcSq8t6QiKDye7HcQ2/oQ9YoWA2tLSZpniowWM6P0MGnk8NKSm9H0K6sre2RtUNtc5qSUVK6iGMViB0BsTlg8cdmEh4feF/1K26CyTnmEk6NYbscoPYx6VVcSO0hEV5YG4Fp//WUanGsnAmqQvT7jYsHWX/JQ/NE4MKpjMDKo2QOSDIC0OknQNAoYJulAaMBVp3WMK9qCzxcjxYjUm37nYJ94QHK+UZ9x/ph129oDcDNfzLkEq35sXYw8x944g+MJZCLwz8Z2k22xvtlb9XOtiePUEBOl8iDTmYuFN2hmewD1oRz6TgFA44nPemIaXCr127BkcDbCT10W07ds6RjNS+YFPheqgxaP9TzGNLItxcHqM2xHo63s9Pa4ioNpOVG78t8hdUzoyudXjMIjh/RxXv9EziMUn/W4/sXaRtUWIdjOQ88oJDD7bXB1XM2AvvI9tH7v4O+JSRfAbIKp9lYyfMsdggcKQ9/isKO6PNlAEXw1n31LVrWd2Q+0vax6D58ZHT9Bax82vWuLMjkHY2a33QRvQywbVICJys5okgNBEwvZDZ7MG9Y17tED9ZejG2V4uRiYFXA3vnlqMWY9mMBBdxxTaqFHuxMaJqQUzXsGQDRwREwgWX4iQIDkCozvLEZNYmr/IjFS1J1fSRaChgJuJAdxCKDK6yWfnkVFpWHAvGH8VA9NIupj7CDTePSIKAMmQ4+TilXXu3nwDFBSfntIncsPDIMbhuNOuc6UNLmkAj8EUAMykIgwVzAsAyhFgYc90ycA6EBoAWp8FOxNMqQL/1PGS5+qX2LC3e915d530tbXlY+I7xaTj48px0WZcwDFFBHXmMVDgdbpMAssV9JpeJcnLh2qXkMNvfAoT46u37tCU8Ir1ahwoC/X54oOKm+DrKWO16MrLuvznr0VnDitEjd0qjdWtVYMgYrCa09Ckir7KSuh1dANRoU8UxASop4KiABhAlNAYwqmPx0S4+rhJbzwzrPobx7iWA5BKQACsDRIKYKZEULAbDbD9evX8czyGZycnCC81Ws4jYOSDhYFB3pFQSJAhbokeIL3+ewALxx/Hj+2+xKewYsIvAuiMAKKyjizcM7XlahbNxxI8rjmgCh9sVJFQmkTIogFMfXoY0Tfdei6BbquQ0ydehumiMXiFIvlAl3X43SxROo0JK8JDQ4uXcLVy0/h4v5V7O7sY877lhZNPdbUA0pBVLacTgRa20NQWi9eHTkM94yybZJckgLwCBKCaKhhvBgx5x24AS2UQIhgMS87dgrKZJLHu7AFW3u+xiAVWGiKHZX7mBkcCKFhtE2DxnJezdsGB3u7aNs59ucz7DaEo3sNWIC93R3stgE785CBIoHoCWlNAFJA3yYgRayWHQKAGQd0SOglgc0DroyVbz6ROs05/QgsCT+y8etLfmDQGMiB2iNQxPhFNZ9ioDuz8f6E1XKJ46NjHB8fG18OQGKly+CHRjBIzGunVnbB1ViK8dEGkJA5XuAGZAC4OktKXo9AALjNa8rBMYf3yMYmDvaS9BRTgoaV990STI3mSCMHgotu5QYjeY2CLCcAA5ttzNyYrA3pMuo1d3Q6dt5OWZZ47sWiCrnHaPVg7Vl3rvFVJEi+hkI72bNUxsCAy7ncdKuvbAgM+kJU0jUQzCPavTaSJnf3EP3x04LiPTZQ69Qc5wyCjiSsEXM+ATcPu9FclQSryGDNLVfqErPXlDc2rM8QEZhUrtZ9198JyeVWKjnDCqht4+AjRdV4ZT3QKXQsZ4dlmPj7k5cyRGdFSYyhper5vC5GY77VW88qqoNSXgpOCEV+1G1WNbYmFhTiWbOnZP1avk/gOozTiC9vJlcBfU37T7UoJGU1sCTjH76h+m+6CdaJx5+PLCuBpQ2judpMCefVPJy/7fCKTfNU1zvRpjFfQC0bN7xJfPWds/6q9w6vDpgixu0u3v5DXX7UiPUFtI3+A2y18B4MQxitqS3KEw4UnV8G/CPPJa3fk+V+zTzHovUxlykieuCqNuceKDfZ7zNuE5leVoNr94lGDh4dakPFKC+XCq/Ov842LtR4bnUHFAIkTUwdKCFZXhwideUXihlQ0dCRlIWEiBqqIbjAYjA3CNxAkia87vseFHvdRTPQyF1nPQFjzvNCAFECc9ZEy4kyTrC5r1LWL5G1q/Q979a7si8AMed9jNobxw1yBXWS3WP/JwUYEoDArvENQ+MGEyBcMW0CUQBTixB20DR7aiSjBcxIGBgFlosjpBma0CJwwOniFP1qlRV+WALgWjy48ZIVlaya1UqCAKS7uWWsz99hGCplRTl3xkxQI1AcYJwsBdgxNRe6E17tQJ/dijK+uUYzf0QqwSNWryq6TJaLgQiep8h3xqGqsdYlyOCU16P32TMTfEJkbAANW+uKdVbyafS9faoNgfw3OX1GDHa3CNl49byzpukDLuSpnqv19unuN/kjFv4lNmau3K/TRVYa6+3rQaekmpeqj7bO+74H4Lxn5EXmD2ahU+Y7Gxm2TrStauhrRJXxE668F0mTyieJWK0WODk5xNHxIQ4P/ecOVt0Cq9USq26FPvY4OT3BcrkE4DyKkER3cXd3d7G/ewFXLz2Nq5ev4cUf+iquXXkGs/kOmnZuXg4YGMs+/+Oip8lYD8+RP9vmXRl4SVaTcNaqygcQjDc6BIVvrr2o1K8rZahQe11JlBdfunQR89kMxwBiH7FartAtl0BKaClgdzZDbBowA7tzxrwB5i2hbQFmDZULJKBAkCRomJF2Wpy2jJ2GsQqEBQSNtURSyp5RhR8Y7ycPD6tYLpxmKa+7PAa5jrrj+tmPR4eYN1oiBAIARowJXecyk+A70kwzkwWiYB0FEBob75i1LAeKhmGTeuAEISCEBm07V49gAohFgRm2wGgKIJ6rjDJeYTMOQJcJiQLLMP6vxn2EoAdzQtcd4+joHlbLhfW39N+NQ6pmf2gwapvVwC1AqHu6+b+apgWVsWvASgFrUGg5v6JaZ6h4nJyt90yXMZ+r5B2drSeO+eRQJ9DfKW9GweR7eafSSOU9u9b2wgMzrzcZXP5GBumlCi+vn9JKqNQwsNPKO3RzRUP2XfdKKcJcPLVWHuoape9aewYW1gbL5pSLNlBeT7AEf+OnHkupSGmjDiOV7lOXRwZkDPQGWf+iSuA/aM8nfeGgJsv95pH7macNgUhdspTFdgmDxFry8KHH3pNe6rY+jHkercnBOzY9YQN8RpHzhP1jK2ePE018+nSUSqHasumfeqColDN6TG44oFKonghKfDhla2XCFRAZ/l1VU4TyWWXCLXDLsq6kjal1ui/nGiEghNCYshPQ9312gfaTaDRcq88hNJQ0D4kr1G6bieXAYNLEo8Tq/k7mzRE5IMYOgk6rd21lbOA4mJDE/M9LX4hqg6g+VayESYwxNFdCiAAh2+k2WSdADuliWPgIeRYicwAXATMjpir8gCgbebUS6cKRmBA4oGkahOA/M7TNDtr2AG2zj8C7EHEPI+0PLBQGkhDTEkkWiGkXi+UCq8UCUQC
IpuJ1ta34oAxntpRhCKIb+przRqqfKYNoTD+1i7vCC0zm9k92MhaG9FrmwT0/FCQqhkGZP/e2GBqf53PmekfVMRXKq1IMKLKTtAzc1Pfb6V/udZB774YK8k5/MeqHom49bYw3ZtzmIWEOVm8GGu3EQoIZAoKEAnppx4oGLgOlttbMBbUry9ArRH+lpInnvTUEyTt/9fQVg6dqcdJ3ZZfz/CKjRe8ECn8UNzjga7k27uoqynOqH1X9J0A9lMzAtVgTYUFMCcvlCilF9H2H4+NjHB0f4+6d2/jo4w9w5+7HOD1V4/f4+BiL02N0/Qp91FDJEAJiigUwFj+9EGiaFjs7u7jNt/DRhx+jDTv44Lt38CNf/jE8//wLuHDxCgKVxLJT3nnj/q1JjPs0Nqa8jIYrx5XLkbdSXr+1oW9r2PkJAYOcHRX9uPeWkmOlRMuwbkAQY4+LFy/iYH8fH32UQAyslgscHt7F7nwH+zu7ml9KIoCEhhvM2wbzVsyDQUME2eI4kyL1YJlhsTvD8RHhSCI4AY3JJ93GYPNS19Wj64mNtxY9ZqDaQPni0LMH8DC79XGv15uGMkUScCRIBJZYYtX1iEkQQoPd3UtosIuG5uBgYSd26qiSi6VY1/jVzEI0l59tNnCDJrRo2znm8x2EENA0ARzM+4MMcwCDqTVPPdJNFw+FyjqdIEqHzHfzGEQQdTg+/hh37nyM46PDM9S+2vjzvwieZzDTmY1ZglgzbKZEciJwOC8ZsYQEMj6XOXz+XmX4WB9T4X4/3iYZAN/u7qp10++oQaK1eo2Pkwmt4t1TZHGhQR8MDzGq1ne13gbvM77u3piSc4zqQ6ahYDCp9U6Gt4urvJnVBkB+TzUWRQ9CmcMzh9/kx4Tm5/38nhSXp/f1+iFveFQl8xwIXFfMa2Z4J7bRmc4qhffY35U+4htz9fXBc1N/fbLmfI/KlA66LY84r1A1JlukbtnGyeC+wfFPXsZvvK8p/lTSxP3RwKcfKHK5AGTFcGywZ3Esw2eeiDJu6yNj0FP1rhvPj7WcmVBYy/2MRwgBQEJKlEOl1HW9AaUEQVN2EsxAU0PRdxp0B8oBGw3TsqOwCQikLvIcIygy0BNS34NSDzeMi7LqbupJdyFS0GSNpuzm3bkB43YlybwKKuVlvNvN1o9BXhBmIElOfpvgBixnM93HKZqvPtlu+dQMeChK0zDaWYP5fIamaS1JqP4wzdGEXRDmCM0OmhAyiIIMFK2QZIEuNpi1t3SHOCqQV0KUagV9ivfWSq0rxGOZIpWwH/doWKPrs5J3lAUlVl5zgGzm/lQ11I1AQs5LIxbaOBCcNfMhrPfQw17smlmFbNtfXIW9ZePd7xU1PjxcUm8qYSUeCaBKkxlvGdygYpBU4FH+TL7zOB7wMlND8KaELbitKEA+YS+DIjKkaaP0bKAVwNp28bPxV95MuZ1YVy4qg6EkTy/x+cX7xN6arWz1TsreND6cbjAyG59gZGOFylgMWmf07Dl/BiGI1iQOavj2UYGemx/cxK1bt/HBhx/g3vEhVt0Kd+/exd07d3B6eozT02Ok1CHGDl23wqpbWpL9hBh7EENDcQmWnDdAN+iVJ4UQsLfqMGt3kGJAGwQf3f4A++9fwOWrV7B/cMFOqKvGH9MlezrW3hTnsOtN/LwGqH3sK/LOBubEg/AcLwr4Gm173iUBQFVGkgF/EM+ElPUGvad4UXo+ICbGwcEFXH/mWXzw4Xs4PrmFPmqS7IMLB7iwu4dbpydgBuZNQBsYs8BoCWjIEm/bYiQAwQ2jNuDShX0slh2WXY8+LnDa6eZCDwWAGYAwZ8NPW+zrrAK8q8UxHmZLYYNEMqTSKgG0Gs0aeh2NflsmLFcrHJ+c4vR0AVDAlSvPYre9hCDzyuBnMDXGXiKIwmCX3hLaAAQLAWtsE6JB27b2ExCC3qv8ouYDyB5FdW44D20V6VF65kBhBPEKbQvs7O5kAzHz2kJENgRsPM5B7uJtornOaAgs5KaZV9VAZmSt0ykpg1jKegw0qncGrI+WufCBtLHJ5ZVZZO1NNF37mcDQ6D5dkaky/IGybkx2QHO0Fb5uS1K8rZWOQxqWl1LSUNq+N0S18pAbqEEeDFyEXAYKBWWuxACJSs/J2pW4zBr3fzB0RWGo9RBxjr+WrQ2DST2vENmm0sMrYvVufOWIvsYbRw+92NzVoX1i3uqeW89ldNEffJzzirvPUtabl2Re7kBEE2B6lXuDo+jCOY3JUGc8K5jvySzroyYD4n4QLqMPl3ps0IS3qO1B3/eIy4gtFhl5VntdR1RlxS2ZR4ixPsSyQW/eUD79QBGQlfwsR2gkUXxr/kmj0bUdmsdFYRuU9WKyP5rXir27TqI72aYHEglZ4BARuAkWMmDv42B80fNeQI1WTpUR4p48tittrvgUTKljAkUzRHyXihgxMjS3g8bpx+QeV6kSgmLeKlT0CE8OLHpvbsMgeSSQjVnraVHuVTkduMqTnvaiKr8yLz1yWVXnBIF4jhskaOLSEuZTwp+UHJk1eXYT2PIp63NJNBQvco/ACYEEDAajzYYBWd+aMINwA1muwNSCEHL6n1qpykpmRQVT+0yFjmD9H+dF2awEl7lgCElRYvPac01hCigyhXP0nYgUr7KxMjndkom6Ud4rsKTP5t1kJKLhTgZgSg89yYfz3KkHnM9jrIxBN6yymp6NaQAlETnqI6TLLqwq1Do2xCEr5YNm51J4r9vexfgvWUUGxlF1QT30CljjIBFzTef1KOpcpCQAk3nOUF7NPjaD+v2Evvxenvxs+TVBpEl9ifTEpsANQmghUO+LOidVVp40oYGNne6YMjMYAUKC0EAB5EDo0woniyN8+9tv4Xe+8Tv4+OOPcOv2bfSpw+niGHcP7yLGiMAMSXoyFpOgj5a4lTShr4bbpHx6UEoC5hnIQYYk6PsV+q7DrNnBahUxb3dx6cJlEEe0LRvYbiE/GbzJQY3Dma7o46EXquR5HtvR38okC0CEAvJmOiaXa5sLQ5mdnqZGFd+lDIoztdjb38UXvvgl3PjOWzhdHOr3HLC3t4fr16+jWxxjuTzGPBDmbYuGLOFu1GPvYcagvsZGNDQ42G+QhPXUzuYI90470NES/VJDdKOoP1G0DYwYfb1b//KglNGhkeFJZjSx5XGDbT4MvPhQvEpjSmAwIiKAHovFAodH99DHhMC7aMIuOM01l5QKSeX9ABgR7mFUyzdiBSoLsOd6meYsCtyiaYymTP7YNKBuZYbICBnQkNggG3vOnykBTOjaGbgJRkOS+zreAS+hfBWY7Im2ayATQIRoeF7ug/5kkNnbOrB1zQNUTNOo6izfI+sVCqpvY8APTP6NYKz2o75/C7POx7fuSMXvxt6A2fdV6vVLlU1qnm75y9qzF3A/OjtZAVlsAYUoBtac5C06qy2rmMoTGYl7a7rWqcm/o8lRKfkX63647etzOzmY9QcZX8S585ZjoHDmnN1vub+It7Pp5eEV12uHej6RmMNwHm1M0uUEjzuv+NJJMeVcU30fLdpAMJupnt62TUlW5LzB9SEgb9
rl+Rxt7j+5RVAQ2QEjgi4SqYXHfZSiR2blFNCNgXOro3OXhY7ug0etPEjJUqGSpxu09ImHq7sex1J6qGW7Bn/6gaIREx/7Xug9j601D1TGCvH3tKzHnzzc6kfzk410Ge/KbC8813aBiBBCUOGfIkAFHBJAryWLXRfJHgfJFRfRu4kYgRQEAqlCrOFGepIGQZXhyB2S6MkKMUVQcm8ZQA1776krsEBRd/VzGfKhsj9UwrwPBsJUrthFZ9FPTJSPAAeJgkdipx+BAekRo4UyZfHshhdZLDYsGbCGnDEru0ipR0SHlE6RUoMUCU3jZ/Rovorgyi5cn9PTkzTvqgERw5Yj/yky3GEb6BYmsDO457Ht50mfmq6rN2fcotQxmatn8KfTmwFqTtVShSGsm7QTBut4rZX2EQmY1ajy3A8xdoipt+eaQX31kfMF7CuCu3gdeG+p/DalSO8rwFuxDWTU/zrUrxb8Tq/2fWVUlPe6V08xlAZ+RBwg2RuhnAZEUEN+fZatDwbCBmKjXW+jglOSQSrKLa89Teqd/Tz/xg88zwcRgylYUvGAQA0YnHP6eN4Hz2eS1ybBFGIxLwoCN2ps3jn8GN99/z3c+M7b+MNXXsFrr71miahP0cUOq7hE13dYrVaIfY/AhECCECjzuaYJBhZpSFxKeioahYAAARLpyYAxQpKu+S4ssFwt0TY7eObpZ3H5ykVcvHzR6rI5sn7pmqypjbKh/ihL0dvrdVtTU30z2/wwJANHNgeVQT6svQDwhUdX7wZQki+3aJtdPP/8F3D5ylO4dfsmmnYGIWCxXOF0uQBxQOMAEXE5zUkUkPYOEZw16jprmXHx4AB6mmTAbL4C0TESEk66DjEBvbg3kBhoCgAJbOBl8awxg9fGraj0DoAqv1zLA2WdLvJF30ciIInoYoeTxQkWqwVWXYfdmYKyjOIN4PkGBQxKekpjQffMW8QnVRKQ8/dFtG1rn5F5qv5RhQeSr+YCCZF55ugxge4doOMWt20AACAASURBVEAwcQCosfxHNNhQqPl9vkYl1xlsvdeejZkkHASpwIUMehkt1bw312mLphYPYnI6rek6lNswdRLtgxnOZXzLO7Yv615GRV6e82YQyprU/C814JTgvgjKK1VnKSJicrvIWzVoX23QuweryjRfCyancz+k+hnziPPLpA5TtWDjGD9qcObc6tdp/9EXByaM95Lzq1T44Rnjb6rh9u/y52xdq6faCovFMp+UO2t3rC2Ahg2HwjOpAMck6vUFcW0O97t8Hn/JOt8YhPMefhKrc6Lz2yoE506iyoTHOb7Ze9lV2skmTjWo0tkfScseZdm+xZ9+oGhcaEqsyCdbE4+hPDFENnVU+rg8cDLrbHWMXzr60zSEbV9TubirrqBu0U0gSJMgvVdkpyoJgTkiCauSnaLummeDzzJCWPJXMu8b0uPRQGzHuCKYcUgQO/GDUweKhF4oB3tl92aJcCUWsFCUfPKOXZvss4FN5ilBDo4M5qG4yfuYMsJwOs2DRkiNxUiMPmlfxI6HlAzumKEOPXmKbIfY+5OkB2GFvj+BpIQ+9gpkxD00oQVToz/MAEVIXCrI4Vpz3r21flmOlnJxGCKha9gUd6kUeCEwgp565AKRphU+WvuLUMLN0ujGtJHOxeZPj6AvY0KuCElSsACcDZ1aoGwslWAvOq/STUKP1eoUi2WLEOZgnqvxA8qeAkYA2jMKCByK0WOhU26kCAA2b6Wc0LX2TMsoURn+YjPVxnVlDVWdKALX6wM0XLF41sn4eDhvp9Ebe94Tchfx6THLeYLqyiinH1KO40awTTmYB2AkESF5PZVBVY9fZZ4iUEDDDYKtibLbbePv4+qhPiwgSuDAaGYBJ4sjfPDRe3jzrdfx2huv4O133sZ7772HW7fuoOtWupYoQkgQo+6C9l2HQIS2KYCU0qF6/Tk42Mxay6sm6OISJAoUpb7TE7IEaEIHpiXadhe7e3Ncf+Zp7O7uIDQNJPrJUGU813I4Gb+pyzZKvJ96s6kUcNxpslwnTCXiHStoY8OezrDXallUAMxsUML4oDAgLQgzXL18DV944Yu4/fFN7O412NvZR991kCgaUgUYTzLwOElud/lBpn3yNwfGhb0dCIC9/R6z2QwhEG4d3kMXIxZ9AhMQhcChjEsT3AvMkvVGPWQBouFbnjZGkJBIN0OEh7KVYGvFleVqGJmVv4IEp4sTnC6OkahHwkoNKwPn9CRM75/2WV/ja1xDTp2nkWgIZ2AFO2dNgKcfyvcbn3IZVssCh3iTjXfmPvYsbJz1HQY4b6SB8ltnjfNn/e0epC6lSzuGBpdM3FMGtEBz0wtAQTRaX0cb18uIsB+DjitAlXeoBl3O02ANJvLhIt8y1J+UIiR2EDt1kplBwWR62qS/1++d1ilrQCiDRPA2F8BoGgTbhqENZc6wKWeNicuLH9BCZQ6m1s/DKZJXMFBky2q1ygBV7TFXNnd880llWoDqKpaXXnUUAiAlR9oTbl5WJS/Ah1Tfg9Tz5NH9QH4I3CF8y/Lk9edhl+8/oMhLrfTIp2Qqi0vMI3rBlsLvUYzWxtdu0IpMmdi+ckI5ot13bQPQmneNMHQPP4IpIlkIT0oBifQUMz0mVY+Dp5SQ0IOgLsqEBCZzcrbT0sAJDXQ3UKzulDh7gsTY2zUqAgk55qr0PNvaU8qO3zuiDRp+n0evEnbsBr0ZvAIgkNZV8r4AUdTZWxMw29tExy+EGRqeIXCLHLJj4IWIJvMWN1BihxQ7NGGOJswReGZHHwuEe00MboI191A8qWzJV1Q6M57fAjyIePv92GgGee6OSvnLVQ3oyQwL954yhYVq3W0MPKzNi3syyfggL/3D8nEMwJTz1lVBzrLHgEiPmDoslidYLFY4WS4w39nH/u4l7O/NFOAT9SYJEuy0INhaCEYDDmC41xzUUMsdrj2o3IisQIK8Js2a9H5Vu7d1qGHeZXcFzRUxy/VSwtoc4FFDTD1Bgt7HleeOhaMQjzy9JAeXIXsy1fSfk5yPDYEqdDAb7JzHilyJFwcDBUCEex4SMZoQ0ARtH5InTiaTNb5v7m0pSaIS9bhz7y5ef+MP8fqbr+C11/8QNz94H7du38Lh4RFOTk7QrTrlEwHgRsOA+qjeijEJYu9jXPh5NpBBoKD5X/RYd5uzJEixt4TaQCQgBMKsDXjq2jVcvnoFTdtCothR5WNgjga/xkXG951Zzr9n3TOYhp4m6y+uvhsZz970gTxxuqieFaeLujfqUURowNJgb76Hl776Yzg6/AiHR7ewuzPH5UsXcfHCRSyPj+GebEQMEs1hpJEVSU9yYgd2HdBV2J8pYd4yrlw8wF5M2JnvaNgvE44XS8jRcaZHpoQQ1Nt1Pm8xm8/BzEgxWViFhlnEvkfs/3/23rRLkuO4FrxmHpGZtfSChYtIPQ2lmXfmaDTz5uP8/0+zHB1JI+ktkuaJJAgCIAj0UtVVlZkRbjYfzMzdIzKyqrpRDXSD7WQjszIiPHx3s+tm17K5XHg7iFsnTonBqbTFdMulgn+KCra7G9zsrsEpQ3Rr7
ULJ56UiJXZw1+rPJDaf44CDyHn6bB2gAFu7hK5nj2BpfWPvDdDE7Wdp1reowaWqbu4wjwZQrG5157xbTR5znqJqdajN9Wbv1sp/E9cDDG54+afN6vuuPRdqa/DCHYo4E6ii7E2HKQ6FwkqyvO8B0p1WNZNDvEOX4OP52X1tZEq/AQEcUAH56lofjFyHs/9Qnm0tBKfKf31V+Q6YbEdhkVvLW9fTWxuiiGDaaJV3PXpPaeBHldr+nAKLbwskat8Nt1ZM7voavxuAXMdKENdXsMi6lctIaz+rrPQuJx+gB1Plgdvd9YKHaY8frlFfb27evu79mNL7DxSVY1/vtFByGr16ppbfnr6HMdpyP0x+B2zjj0nnv06mcrvT1RVvEuHkeDp+z+sN98MN+vBVs+ttJeZQbSid87pNjjiPl6Vu6opCLBLKWuqMo0cIoiNUEwBTmESynbzDgAdVdtcMBZKAnI85xlZwfSgEKsYBk1JXBDZ15TT4VEZmiFbBPdzRLI/W0qRoKwiQpjimebMU5hQXfgPIiBNqRI1d6a3kkNYmzFQIbY3AWgwsghG6ErOF0S7NTiBKWPVr9L2FQg43oDiVCz9i0dp1u6zIKSN3GV0a0XEHUAZ4j93+CnncQZ2vInTwiNwVdT02Fgt+oagWIqhuPqboWL+77HkwdIog4HU0CwdtMnVQIM9PGBulsigb0e+NcBhj2edxvbfUYNrn8VeccMbYJfttHDO6TjDsb/Di5Sso9Tg5fYynj2/ws5922KzOrP20Rl8L1yHjt2BvCwNkUuBrfm0xleJNWaNaxWoCLAFoySAhhIqeGX+JKRR9oywCpBYK26LNmeUa0Lmi4ETqybhbDKyt0ejKy2LOQKAUCrC5a9RxbPMgZwHDLK0gAIMhBdBBARMNNAgAS0v+4sqd6/xufWcThr0PrP0ba0FWZBmx32+x3V3hj8++xr/863/Fv/33f8HvPv8Nvnn2DW6219hut9jt9xjHyi2VhxEYQ6FxHiZXVlUAKqS7XIVvApCN2JeIPYqUkd9rFrfaIKSOsNls8NFHT/H40RkePTqrA1gjOtWBKcf8j9dK93VvKBZd7egjINzfOPTLuKYJjASioYCfpAlE4sNlTt1a60GMCuAXF2Xnb/OxQZoQwOt6dYo//8Vf4PJ//hv85//yD6CkePToKbp+jVW/MUJnHQAYKJPV+l+ynV5zInfnVfQdGycPJ4AYPRKEEk5A2PRrMASJgG+ePcduewPNNfLZybrD+fkZzk5PsFmvbIyLQrK5HlpY+z22uz122z2GDGxFcLW3tQ2YAixxgk5exmg1BtzNUZHzgN3NNcb9FUZKWPEJgARQsnsYIFQ3LyIBcwZTch6ZUNyNG0+RIZpB2mEYGaCEnjtbBdtAFz622x5U2DCNFTPWdZXsp8E2F8zyVf2AAc7r1G4yPr8n0UZbmTKU3MZlFr52av2nAXiHlWTMU/Vx2+7thCIvtEEEDGx2qzYlH47UyCBVTqACuAf4HvJAVQArGFOfDeBtMhMWpmV74GLPNUBOI7mUPbuAZrNcfM2avdH/Tu0kRlhfogHmAANxwvoj9sVY6moJ3eKzbL067bfoq9JHVVcI17SoZ/kMkIlQ2rJh2bOhGe0XTNv31h8aoclrEjkvdFF5JEYfFtfm2hJL4vf8/ro/3lJGHAcrp/ncnSrI6hZ7TM215WO55XdXPs+2DIcu/3EgGPKERf1crdYYx2xyQOn/aaRbQowx/43avEsvlPE/H43tnWXuAa8xPh4q3Wdc1rXuuyWdtdNyeQ4PoZbve5DGuqXRJ2Orqf7rWRMdaBgLP80n43Rdve1A4F1J7z1QVNwFYgGKnTh6XmEC452peeY7pkWOk4X3FQuYWSnqJu9TWOvftqfaJlZP7nH/1fqW8tiJ5X3a6paFJ4SsYxO0lSWOAUP1LZi7QCy9z/ZpacoVpuO20FNKFg1IjSeBxax9MsxlzE74kimDFGTBpiwJEVhMoIWq94XxpYg4f1G4VjufUQh+zB2ymkuWyIhxNOFHogljYyubXHBHtP3vG5aGIts0E9Vaxgk8gcy1gBgh1lnlY24oNI8mwAVhZkO2C1hUNAKBuUe/2iClVVFGq9eCovJdWHweSDgEKFRH5LyFdAnQDKU9brYvsN1dQnQHUbOaKCO4dDFN9i0pX6mOeW+vIgZPJDdX/EPQjzlCQdTaDqfK21F4eNRdzmbujDa0tHmXC/ca4FyRUD3aqwsgpSdxZO+LdvQLaq1CRP5zNksBybgZbvDy4hucn91ge7PFsB/xySc/R9+tserXWG8AYAVOnXPuhF2L80OEMBbuUGX84GC9KqUO3WQ2BW0dqsISmrzA5VzX/gnD3D1XGEdYOVgBEp/26iVkCDqAxZUeey8jeQQ/U9bbUpi8L/5vhFJCuBNa9CVGuLnlUSzKUurtbcSAZlTjutYWyFxR7YK7jDr/iRHBM3Syz1Cpl1kXKqgjpA64urrBV3/8HP/2b/8N//bv/4rPP/8Mn3/xOS4uXmLIA8Y8Yr/fY3DeBGbyf6EIGjG1uhujlBP44K+KsaiGkUHdssyIWglayZPVhF5mwqNHZ3j69BH6rkOfEtZ9QoYrqD4uiXhO+duMgBjQ2nT/6+2jB+MuFDWX1Nw+p6xlEzs/fxWhK3ON2uhTBItSp4fvaVU/kILECaeLVRoc52OwJjDMlZb4BCcb4H/6H/8X5Jzx/OW3PhY6rNenODk5w3D1ArvtDte7ayAPiBPtEkGy7wCoRfhiQtcZaTaoQ1YbW6sTQkeP0BEgwxbj/gasBoKkxPjo6Rl+8tNPcXqywSq55Y3ArDvHAVkFWRT7QbDd7pGz4NvtDp99/Rzi+1Aml58UxR2USKze5JArmTModDTQFoKXL/6IfXeNjjZgdOj6Dv2qc7L2Dn1a+XznYgWoYAdaox9M+ScmrHqLprlerdF1ve9L4S6mUAwTcB6oTqDRzaJiezFVJS8lBmjAmK+wu7lxovuqYLf7RoywKme1m2wFiMLiEX6wBO2g2kGRCodJ8XlFW1+fKX56QahiUnGLIwLbsU1gllVOmFjxwC2yrF3LAU6ZUzGXAgaP9cLHd5Oqgn0wPTwPI3+uD8BkBzTyS7PHTtV9guS6t5HXVQs87+ttgHSx2c7cjEM2Is71Hti+Yc/Hfc16RDDLbiSoJKgTpsPXbRW24CRUZfCpJXID2zDcNV8a+WK6mdff7k5Fhm8au1iy0W18TCgKLBFqR89ey7mWrJU92nytfe6jsDfC2GumxbVd6wHExOVdubR4e3+1CqtKtTWCNOM8xsP0PUUwd9k2Z7Mu3GxOMQwjUuoXg2RQHKKpgYoWMdgFcILN+3K/yTET+ccKMd0qp398z2lJ8IzfHqg8Zb27i4Q61tM7srqHDp1zaABH8im66JF7Yr77O4s4F3MMVeqr28Ed7SWzdy2urfZbrKvHrUbp1uvz+46lhwCh3nugKJICODy5aC5+j/Pzrm6h2aZ/W+IQXuaDetL59ID10+N5ubK8fAvFDagg1qySzSYfT9yWJtfvaC8CWoIDfyY2mYQSdJObqCvZ
LIyEspMEs1sf1WhlJLZckIfOlWzWOCZoGlgEiSDGRuRMEHASCGe3YGBkcSujMcPC71r7FMBDbPNTaS2N4JZiVGROM8agovhNRQJbXJk7b2v2R4tGhUL26eXvnOS6yuARCtgiO3Xd2oQpP10rG6abbKs3vkWq2kORATH+JxqBPDKU9gBG3GxfYr+/QpbBwaVcJJ/WeqDwkRzt7aZ9Zu3QHAQfjpHJ5jMV1KydouXrtSmWOR9f3jUaQkxz6eC0UIvgP/29FTkcBGjuaK8SAdv9NbIqxiy4utnh8uoVfvbTX+DRo6dIubPQ7ZTM+q1tnxbUCOGsqcvhXlIhrlCRAA1jk1mbAIXXg1BAuXoqa78lnpJRF9cwitNwi5tnio1HgSok3e6uc2RRKBRNalouCwz0oCqYqjgI2pSpkmWjlINbywEyBcT6iadt6QIlIZVIhSBF6hiC0bhkEuHz33+Gf/iHv8U//5d/xG8++w0uLl7i5cuXBj6zCb7DYEAyCNBkCoy5sFpZmRTK6opXDXcfCouvJiCFRzG0eRxKHXsbwUGo1HV4+vQjbE5OsNvv8PLiBTY/eeQGmeJNa0rdoXIZY2cqWd2Hq+QuoaYAutG+7elbGZHzh5pPMquKUKTuHZuhCPo07V8/bY7IZ4lOACI8Pv8p/tP/+n/ganuJP/zh9xj313glz7Hq19gr4fLVFWR/A+Q9ui5hs14jdQmp69B1PbquQ0qpAEhM5oIWrsFQYNMnPD5b4dMnZ8jDFgkZlBI2mzU++ugJPnp8is26RyLrZ82KcRgwEiF4vnDKkEfn2Gegu77Bl99cWCCCEKCZqvK72FAt0E0Qzbi4fIEbvsamO4WKWe51fe85sEUE1FlI+VaZ901D1KLG9X2PruvR9yswdx5R1MAii7SZi2hheyagVPMHCKNbqrbCienBGWO+xvOXz5CzoOXaqvsB+bgJnq9mbxYp4I69XSo+HuvRQbNNFbNDee0BUpSdaFKnu9N0QhQMqlHID1/VnHjPtanlopW2KSTFFKTdUkdU2XO1uGKCmr0D7f7Syjl68L4leTOskIzQmmp/36Xs1RyO/Pw9KxSvmRa28ncyzV05j6UJUIn7dV87Zm0vrAdMmw2jS9mDtISl/NGcENZ1caBaiesPZcH61If0rqZ3xXqHjqx172L60QBFJb0bY+DW1awocG9QVpp93h92ud/Ljojix166kP/8s71FJ+1yD7UCVXA/kgohblu4Isn5d4GWEwYPKQ9TPJUc3CH2cKoeXp7EXWjsn3o+hgdZ/qLOa8IESPL3+KkmGRk2YK41SRmJExKPGN09YBQ1ZVEUkLBEClAwzLDngAGWx1ajbBf0pqj4NQcFIyf31RYpAmIo9UQETgyicDfrUBVmK1eQaVsJg+QbvpEmiO6NMUgEowiAPZQy9uMrjLKF6gBRC/VuzavQLKWspuNROT2LXj06BI79PjPxrE01HSvx3wLaxfH8RAE+0vRTpKdRZmIDIM9dStdO8mgKL84fY6VIFXQhsybJMmI37rDPo5OQv8Juv8M+D/gzzUh9h36zhlBGkFeTW0lM5lpYoNyxZJiBCjUPLpupFyWzkcErv4jVyUAiB2JCKUAoJ+QAF0OUjcsKHpq8hBAOxXDh/fUWFEWCtCHAtvemZATZ4yBQIRSWXFuQ29FnoBDBqaZ87YC1JdTA4YKneH0pAZTIPYkYQoIvvvoKf/t3/w/+6Z/+AZ///jM8e/EM1zc32A8DBFJ40aw8Wt5e3FhQ3eZE7NMU2HoK3cJ5FTDSdikBCGAhKFcLBOYEFcXFq0u8ePkcn3z0cycu5ian6MM5ODQdAcdW8/ueis2fKdYWd9xb5tvsl/LpxMl1/VootVsmBDihRI1rKzsImqDoQLpGRL/rNxucnj7F6eYxvvryN7hYPYMoI3UbAFuoMlJaYXOyxnq9Ruo6pFUHSozU9aAUQIW5vlkZUl1XO+B8nTA+2oDxBI9PeqS+x9npBufnZ1hvenTMQBbkUTGMAyjvwdlc8FJP4GTunDthvNxu0ZGCNUKEK+ru0LbKtH1jXUyJwZ1x+iF1ENoXTjvoYPeKgsYKvKAAmpF1uHcAqh2IOqTBLec4uSs4IG4VmFhBsH1UAyRCXUSjDxUDwpIPXl4ms3YVHXB9c+VmUsFjOAVLDMxQgKRpEQevOVy50Qz/uSQ2T/V6zMF2eXoIXeV11Ir7vO9wjrZ1rJFjK1h0VwnIgT8CBi7yRbUFrrkXC9yY+40CVSrQ8gDNXs1MdWzAwIWcBau+bs6iiiwZXbqnLPyu6BJ/4qlVpLWxEJmDm8vjtz7LTh2RmICOG6CoHc8hxFVwmxpB6UAWBw43qg/j5kO6I70vAFGkHw1QVJeEYzfQ+7vyF2S8/IDJknWfPRv3qP+98nk9AeWuVECzpQuTN80E2Mkm0Szy0S5tnn7qWU/OQlAZHdxQO8HXERnZyBPVQaESjYvcyogByhABmM39gzSIlAVA5ydZ5q5i73T+FWVw3yGxWTBxJox5QBYjvG5raUY+FSxqlfX6rQqwRI3CDnOr0fZkXOtTyTkomLWxYEIRipkZ4pwcCIVfTYh2NbQMPDP9Jih5fZ1kevTTWLt/CwDIsoWICfTwNg4yyUrLaZUtXEwBKSwM3SUrmABpbkvi78Ls9DjCtVaLr/uN9AIuzeWH2clj4bjROhaBliqxFaErECAqEA9RrZqRVUDDFokyXl0bgMRM6DoLBX16+qj43yuCb8b5IJroXJNIbweJqpXMsTvilLaYb4dGZGMyXD2oWAeleqrbjMdwWSC42xjg93dFeUeE4aZ5GaqLgLkeweau2WeU9idKSMlATxGpQJaDRHBwpoA0iDKa21bwecFdQ9CCWOTNyuZuBlYMecC3z7/F//V//5/4u7//O/z2s3/HxcULDJIxjKM7XbJbOQCqVSmNuTuOcc1dz/wEtnW7jJkzXf3MflKiv711BIAIMIoii+Jme4OTk1NcXrzE13/8I/7DL/4K69UKQeCt6nUP+GpiPtH0YawFt0yXNxGGijsHhTLJi/k0Q2n2/PT61IXB36BS1xHX5FvYqVqcRHkYBHMdU00gFZyd9vj5zxQ67PHy2begPCIPAwYVEEYQ91BiZAUom9ufwLiWsiiY1cdxBcuZFImBtO6Rnp7j0ekJtvsdFHBuIwazIpH1JUMAycjjHpCMxIweHVIYxzGDVFAcPMpYaqq+1EUxDtxNrOsY5t1pY5jYgUxylwO2fINUP0AiivFTLIqs/RQdJLh4Yk8ns9BlIrdIadepdq8PcF8h2EHCytUr1SXrJ5HB+AgbMNj2zuBVa/dUKmUJ4v0C2MJJ3rUZFwsLpHq7tT+0BzKL1tZvPU3deCZXJicG5q4K0HS9Ub8vZNHgXbplXw5LjPZgIK7YeKt/x5oFGOhj1naMnH1ctX1Uvlo7TixEPTpV4b+JzZZaoPEN0nuoOryHRV5M0zFb5z0wBYdaK6LpHKt7lvH2uVVt6qrrWchlcXak5T/QEnW4LcOhbKQ65a/6kN6t9K5YEkWaj9d3GTD60QBFwC1qXVknms3jjV9yd2f
ei6PoNYowg0gqgNBeufN993khHRcYXyOvh5mQhAXd+478Z8pKaFnsfCNtpCoozFrGwAp2qyHzWxeLfKYM0uSPZEhWKDtXT6H8CWulAIcU6ooeIxndsyaojugSVRc3d6thTsh5ROLkJKQZRpxrm5MG30EoNwxAqYRDLxYqqkU4Kj7gpcGqgkdwPodQL4sQ51cNJ3F5vJpMGIH3aNwyrkBCqXHgtWcLS5GaBQRIoEHuKnuoW22pKw/x8spTg7JBT/o1qjHr+iqsHl5sF2EDv1qmE6tV9J2BRPacaEblTbC8RW6fYmGF0Dg2FKUC5G3qChIUzUk8kJ0zpqr+DejiClbO2RQhtm4RJyyGEHa4wtWrF3jx/AxnJ2c4PTmFYAA8dDtzgmmpFQhRoNTxWGqBR7QWa5NZaZWh4qZUs2QHVyRbOYmdMFIJxD62fFCn1EPF3T8yYMTb7j5DZq3nnVlKBxf8A/gJYItiHOtUiei63vpXLLIhQB5pLfKLCdYKp5GXzfNQJCOyoaqAk7u5OLCsJBjGPX772a/x29/9Bl9//TVeXV1jP4zIqgjrpMpL5vBOKEiJXDg1CyIrc/izm8KmMFu+ydzwcWhjxgceWVRAAybsVF0ykHPGbr8DMeHi8iV+/Zt/x89/8uf4D7/4K/RdDXWOdgyUdsLs+5uv98uKazCrVCsOBbteeT9hynik4PnU1YEm74w1ELZ+equ21oTkIE5Ac2YpqghidVtHGY/OP0X354r99Q1+/9l/A0nGZR6g4xb7YY/r7Q1SSjg5WYOwQcc2Z8y4IVww3cWwrFPGY3SyWWOzBs71BKqCPA7IebDr7mbXpQQwwe160DHQM6Fj487ZCyE53xVQVOzKq1MAwdIz9TNuKfipIusIIINUzQpIxgLAEwh5GGb9CXcr9ZU3COGlAhLi3G7MqSj2vvSgjkH2nglrVH8H71Hce72GmsOtlOshQFM3XRhPRAyV4DZhlxEYBgwH4a0d+gSgzBN5L8CUpf2otsf9Uozee916d1633lPX03k6oDloy7TwQLzHcyy3MCzubOy9Za9v5HJbs1srLsJEAa8vLuU4tPgVkHrUV89bVBEu9O04v/PsNMDl2297t1KRqd79ct+ltkzcHj29qXph40prdNPG5SxAounh3mxcNftGkeV8HMubFupD+pNMh6Dmu51+VEDRrakFib6rddHdq9ub5/2DJ77H7nK7gvnmaX4CEN/nwtxrtG8jCANxgmxCE8cpFgfIY6emOY8gUogmdwtzgRVkZKFarRHiFWRYOwAAIABJREFU1CwIb8OaQZwEj7UvylpsTkLiyi2DqYOkbJZFeQCRfRqJZGOe6HuaZCkuOMWiKFR6VSeBFRTCPQVqJBQT3apjidepaFEhYbjSr1QUVIFzCjn4A1eYC39J4FkgB4mknPYCIwAYNxFyaaOwmbGSWRsWAAmNYB0jQ6uQOU8VVAyBd3pPEERreVeTc5TTiVBLhLp7C+lVImvFDLvEU+GjvDrKUzeLtlRxsm4KlWLMI7IMDlSYFRGphVBXFTx/8QxEHc7OHqFbrXH6+AlSB1dmnFMLU5CH76heASAIdZzRfD5WIKECXM01JQeSjOel8AWplYuDS4U6U9EdCHEqXcDnmhaymRDuitpf2tdCklfbNG/mUqZWmQvLOZ8gTV3imwFPIupuMFYPhfUJs11X8vXDQSKBYr/f4vMvPsPf/f3f4Z/++Z/w1R+/xn7YGUjjLhHBL2TbkbVr3/Xo+g7KiiwWWbHlRpBiSVTrXpT4yYn7VAiGn7QbebiNcxHBbnuDPBp4+/z5t/j1r/8/PDn/GE+frGpEmOJ6Vdtxkm87DG5Jr2NmTXQ7Q9mxl+nkWpStttGMacnfFep4EJJqJbAMRYDhodvd8qfZT4rOQD3Ozz/CX//1/4Z1n0HI2F5dQGnE9maHy1eXOD8/A9PG9xGzPjN3K+v/KQdX2142TjoXLkeygwqCBRcAMyCAjLnwHnVdQucuxmMW5MGsEfs+ISUBshRS9toiVGpetU0Ui0ERAxdfvHwOghSQKqXOD0eqVeiYM8I6s2ltI2GGQ3S+R1SZw4EdZsAtlWJsVyc5LvuK9Yu12SA33mpaQD8jMyb0aWNcgneQqIqIA0Axh5IvD0Gi71aPDsC360ld4wOcbuboZFegW+fKwZ7zQLJkK/LerpgcmVvHwKIWjZhtDerPBVdQ+78Asqt4Vndv8v2GmcshAHCw+vjrp0ACqR84kXPaoXJZceLGHbkt5e3pXT7pX0yzobYkVf+QqT3AuW0oLoFE7bXI6/h74jMGmTZyOM3ykOKiXOdKg7hNAo4A9TTVDobelbb9kJbTuwzGvA/ry58OUBTpu4JEP0CaL/S3C9EP8LI7s29vmJau4nGv28Z68JXmksdSSe6peES+5ZS6bKGmKMZvhqkIRCLyF6DZwA/jQDBh0k72tYyllIzjSCX7O+Hm2ewRvtQJSxlMArCChEE8AprQuZVRu4FKMOi75U7sXXF6wez8Shx1dPeFSajRZeAvFKRQQiTqH5ZCbtGgAYa5xZSqhyGnsIRwU/ymv1W1AEVEsPqrmrWUarEqIlfMrIrBDRH8R0XshiIsStqIV5Pa3Nr/rfVO5T8JJYYQ5NxQNYVfWxDrNefaZMg2EpsewijtLQXQ8H4IIJFgQFCW7K4eALuFCESgZAS3ggE3+yt8/e1XoD7h5MmZWfA4T5alEG68rakpwFK7lf9SAYoWqzz5uVWWvP+CEN6Jn62N3CKAEzitQNzVEPPEzh2QXHhLUAeZ5mUwS6LmWolI0jj0NQhcC8y0p9Xxd8IUpCiudeocJmJ/M5vlDycHTklBycDQ6+0F/vk//yP+/h/+Fp99/hm2263PH4FmJ6wvYbEdEEgM7hjcEUSN6FuhBnyJgVJclC5yt7JaxvazLXuAGXG4bgCkQnXEzfYGr64ucX52BtE9Li6fYbt7BZGnEO6c1JvdiLHlFpmNp3tMkTcShmYkWgXzW5xEx/aKxkpWp3do818CuQXpzBpzUnZ/h01KX5ui/gkKxmZzhl/96le4fPkN/vD732LIQNclnG7WODs5wWa1xnrVIcUY8jWfPRADObF15cTysjZ6ed/3DnIanx6r7TUyCva7HUQy1usVkkcQUxC6zsClvu/QdQIaKqgzb7WYC8w2Dy16GDCOI15evMB+2IIh6FiRmECpQ9gMWZllstaXCiiQuCtrHakD8mLznjmstHz8x57EsR+0YI9Hp3RLoTFfF9BC3dooMQOacL55ivX6HAkbHwTLsgkT1bKFTBDIRfR/M47LifAyhDH5K/buO+Wi+eV3QFS9N89YMzfVwTrxH4W0RuJU2NzmkMfCeoido6q+b+KqdzC9l+Z7HYntI+bKTuWwqBb4nm1w7zvfgfQeFfbt6cgLG8VkP6n7RYyviF7bGh7aHVLatPCtTQf7nbLUh/Qhvc/pTw8oAt4/sGhS3re8GB0Roqb3tOVo1V9yYQiNov2GaUaYeOeGMjmNXbpM5b5oyuKyEpsAvJkJBvDIaDUrp5uhvBkIIjCOH7O6CYuLpgpwbgdXjo
kzkAlCxhWhTCBNIGQodxAdJ0I6MyOP7sLlpyKicIuXEanrwWxlSRYeZrKx1fLQRKk0kISKokMUHm1c2qE2iD8HQs5GBln4FkAIK6OwPIp2rkBRgGzVakvdvaNyhbSCYGzETuJcXAYT7oli1vYvZu32XGvVIBqAWvBISTlBDu6ke79L0QzQ+fyJ8ctFIEnuvmhjMU+eUS93C4qE8qJhYeb6SvaTLAEhU8LNcIVvXnyNfrPCT37+M6QuGTjpbifqRO5R1qUZeqjQq4OK87Zv5/0SWBSPu9VEWDO15t4ggDowdx6q1gBByuwR9zqwdBYxMJREmpchoIDW6oBsvlCcXHMd99Di1lAVvVriQuFU/q6AIRH5HLNFruiQrE42PuBm+wr//d//Bf/vP/4t/vD1F9jurqAYfX64NWMzRAKISm79weHTGiqWcjNu1QECdsur2y0kpjwqlcko+nLY73B5+RJPHp/j448+AifFy4tneHz+FKvzNYg7lFGiy+rwW92OWmuFqM9E0bMC1F6vQI5ZSibHdOoaYKnNIeyHePYv7ndAXO138nUoABY35kGx8qKEs7Mz9H2H09MNBh4gK8bpyQrr1RrrvkPvfd11fbF2CEW5RudRTLl56h5b11iYu2U2YCYlxtnZKVJidF1XLIb2gyDJDptVh9OTNS4vdwbwiLnoEFA5Z4qlR+3YsK7LOePVq1cQGZBY7B8Byl2j8FvBsmTnv3MS/2gfrQAAwtLE25epA5BsnUrJAKMyH73F43mCz+2w4B2qlQABBqIREq2w7s6wXlndqG3Kpl3LHOfYC4IXLcGAYO/lBfnoLgClgom+5qPhb5q0MuoQpZi5756cWoCbdm4Cpe0nh12w4OKjikc9ixgh9eChOP9WEW32rnhDm5b32ygHExeXwPm/9u77ANjlYOk9Te9LyR+a3Lfu8VNZxa82991HHWwHu1ZFIWTXElTnQ/qQfnzpHQeK9HAGz09N3zTr6W70prkspntZ0+jBl4O1rD1xfoh33mNLvNddh5nVPxybv/0Vk0do4bdp3nQP8KoIKve4k+MEPtVT27AwUfVoS26mLJKNqFkFSp1Z08BOQtkFR+M/EEDYIyuMyAIosgEEEqKhCfaUyQVPmNUEJQOJHBAgsjDeiTvkZLxFkt0VLWdkcfMiHSF+Ak1dZ6do7CKnuwXYENIy3skFvCCp1bYfymcoC/WwJPhZqqVTYTkyJTKU7uY70HKrVJ4PezaDFpSSMnqKi8dsTDqIZP29PF7LSWSpq5WxCrAS0h/MPbC5v7RTfC1IWSkboda/bTks/AW045LK36WVw6qFXJXXWidVA4cMn0gF7MiSDSiAGmG4AMgJghtQSnh++QzfPPsaP0mM080jU0zIolnBlSBVOGfQsbJGkgbYiMYMib5Rzss91HwE30QCcYKC3IvTlQSGgzbJ502HxFK4erpuBc4dFArRUNKn4EiMR8mw2OIBEkULe5cSwcnVMVXOixJXamj3NEqLqpUHYHgAJgNrC8hn41l0j5ubS/z7r/8NX/3hC1zfXGLMe79XLbKf1jYP66biBscAiEGsSFGucgovjTWULR8FKIq57DWI+Vn7p0AeE2Us5z1222tI3mO9TtisE25uLrDdvsLp+hTUM5Ir/zFWD9Kta/fsnjbR8Yu+KtT297WIYnoegIWEINA5LGOdexUWrNdslJB/DxCwHRDuvkI2TgPkDiJkVQW4BavMHezkZIOzsxNgw5Bxi91uCyILJMAwC9Tk7osTd87AU6zB677R6sXkY7i4/WrhL+tXPbqU0HUdsgqG0fq87wgn6x6Pz05wsd7iZrc1/CPmiMEysfKUPTyAaVXFMOyx3V5jvfExIaNNB8nQ4jZmedmcDMC99kRYw9oBS7Y6UlfeanVRy1PV52qA9uwBAWqbGH+U1DbTAIfF1ppkNRExi6rKJRL7Y43GGMtYBcYDIBL7m4AKTpWmL2JqPXCa70uzMT4DP+xAx0j7q3VNu1dQDMXZOR7Vf82YjXHeWl8WQ4hZyY6lQ16Y6ewpRhRWyPKP2/K71Z2oeuRO+KHb1LJn/jlPIQcvXw85RUsm7W4bcxdoDorK2xoZ6Wi9/d5Z+5bS3+dg9V4tHtlp5Q27Jd0l799LFj7I48gz9y/+/V/VZKwx1xq5ZyGHhTx18XvNu5X5QuICauTF+kzrdVCbvx7MociitdxxV12lv3t6LU+M1+qXZVn5oVJpp2aeLSUCFsH2w/uW+dLeqGw1x3vdTKhd/bpFUF/3JmvjD2iU8lA8SO82UOQb/yQ9ENq8mOeDjUwtSsnRWwBzP5orWKHkqJvhFvK1djPU1y/rnff7ZqhNWY6lu8hNgGaCNJrJbIGtgg5mv+OO34687857rV2nHCYRJrg3YY2z/65QNvLSsDBRFiOjpgyIgUcQKcJpjZbkwqdmKEYX8AjQEUxG/mmEumEtYKepCvKI3WQnmZpAlEGUkUmAnJ0iyCx4xtEsDNy4H0k7a4YkpmiyFMUqTsIVMD4JAIYqOU9T0avjBN2JJV3Qyx5tK9zRiNwyQgJscosf1aIcGKcHwUK9uxUPKUAjAOfboaLCeheF5G3tWOd7nPrXMcSoykKs8KJiDoLh8hACYWgBofjABTKEe4SBb4XfAAYWiGSEDl+hoQosVRk5ot+Z9Qw1eU9cjAgopM+mm5i1D9TGJqmPFbgbIxdFQpUwZusHe97GsoAMPKQR47jFdnuJL774HfpVh65P6GhtI94LSyol34OpPvubtYk0pdEP1j/FYk3J+z+4O7x9kY38vJgFxJg2YSKWEcuDwUgQJTAs/Hji3klJFcg9iFY+ThsBEQTVDpAEkuSuYV6NIImmcGixuQ5vC3JFfMKFEhtrAK3xO4DEdm8isyqEhBJpThYiAy5ePccfv/kDrndX2OctlIzAWsTcYForoMollOr4CQuPFtFhI8E3TIKLIKIlCpgrXgoIBMwmdBsvlxhvh1twxJxjIkBGJN0jYcCmB/o0QvM1rl99g9OVh15PU0LfMj9jKJRBMxW654JJ69Alt63zzTLgvQMlRQpLLCJk8hyKMOfuqWVdC9BXfQ3xmwk+g8vq5z8biAm2+QfKCKA6oYNFOesRFm2KDhnZ7lMFiVswQMC8w831NbbXF1h1ds+YFcIE5QxmC1lvzqOj5208UGG5EGuVUWpU5bbYgEa9yS03IWASCIntLRCMmiHZXKFJM1ISnHWCpx3wcs24TIo+A4N6vE4GEsTtqwhKBCGCOKecqmC332G330FlDSNAswifBLLoatDq5uuk7i0RMSls7vg+NzryZxaHGxB6ZDHSdSKF0liUj2Jp60Tu4SId0FbWyjEVkQNsfd4AWEGIzUVXpIzhcPkzEvwq+4j6nGFbw8rWIVrWFoK1PXOMKZqITq3z9HTNit9dDlF4xNMMQgJT9nltUdkma8FkuhCCh1Bh60pE1aw2aeSWhxrN+Fqq4rKSofUf08SzWR0QF9WyZhI0vLqbx9VlEku2Z3NzBBB9w7O1JKyKMfu0frRDKeO1I18LsiiSpDJWDCzPIBoB6mC86bY3APEZu7xbn
2f1ccYQHcFNt94pCxcd5h4t71ndbica9zZcBHyHZenSjwK0+2i1xJu95l6FuWcqr4sAMMb7ZSCuLL9/gki+xqsCwS3f20xavcN5AhuLZ3OVD2oF24diXB0kKlL16xfyO6Xb9ctyTyMb1yK+QYPeI4UL8W1tYWv3eHdeD6WKu4xMsdkvlM1kieZAYyqKoCyc9yqUjaHJu1phBdOuuA3QfZd4ld5toOge6Y2H/LyDHrJT7mvdE8dQi7cfDup202wH2MMOp/vsDLe/MU6T6r3UfK9/TmqwiDLfc4N9o9TkXY7qyJWGXO4qEcTINgtyicssIMROTcmAwcp/k0FkVL1ZGawmrIsSIO5eQwkRiSW4f8QFbmaGaAemEUkEI2Uw75vTRTJFQKSEjyWqxNN9b6esHSlIu0Zoq0BLe4od+1/lJwp3AG8el58qTmnXGTxrH3uvkYRbWZJbjIAEwzBW4QbhquDfD8bUMam2jiEhdwZ0oaByGlS3t3YehRKmoYAVUCCEAcs4qKEiPHvkNy/PxD2DDhf96nqn5Z62evFRTjoK6Xmtq7oCZBYth9E14jTV3CgzxnHAbnuDFy+eYf3VBuv1Gc42Cdytkcg3sYjehelMptnf8+vxdygd4aZFmE5f9XFQAUO7kUhB7qZFht6U68XKz0+lmRmp8/bP5sYCjpP3SQvYG8UI1BPStE5Ulcy4P/rKwE6ddMyhewKKC4OPsjIHqwpvSuRut8UXX3yJL7/8Es++fYZxHBsh1e/TanFnPEz1faooiqyqIJfx3NRUtWGv0YP6kQPhdp/lw972EOsZ1qokn2xWeHS2wdnpCoQR4/4VLl58jQTGyeYcXdrAiIEZ+UD4OUwFkG3ahmZ7XG1eb5viYlpysLWKcu2vGLHN99IGkVf7jiLo1+cm7/T2pvpAM/dm89hzmUb8izWMEBG4Ypw8f/4NLl4+R5fI9Dlm9L27UjH7mohShmK1UF7odQlLVYSx3HwBsp8c5m4IuFHI+IUUykBSRc+K0zXjdEXY9B4IURRZ21bxkd0cSpm1bEbOI4ZhgKhZARmtkpFnJ2PXLqArs4Iw2mFLUbhK7oAqkoNkTBswr0FYNSCYAr6PVusNX0dg64VKgKoMkrFWoFiMmkWjSB1V9u4KrFq2y/1e3ok6b20faK1mpfxerUMPHXVr1xYVYbKClDlcon1hcX36bun18mnrE2WflrqtC0q5p4TR0328bP/2Au+j+b55W+nb/KKVq0zc9mvbL8AheB1Tipma/PQg3/I2qYcKAThP15b25uU6HEsLosGDpfvmN7/vQfWKScffcttC5z/Y8J/oIccyba9N+TLvlo6+r3TP99JDtt3rpNte+tBt9poVXATdW0nuNZ9dLI1Oh9qkztWu6X0BiYAfAVD0o0iTPakOEFWdCgqtSdtbL9BD59jg7tpOkiMgEYUg/zbBoiiHFq137irWTvpwgUsKiPp9yYEiyn6qRx7xTJtHHVQBQzk7qCRmKeHR1cyVzU/tw7OfDEBiHjGODMIeTGKcLjxiHMeijEpjwTaMeyufDtDUI6UeERo2lBttVjJXtb2kcQ97KG81wY979CAId+jY1GWLYMN+YGbtJxGpScwthz0Kj2r2kM4M0oTiw7PYt8ckr/kCbAqFkGBUcgVBSl3ULZjCMmGSazlxmisJ1a+diGEWaH5fOxfJ265IiqEULxT9jqSoChbc2icUSHNX6cAplTaOoqpYuZTMgoqUkHXEPu/A+2u8vHiOZ8++RffJCVZ8ZiePjevY/EzKdNOqlBFgPFrR3GHpBMAIYwnl9ISk9Il9ccDDm0fJuCqU/Rq0hpV2qxkCBY7l9WR3ZQhAJFxtpsp6bXuKxvRT7ejHxiLB3e4Ac+drIwu2aeL60cyXcNkkru+2aG2C7XaHL7/4At988y2GYXQFlUuXacy9hZNcm8PZDRTNlShrrqf6DkZQjBUfixqk5ykV4FWb1ZaoAwFgFUh26zm3Ljo/2eDTj5/i46fnWCWBjlfIpNiR4sVzwWZ1gqcfJZydbgAywHkCME5Oob29Jqew9d4W/5zCPKjTutG6bM5afEaa5EZNPiX32a81j7CgquJZ2YW8P/2JEgCAJteC5L/cR5OK1LUPACfCfrvF13/4AuO4g8geDDFrG618cFGn2M8p2oCbvCd19PJoWD24JV+cGLtFTABOigBnrGGJgV4V66Q4Wyd88mSDl9fXyJqBERjUow16PcOmwsplFy1CGxnvGfdgXqFLVABHTlTW2Vj/OYWLB/me74cGzZwxEusVuu4UiTcg7gxATVa/doi5oStUPUJRzGNKGGWs+5gv1zaCgPWqR79a+7ITa9ZE6GrGRl3fY56JCBgCJD+EmK+cTb8spWNCf2mvePVSzxMdyfWHT2EBB+9zZkaXOnR9h92uWdtiTDQg2Ty1a8txygXfa45YmbSAlmorfx1RymLilfpMFbqD+v5gwMCH9NCpBUAPUnF11Qpw/TCIy4f0vqVbh0mjw9wTOH1X0geg6AdPt20+DQ+CJzoidDxseuh3zPxNQ1miZq8+mDNvCyBqhZXZO9RcK8iV37DWqKqN1Og04gCRwsLdEwMyRiAWJMTpfQdx838gQSNSlWRTnGEm/wa4mOWNWbBYNCtTDqrlQddpsSQahgH7/b6AM4CVN2cjz81CyGlA162QeIXkgr3F90ZZrELhKjK0mOKRKBxrBHG6n6DuEebWRAE8lIglVek1kMtaLsuAlAYnLG3NeEPo9nwWZPcAasiFzjl42FoRhdDaKoSHQsECMAAUIMQUGbNI0CIoOOltoxgCFUioP9EDDNtaPwOK0gQcKBoqRf1NYTS3DTOPVwwYxh0uLy/w9PEAXRMoJbC7xhV3hMm8nCrjRXgn1Miy5dYWMmijXzUAgP8tDgxRUnQdm5scBCB2fJa9jlVxUkcXiNiii2nMy2MC/6FC1loOAfVTtXkoxpVOrXMCYGifL0TclAAO4mEq1X7x8jm++PJLvHjxHPv94N4B1IB+dl+4VMQnIeatVP4vdbL8sFjwtSeAptZaK0jomX2cN4CDgbkOyqlZuBCATd/h04+f4meffoInZxv0GEH5xqbmAOwFePbtlwDWONk8QdetfQ0kBDn4VMM9rtBK20kAoFIBpei76aAxQC+aHXDrxWodcNjP0dduraOxhps5vHHZzOZ9GTeh1NKkDFGnovD6WK3rpY97tb0iMePm5hoXFy+Qx71ZdEkugBCcMJrV50+J1OftIIfKywSwIQNA2Qdc3GsYsu0ROeeYPJNcOiasuw6PTte4yRmfXJ9i1CvIVoARyE51JtHmBDA5R5D7+Nr+wFivTrHZbLxtgJQ6dF3MLXdN1mxAUDHuIZAfhIgYMJUBgBK6boOTzRNsNo/QpTX61cotD6XWPfqhGSvknUKAu4RXdxFmchehER0L8rjD9ma76D1y0N4BErf9PxlrviCWNG/v+HUZhAgQo/nhaHnexsny60pWxzlj1HGbCgTFulZT0YrKY5N3l7at81p16Z0zea18ne7xJbdyoFP340MZoILHRmB+a1d8SH8SSaefHgxjqje0
n2+zDG/zHR/SW0mT9WO29hUd5/1bZD4ART9oahDrsjBRuVYEDcXkpP9tlkeRbxH58R3XrqivL7535UVV6Xx752qzRVlRrAUKwBCCt7p1AJOdMCLmvIEoRmTmXCPknDxheaFGJKyUIUJuPRRkuC4geRhx43YgGGm2AMmEdWYDjizqWUbX9UgpFcui4KuIiGMigkGz+4ArugR0nVkZRJVDsWzEtFDrAbAHpudq2RYWOh7GnJOfEnMyEvDiioYifIkYaCEygsi4PiKqTAVxGuFyEW2n8nFM0I2uCkCpzTCUwSKATn2lyj3qgERYy2hYmqEFGxBaY3nvJL2WJB7nyqUgTcmjXbwMHBZvUp6bvswARtWMIe+w229xvb3G9fUNPjrTmpeqh4I+3LCK20A006Sk819Q5zPNTtmNZAZxmgwA3DE6ZeigyDlsvFqXHrOMCjcykQAIjWCejwBF5Red/zG7b66Ee5uG0ln/17hUNjMj3lYACSYHIID9sMNnn/0Wn3/+O1xcXBro4yBzHc8xltAoVEHea7mbZaADRarOR+UXox2JSkQpIgaTuw8xmwVfoCu+rRgRrwMMTFilDk8fn+PPfvIJfvrxR3hyeoKkGTqYcn+z3QK4xotnV7i5Vjw+/wkePelBaQVCsmhaxV/V2ponf9cuKJS6bdvLvH+mq3tV8iKbuj/S/F5g0m/GFKT2SRZgIJouXIKsDxtllIApgXVtu/kbqFgnUbNeeDZM2G5vMOy3yOMehIyO3cK0MW8PSzrjNQrluNYy3Nmiv4trrGopR9tW7FY9iYHMHvzAx5GBJxmgDqs145Q7fLo6wVYI26wY5Bo5S8MtBA9Hb4A8O8myWTQmiDKYO6xWG6x8/+n6iNZnpNFEZGTUohh9HqtHkyPqIGruYKOYo/JqdYaz06c4OX2C9foUq9UaxAZSMc9HB2BoprkrmvUvkHUs1l1B8k0QkO5B2OHV5bfY3uxgvD23COptvxOBuNnz45DIDwaIW4vSNoNmKOlUL1i2bHkLss2RPWiyw9yhr0zc7RAAl05+r8B+m3PdJwloaFJaW8KDl01K2Vp0lnffUceyz/C8D+bP6nT/999aXpt5n76Hut2fVJpb6r5+Bh7pcDJ06po9/w0LVx4qKeoBRJG6Plg0vSeJEPQdNYVu08ob79eC8gEo+iGTzr4sLQYyP4J522lpE52nufJ0jzzjyUawWg5aRTguTbzNRM3nlFivCj0OOJg0XQCk5BFoFAwSU6KICZDRCG/Zo01pLgCL0OjHt25VRMYxpGI8NbbxCYhGJ1ZmKIfljAFFOY1gZozj0LihOXmqKFRHF9ZHA66ycdwwZ7OG4GSKpXJ1pVGzeFDhYutjimfCFLgzYIjZorOBEog6MFtIdgrGRygUI5h7iAi6dOnfR0RHF9uUMvQaMKbtmwLSuHhKaCw+bksh6C7AMmq0ouauYfeYgtIsjS2B6uLp0mG6X7lq2WZPV6sX/0ypdxLc1n2JwKkR0hXuvjRiHAn7/Q12uy2ub14hizGZkIcMNwF+GfQJCwrS+LyjBlQebmop8pPfAAAgAElEQVQQvV+TWQ117oKoHtmvN9LqiFql9UHrqwYG8OhoB+93EEA0g921pTb9XLCbKgCMZBZPFP3bKDkNYGjRaODXuVGSjOB2GLf43e9+iy+++AK73Q55ND4u0ZZ/B0XZBMz1hgp4HnU5VMSiMVXhrkFxem/1U8DKJwpxy7+wAEkO2HHjxnOy7vGTj5/iJx9/hEcna6zZ4RUVDDd7DINgHDu8vPgaN1eCTz/+BU5Oz7Hqeu+bAC8boJLi98Zx6cjwL+TBBCyRclJYP2HC4uL/5dlQi/ZkBAcbxffS7mZRVNaahsy4NvxyWY9hAAHQqHLpw/1uj4vLV5WbygFFhhP2iu0V0NpMSo37GQUYFAXy8QXy+wjmWtuCJwZWEthdoMl4bhB8V/YiAxLNpasnwuPzc3z8NGM/AmO+xs1W0HUAJYYQmdsxERInfPTkKZ5+9ClOzs5xfv4E69UJNutzrFcrczfqLMoaO4EwE5uJklfTrIoMMAN1ECFkJQxifbNeneP09Ak2J4+xWZ+i6zvE2sZU+ctqV9n4UTPn9YOB6nIcrruEDNI98niJ7fYawKW1NEVAi9maRQ0g4MMzLIjC+q+uao3QT3XeVsUuLtbfChDh62wLHrZlePCkqOOn/lTfOb99Dg4tXVOUz2mRa9to01Q25bzuzQFc2VdjLdOw5DaL5DpZlkqK5qCp8geVvZdiSh1O7vqTLpT/oEUOnv+Q3q30IJZ3VOff1Ao91uQqp779pM3nQzKKf0hvNSkwNVv1PaBVdlWBhgf3fUgfgKJ3LYXSpbOfvre0iN48UL44zNsVYS2KQ1mOa1sIN4r6YTKT4/uu3nNl8Xj5Dt/nFgTuIia5KjlGQN0ITWquCBZePNzJRrCE9RFAPNYtKNwOnM/CtQj70583iIMhyO6G4qS/KYF5j5QYYx5hZJuMnMnd1MIFIiKzDEicwKlDQgdVRiIDfgqXD7EBJ644J5DzdZiyRv68ET4nEPdg6syqaELIaQTGidfIIkh8CaYdCCNUBhB1oepOBNkYF8f6i4KT6F5D9Vg+weZStXARRep44r7TTsaWHPOhU6u6mHuYR3GiCM/ecICoFsWGoilcKDZrgowhj9gPO2x3N7i5eYXHm8cWgAxcBauZW0e1mPKxrHcrLwRaUveLeh9uil23AqAYxz0AuOuWWSwE+BJWE8EnYhYcZo1gFhhNbByt9RXNTuCbna8HzX1RV5Q6hpWSEc+nWl8btG4xxK6khhWCuYgSESixhdtGBitwdfUKX331Ja6vrxqFcUqYG21RyoU8ETrVx6A2ZbYuoBKhq94fhPHtexQQQdbGXQgavk4ABB0Dm1WHTz56gk+ePML5Zo2OBCoZMgyQYTAZJmecn57g8aMV8niJPF6D8QjVli0AU3csi3LP5kUZz02HtDvMcZ6IuKnlpdJDOSyAoQCvSrlcWdQacc3mUgW25lxRy/OaYimuZS9lDu4qs7y52e7xzTfPMGYBcUK4v+WcMQ4erTAZX5StoQoSL6ArzihlrO+fWBuhLXcAooRWuW9dKOGgMjEBWdExoxPgZNXj0yePMexHyDBi02WkPiGTjfIhA0IJTx5/gv/4H/8aP/3ZL9FvTsDcYdNvcHbyGKtVD2Z2a6bODhi8RzRCbyGAUoIoAzCgSImQlUHcY7M5x+nZU5xsHqPv1hZ9rrHU4lmfBA+XFEsoWDRDj1SoDiYyASQ77GQPzbaGxIA8hGgO9/+Jq+0MTCgyik/Kao0ynfd1iIWSGVEj4/ohEPO9JapFWZSIZoDR65aNKLZRsoOwZh1rFsYCGKkDuLGuB/9RlYnb+dn216xczfinZv4srzVza6HvV9r+kN5eeuO5FLJnux6X9XlJf3mN9DrDqwztt6WTfUhvL83Xpfm69X6lD0DRO5Waze8HWxe+vxcbzYYLUIZK1NMlFw4m4sBs4f/up29LE/ku8KEK50bSTE4q7CpLMkJrUnMTU1felAQqDOURpCZcQ9hIiEHO48I
gYRRXtlD4lEBILpCKU+WQE+sG5wkwZkYSQkpAztksl8QAqjyKuRjo6Jb7GawZgtEtPZIr7glMPYr7TQ2JZsFMiQHujOuGeoB6s+KgFRgdCMaDww4UMRO4U3RpjXHMWPUX2O22UDYOJehYTvuX2rpRKSeHLMeFgIlKNevbmooQ2l4lV7KYkThNTiqrMdOcR+T+Y3CumAZP10Lh/B5z7eu6Hn2/AnEQFodCOL2/tUQRGDiYZcAwbLHbXwOUIZprPxZY0Kxl6sk9NR9hTXVLXWc/l1M5CjcuB6Faqw8nCrffDAQr1hLFsoeRuIPkjDZSW32PgtlIhJsfCwgwd3SoIFtTaK5q6MSSyC3j4MpGcvAVSuAumRLK7LRFghcvn+GPf/wj9vudW3JUIIt8rQjgqxaVCwl9gEtzWZTacY9aHQKcmygUUK+huNIscCsCA6qJCR0pVv0KZycbPDrZ4HSzQkcKlcGAZBntnypIBOvVCU43wH7/Eq8uv8XJ6acg7osF1wHnj5KTlUdBD/ustrMroDgG8re/Wj/IwW3etg6Ys4PdEvO4tGCwmjWzvgFVJmvJ4hBvwKJmSGmAiI6S3txscXV1bX8rF4B+HEYMY1hIEEgEHbK58Ln1lPfeBPyNtde+L7k5TYs4BUEqoGIWWoLEapaSpMi5A58xSJ9i0yfsdnsIEUYlDKK43meAVvjln/0C//vf/Cf84pe/wqiMy1fXuH51DUIPaO/rfQQ1sPlRWlMRm3z0FCJKmpLvNWmN9foMJ5tzrPqNWZtGnRsAv13sDHgKziq/DdlbMOa3WRUxdWDuQdwBbvG0nKaWf207zlOILe1yeZcsUvnzjl17i6kBE4H7y03HyvU6bj4x+wgo/I2x3bdzsrjLR6SpmGusLl8p7m1ZsSQf3HvP/qCQv49pPiZpNuYjHcpv02uWuCw981sn2RU54i2OmZL1B7Do/UiK1ko6qAkmuwnp8c3lHU3vP1A0l1UXvt13eh1XK7/vFFvoYVl++PEVisndJWnLXts2FtYiPjgm5KDIQQ0Vx3qmWlUUTXn6/jsFovm7quRHB4V3t4qJvFHN4os5NhSQDqDslkcEJuMOUk4QTVAdQeJWDR4pLUIBKzuglMdyiBFKT0RbscgxpREAgkc2M09F4y3KYB4hWUAYMdJYlVKIE+daNBfREVlMKSfqkRIhaVcsnAhhYROKfzLlnux+5t5AI3hEHCdpNZN9485QHbBZP8LpyVOMeUDqCMOeMOYtiATQcaFPmsZuT1YWTzenoEBVP5fHaXUDmCqU8SQzQzxyW1jqTIfT3QJCuI3U983fX4pwUI069w20IK6n4THmTRGa+T8DyCJgFnN3ySPGcYdx3GMc9uhSDxSOqgp6Ls2VChCFUjxN06kwB3EmGrWDJijWBeZCVi1szL3RQJs2L4tSNtbygjCxuCAqVnWJGeHZU9bQya3Vbc84fqaVtSciil99H7lWSB7GvZLqmnXAbneDL7/8PS4uX1RB1Mdra6VSuTMcnMNUwK1g0RTcKmCLD1lTuF0gmcgcMVab8teGhxLQJcLpyQab9Ro9E2QcQJQNLMoZELOQyqTY76/wzTdfYjcSKJ3j6Ue/wnqzwdxqqLb24a8BF0X7V16gRvEHHaiA8+k9dZyZvrfOxZi/rUVNbfHyziYa3iyjwz0EkQdNby1gZoAxdTzvdnvIsEXKO5BkUCaouBWMKJTNNZjFOZSW9r1iUVbHz2QsLEg6vsvY/6KqZOtGUkEHQhaBEuOs79CzYt09wvnJClc3NxgyMChhO2Ssbwak/hy/+uV/wK9++Rf42S/+AoMQXl5c4cv8FVTVAiSwcYdRE9Qg9kJAy7phWJeDRW4BJmSE8GFxaPsaN3Vs7BS1rTeVtURFnaeIYIaB1gYQ4wa0CKSRNyZ9JYUQHpO8j6elNVwPfl98srECPQQ8HzbFAUR5Z2OtXW868lyTjvED3SX7TS/7OFYzbOQ6NBAjGsWSqExCBNeeleNwPcDCPLhvaq2Nal7NxnuvpItf3zS1AMat7UsL6+AxMXZ26a3qNdON9vC3sq/NZOtbsrnjhbPnli3eDvfXkKkX9Ida0lKOeu9Bzj5kXq9VDw5Xbrlz+hcd/HZXY0Udb835+7RgfAvpbj10jvTdktfyE+3L7i5Q0dXaHOd7xA/V5m/+3vcfKCpCVJt0evm+c3l5RXiDIt3zhYvK7hvmdY/0MDlpsaC5+4UzDTiUJq2Ex+XU1LKOH/27v4ePC2NF2cL0lkWQ6MB0tBEQJr8tvWvqXKOurCkAJHuOhAA1ATW525CqAGzRhkQTWEx4FhBEGIqMTGZFpCJ2osbZyi/s7jRhhm0uaIYIoQGmXZFlQkICcQaRgLhDHgVEGakP7poRWbYWfQ2ACiMrYSQG0wjmAT2y85v0IO2tlcQiPZGTm4IY5C5aogymDkQdwAaOKZH9TZ3vth26nnF69gmUGMP+FFfXz7HdXSDnXbFyKpZlIOv2oumJi5UK0wjavnSlGW3kkmNj1JQVhluIlOfNPYNdyR/3o93HHbIM5Z46PsTAjbDiAWrodwrFtkaBaz/nqYCMEWo+ykTV2oSU/TSWoB4FT9VIa1s+JIKCVF32HkC6h2rGfhwwYMS6c3LkbC5/NBM85y5CSoenuDRfc2ny16SuhjUqkCs5dZzYmcsXgxolkKAgP3ZWNdcSjflNPLGiCVAgwBsD08jXlFT6KyyIFGoRCiHIqlC30itARoBARN7erqYTu6OXR/UhIHXm3kZkEf0uXr7EfrtDlxj7wSZn4BTsyml12fL2Cn4bKMIiwiJblZLXfvB2trYZo8QwRyGTZkP5YYVZFsbbCeihSKpYM+HxZoWTPkFkRCZBysaNlmUP0QxkA7DH8RqX+z9ANGG1/gi77Sus1o+sJUgBD3xgRh8KpcFcA4NfaHE7a39UhJXAgm5ZxwpFJC9GuHPZfT4nZpu8tRf7Gk3ubuljYAL6BAcUNXIC+bpufQEKWn+Ag2fH55hSZy6JxFDK0LSC8AYvr9Vw7zGD8xanXYeePN6gAoTB5yiDxCxEKbVrUVMXymVPbHZKH9d1hIQL5tRlURrpt7M8UkaHAYkIa2ZsUoeTRDjrCTdZsRuAy5s9xgE4OX+MP//lX+Kjpz/Hqn8EyoTH52uc/uVTXL56haurq/KmsiF5QyqSKfpkwJA6H19E+iSYNVLihC51RkNeFEz/0Gq1FnNbRMqaC6+jRZ+z9mSw8/MRhAmgbHxLRUFVULJ1RJmRhSAsEFY7VClcVnCCdh8HsGvEhFQsbcMa0iM26nEluLr0ikeQ9DaTyvlDCgO9IvpmrM2sCxlmANnXSHODLZE5232xWOvOoovelmI/ir0tgD+NuigIGVmzgcwYYVyKAhKB2UdnGADsETtJIWTrb5EfyavmQCF7fbNmxKHYFAAY5wVFM8A9Q7YwgCmVfIkIyd0Zqxu/RZll6t1NN2Pi2qbi3GJqe4Qqkl8PTyRbfu8jVT8gRDPfj9uujTSL+RAy8jQfrU137FVAmTO3JskzEZrM9XZSzro22C1BT+BSUg5r2nu01TxwxkK6HU
AI2aS6o5r1PvuaU8Fkuz2CSXj5SxkPD+nuKBWmY3Yh+b5qQmS8pwlo8RqpHm7GgurdEH1xJxft67/v+0wVqD+e7qP+E6aMQUp1ehRJt7TbnaVaKFGzJt+qm7yt5LrEd3jtjwAoAkJULMSfb5QFNYvZd+zI+yx2RwcdHX57ILDofrW6ewN5/UTNZ0jtVGbiVPFsBOXyVW9tA/Uy398V7XgFF7M4lu3cioIBKIPjJM8fVj+WIHFFV0cwEYTM9QwYABevCBbxTCSHEU89+YAAMlrIejM/KmGI4Xwnqr59JUIHs4JIqYOqKbRdN2AYFcOwc8VCTCxWIKMqG10iaBJ0zX5lJ5Nc+JEMMEou9BsoFNwuEeEG6BBcKsyEzeYxUuoxjmdYrdZ4ddXj5uYCw/jKlCIZXZBDKRdiXitmAQVChQtLkbioBQSZdx7B2/NgCyEf+gaETlyCVBv5TKef83ncjOc2zTlE2t+BOoZjTBX3MjbXmkoO3lxv9IFaNwAUpOjqHFXZuKug5qYlpkCoaInAtJjK73Mhc36/Ht7jUkmordIqHe16MDtFVZgbguRc2t7I1eXAeirypuZ560YtJ4Fxf+U/QlFIKAZUYx1Ck+9u7VTur+TaTISUGOpWW7v9tric+RsduHX3Oq0CfnuyajpYE+petcmj6VM0sFEsoYjIVD4eHOhkdZhEBMoGmZEqNmvG47MTfPTkCU7XK+RhgFJ2ovMBOe8x7PfIg2DYG9HxXle4uM4QfoRvn32N00efgFKP4rSoAJS9vrFGLMy711lYQ6EvfQyAKj/U/PnCq9ZOhMn8r23azs1i71S2JTosEi3/UQEnD8OOhM3JI/zil3+J/c0lXj77AtsrxeWzC6zOEpLjlgkN+NEo8i3vUIBXMce1KCoo61o56RZ1oGS6xhxY2RI5RGPKAycG4Mo0J+PcGjOIFbtRsV51+Is//0v88hf/A9b9GSAd+q5H358CSjg9eYJXp69wfXNj1orjvgDXGuso2nWWHVAUn3MJjA6sHRJ6kCZAbb+one77AAFBTk4wq6yIjlgM+zWjAwrhv4LqvgjjMLNDm+DDiZ0iylcnHfGU8wmFYzD4kloOIi36VvDyHUtEWIy4Nl3N0NQfjTK3JLPUa9XFIX6npkpLNoCvJyovvT0iRhbxrHEhV9xS7nKocaQE3jkHoPLC7VNpOSyUDm+YzA20+xDFIlYeKNbI2u5hZedFqfRErv0e05Il5LFbjz13MNCOpKWtfX69OWyx3/RQLjrolOmcg+9xd77vVtn9/i6R9jZaGIONHFCI6ZoxRXE4FWU53OtuL/vd5bOtpY6xh8JfdP7XnW39rqf7NMxxV8Nyx2S99eFLiMC9dS14o/KFe7T35X2L/TbSd+jrHwFQVCff0vr5Xs+DH1FqxfP213ahjo24VfqKEPJgBZnP0ubv7/ieOSFw5UNR55cxdzSVBNERRBkkAhNtM4RD0A5yX4+Q5lYjATiBs52A+CkmRFy4NUJtEbj7lwuQbNGwiE1BsGgyjGEcLEqWoijygJMDZ0BTBpKi8wM6qxMBDmhZqu5C7P8jNeUDxUXNPhWCLjE6PoGsz7BZnWK1OsXL9Efc3HQYhi0UeyhGQN2KgFzpcf6aOmaoCJQqBC0nWFLGzaFJ8ZRXpWLDYdlh7SAl2kozMJbM9yOTEFJVC8ADuBK3sFHdx08+TqdVFFmyW69QMX4oeMJkzXNybiHjxFKFyIhh2CHnAVkGiI52ykvLE+uQdLhaRs0uTNqnVXZKm5UQ3zTpjzih5gDBHJCJUNdEYY2nHu4dBrDOuay0KhtV+Uazu2PaHz6OAlghAOAKNAL1BNraO0BRwKwzQpCtpOIgYBgHXFy8xG63xZhHU0oboKK4orlQHWq0/X8+1oACCJQaVd4XN3hyOT8ssMJ7KXsERgWrOEBkXqMdA2ebDc5PTvDo9AR9YmgeoDRi1BEiOwzDjY2VwS0yCCBKAI94efENtsMlwBkokdUALcfXIV2hIcHHd0shlBdrDDpYvyvvf7hbtQpqwHr3e9dUx5x+C/dDJYsQB1SuGgWBqUPXneGv/upv8Gc//RlePv8CX/zuv+LX//r30N0lhNzakS2PsG6AclWsKITSUjsUgR6tO0TIOzEOp0BR68JTMxMQNRZHakBo5+umWbkyeNghccLHHz/Fn/3sL/Dk0adIfALNHVSTcXcpYd2t0D8+xdnpDvthh5vtFfb7rXN0AWPeY8zZQf+w1DNgCm6FE4cJTD0YPaAd4O7abR2o6WhCApFZ5QzjiJwHKASUDFjuYPO4jB1vH4FZJ8Z8jnYxQnlFscyb9X90TdnHKeZjVeJU1VyqBAaeLAEaS0rspK/enXRXiWKNpVjPSVFPrOdjL4jJp41ym/JGzX/fLM2AiMVrNd1q6Vueer81ie+99NOTmncyLQGorXV2/dHudo3/yPx4W3W9jZvu3Vs7PqQjaSK0u6z0HqX3Hyg60t7vVzd8H+l1UO+3lJrNI8S0ABnqLS1IdEQ5/y5prkkeXPuO2U8sJKgos8wEEX8vM1gTKEy3FSb8ymiAkVABiqxQ2YCWMO2XCtMQUEm1IW4WzTA3rgBD3MVI7ESbKUg+9wUsGkXMzc2tUCA7QCyym65MEExk5vcqzlmkAyJUdokGRe6S08AL6mARADCtQCxg3aDfbNB1azD16FOHq6sLjOM1ct5Bxp2dPou6ggPUUWMVpxJqPJSCeGOeCebHjk6myqyBBFNhoVol0OS59ls1oAjh2KzCQuCdn/TPlQZtBW6EUmJZjXlEDuuaeFcr2B6ZrqbACMZxwH7YYru7wZgHgBSjjDb+iCdK2TGrD1qoe9sC1ZKzbW8geL1iLpiVlmldCgcz3Z0xOKEURs4ONcsiZgblXCp1QHwcbR+bcAGI4rMFf6g8Y/xZ7kDoIFHhTYKZoTOb1ZBNYobkHLY/yM5xtF6vcHN9ha+//gq7YYtxHCrQSOyglLmeaqn7dDzMvy91qbWPAW+JCF1yEIgT1n1CStaPBECHESRmaZS8WTYd4+npBp88foSzzcrWHgg0W6Q4QJESg6iHJgcyKOFmT/j/2XuzZkuS5Dzsc49cznKX2rp6n57uAYgBQQoEiVcZn/Wn9SAzSSYziYQkgwgIi8ABB5hpzHR1Vd17z5IZ4Xpw94jIPOdu1be6q2cqym6dLTMy9nD/wv3zUSLajrBYNAhBHYiI1fKjRH19C2v1dSmbcBTQTXzuHQGL7pOmV1NBogCYA5pZNdb7FJBSA+YlOHR49HiFZ0+f4+MPn0PGLf7r3/xnQPZm9VGrJw4WoXD8VI2a1zyRXLCa6ybzbR2xKJrUAQAw5nuUG0/3Ea9HBk+JsOhXePzBZ/jg2SfqciYd0shIhlKyuWh2fY9+vUaSEavlGpvtFba7Lfa7Pca0w3Z3hRhHpFjAlTxNs96VMuAKkSI/k1sLpUmfCkjJ2hGRMGA/7iCibsDE6tqNPNfdikng7pJRFJplgrW9AYpSnjCXD3RPM3dK28/hxN2irkjeV2Xdx2Q++KGRWzY6SPRDA0XXPv9OIpKN1VwPX/eKm0yuc
53HfJ8RMkL02oX6TRN5l+YigsoudnSPu+VhP3ZdolpZ3s30jhWuuGzBOn/u9qUAaHl/rAIPoUxMCnXz7+/TO5hk2kcT2f06neTdTD9+oOj7TvMJ+32g5vcQJuorJ5ti1mhvec7bXrAFRdDPp3Y0vyCfTl0Lpn+nAlRCfpXu05U3nYiVEzQTpMjdVRhJxKx/TOg0nglK0cAWj1Y25qg6lDlpSjnJuFg00oxxFcBCY1tEKSQ1mJBkFk3CSDKiSQzmhBAahGGPIY7AOKpQb4ARRC2NJCbIPiLGAU3ToW16UJO1cD1hd84iU769XQXq9y/qFKB1IXMM4AZEAW1LOF0HNBzQNitst6+w37/GfneFOA6IGEDGDxElmt5k/C/uasPTU6DpdLlZEC7uJpQBCw6Cmpz2eFdPFePJJbdaCx0nUaz3DsmKo4eeDhbdrih32WrF22T+bAJiGjGOe418trvCEPcICXqCj8IjcN14VvF6alF0cDqMQlSpX5jyRfO5XZaYCZ8KVYDObGdVKg9V9GCK5kGPZs0gFTCVADAZEBXUMokD3MbE56e7mKkyyBXgSRZJizKfCAWAE6wvtO2ahnF5dYFXr79V67ykrqCm6iNBrXvEFWVUgFBWLuf1QamDqzgiCKR8T5TUOmXZN/jg6WM8fXyOEAj77Q7b7Qbbq0uM2y0kRgSr56IlnK16PH18inXfog2MQMFkF52/xrql1jEcMCTB8M1rjPsBy0WLrm2QecDKYIBb+hwPv/Bmqbb+m+8OGTTxP8dzMgzjHEL3fKh2TnnipF9sDpBzWPjY9VoHQHpAEogT2naB9WqPRX8ODj0oDQBGi6ZXylrgifLc6WGJKuLOlTIHjADJAPOx6vg7EiN5troIEkQGOI+TQ1gcAh6dPsMnH/8Uz55+jCYsoC5hyieWxJ+tbl0NCF3fgZeM5XIJYsJut8du2OLi4iWuNhfYbvZIiTCMyoGl8ytZORTokrzMuFIWDQCzeU+EMrUJzAnEEaDRgPo2r31EBjZD99IQGrCZxKrFX5qMoQkoZLJHWSOo+sXX2kJM74cigH9/tCum/eJg8e2XvhV5TI4uondPRS2uwTQqPwIKFlV8YrdaEJksxFIOwaYL493QBJsR89xRwHl3NpxrcFUeswW5xp1+DIl8kPqKXIFvRU1V2ZTeMcsGmQ2jt/gkHO9VazeXKQTlINDX22xVfxtH0VTPyd/eps/Rkfq/W930o05TS0HAZcdDVOd98vQeKHqTVGnsN5/GP+Cz7pveWbNPKUAruYJbL6hFRXDF98CK4CHSkeaZnCQ8lMJTiIYAUz5hFjL6Z+5jzusjCihFA4s0fLcROZMAaIxixq2IIkQYJBrlikXHYkoMoYSUYNzP5eSaKCAigimoEh1HcBgwDHsDixhIA0TUoijGEXvaInCDvlth0QPUOrFvAIVGw2p7yHMiiAQUHpOEOhw72XMJgJCg7xiBBV27wG65xmazwmZzgWG/wbDfYD9uFThzxTu7K5FpBQBZGN0yco5v0rmv81gzxYCLVVTTtKqYZKlfrxW4gOkn88fzn46jQ+X52FpRuDbqfAAODbpugbbpwdRcW5/6eyILUoekVjNpxDDusN1eQdII5oURcc/LfP8xXxRl618HeO1UmbPy5nlXyrFbwEjJi3K5HAAs9aNKQfd6qtI1tTrIv7MCIMzmMtO0OQqSoj6S25TJebXYns2ZE0mBEaVm1XHRgkOTwdvf/OZrbLZXGax1bpZkCrWPGzMIUiBJCsDmv89Kr8YUleAcjG9o0TDOzxb48NkT/KuffYlHZ2ukUefvZnOFly++xcW332C/227A8fkAACAASURBVBomn9A0hGVHOFm0WHQBi75BExoQkrqQUISQ8qKRBHBgjGPES77CXhJ2lxe4eP0CTAnR3PcquwE4KfdtI+iuLhzOBuW4jfa9O7pOe7pYj1ER6mluQXC4h6hbWwVGHRn/+ly2paZaM+x/7UcHkWy8ICmoNl6h78+wPnmMtBsR92rRJykisXHSGaedGmZY4APYeCMykLSyPhEXdCWTAgtNCe2zMuh4lxgXmQBCWtqIBA5swKZy5yVhhGaBzz77GT797GdYrR4BaCegNZFy10KAYbDDDAKajtF1Hfp+gdMTYLffY70+xevXL/H61Wvsdnvsh4DduEGMAuXUUwAqiYHxJGBzIcy8YEylXzzwBRFCAmjdo+1YXdzA6FsF1H2s6H7bgKRBYNYIbWyHAWyARAWaOAjvBk5FSNFnk/g1ZXT4vuApW0rdko5Z2lS/Hnk3v9+tDwtIdZepldfn/P76ND/MqJdYyrn488uel4Goal6WZ1/zrIIw2SFYRkTzfnB3uGa+55Q5WwDC+8qUPx6oKB8aZTivcLuV7dTWvry//HD6wmTozovx4M0us/cKEBwGWfBSGTeoT7DMg+l8M0eQLSnPyQfGsDF3qz734xlnP+ok83EATA0Y7rig3vqch8nmh07vgaLvkmoTj9+BdJ8l6iFqXHS+w0nrlh5M9Aab+vFUqQTVQjFVkO8q6N1UJIe6srApBWLw/4idhwWQpMIRMyNGRkoDNAgOAYkh7K5URgoNNnLOaO46GvklmOuZSAJz1BDM7NYbKuimRCBOZrUkIIoIoUVILZrQYG9gkUQjtoaSIWuYdcI4DqbtRlAQNG0Dpg7sJuiueFqEKzU2iNm1YiLuqSYAogZtt0ZoOnT9En2/xnJxie32CrvdJS4vX2K/v4LEDTwihKAI/jlSFUGVpynjdemX/LWfdLo6asqeWUcxCzhwBhpAXPUnoRbD7gNiHgjec6ui7P/g3FMEYQZzh65boGk6K3v0gWTjp4y7CUCVgYqEOA7Y7baAiLoqxko0f8D168CrYPKhCGCuxEt9lRTFm0BTktjquvoUtFhWGOGsAVSqYBqwExpw6MChBXOjljPcGKjqVkQBgJME14oxAwyLRCZmpdSgCR2IAi4vr/DLf/olrq6usnJPWhqbDwoMOSg0plpBLOCjlNpZHbMNXlbHWgY6Jjw9W+HzT5/jq598gi+/+AyURlxevMLYCR4vz3DWAfuzDvvdBtvNDjGNylG06tA1QBsIbROMa4eM0BxmfaWAqSQBI2G9XCn30n6Df/j7v8LJ+hkePfsSi/U5CBpJa4Jmmb/rTWrdXZQS8lYxgNnbwp9Txq6+d0PGGsoRU+qvA4Ec0MtrMtHR60qZqcqfbarqeiOQTPYPAE1YoD95hq++/DfYX/0W3/zLgN/+y7fYbjZomw4NM6IwupbRZjGsAGMZEBJYFK+auFnHvBPua7FtJOmyoXMrFWUllSFXDAQJiEKIKahtUOjw8Ydf4dPPf46z84+RaAWgRV5rIDlyoe9d46j8eBwY4yCADAhNA0KL1eIMXbvAevUIV5eX2GwvcLl5ic1uj0ALxNih69Y4WZ3g7PQcITSGcAtCIIunPu2xgoI5Ub8ekCRJaAwAUtdsyq7Uy36FV9926NsOARp9LkctowCQHWRYlEWftzp27ADBG2ziDn9srFTSSjUB3gU3M03Wf3K89Nclmr1m8KZaozOgaHvsnea5
8FhTYPgWzfAmofYcsIWOJiFTCSm/rNBdH6YlPBbCzXqJqH+4jfVxGbIKJV1kHvs2s35PBpY76UShisZx+tJVljVVZ3b1OGvXel7FZbTl6Y1PbMVAXb9s6c11ndiGnfVmAVVcHwOYHVn3ZiADjp92m/3FxP2XRZ1dmqHUCTBlxVyzs9TDfA+q3LAa7d/Xj7PTR99pZed1/VwGTj8nnLg9b2XW3jq7++OWWh9pr79pU43he9fQ983EDRjallrA3t7Cu+7oYPTHvM9D3m5UbGh13WGB+QslD4ZvvArdFGoMyfJNVwxtV0Jah2y+DctnsaRboG3hQosAP7tjn1aYuql/m7StqKrAYVYhGS5wYVsWsKZOS/2BQMNNIWVf8zQxV3AuDhfY+u62Dxe1zqJJbJ6MHsECFBaUWJSiCKSGkE0qhhkhgmSIc4YhxCdZLF8F6DSVMHs17IDnAMUJqc5OTdVoAUsaoZMIYVUgpwLrXrVp8pFlfFuoMQQRxVtZLrTk/ziDfdukoTTJgTRW7D3UIFPlNKWwk4D1KZFih8rxami9ColABOjBg0xTnHKtsSVU3UHsxj7DbbmLPDFaWoWKVM5/T2OW7uHZZaXO4ryrZYkwX0fQ8LpCxWZoxvvv4KKQV4T0B1oi4YKlVNZAARIIZgBgmOxLIwhDW+++5LfPP8rzCGC0ReIyHAISAmeWa5jvj2my/xCs8x9yJejClgvIy4HNZgEAaOOLsMGNMcrpvh8MF9fPL4CR48eIJPPvkZHjw6wtHhJ5jN7oHTHCktAO5Usagzzl3Doz+gvFafxgtL4hyvqOEvqBS9DYsPwCwUnRx9I4Q1jo9fYrW+BGmWOYIX8CxJnDTvPBaLA8znCyQGxhDg2AMI6HzKfMyMOnKgZTjk7IPswCjuQ0LmNltAm6TWQGb5lLMPcsoB4Osg9piANMxclW9ruLz+JmCqFlZsVlROea0E/3ckroi7lNviImU8qObZRZG2YOMA6+symCKglo8ZgFHeFMY1hmGF2WwO38+0vAmIY7+zC2qdKfLquTxR8/WnBQiMl2W3UhT3tyLP7AIi7HNpZwFsiqzYJETgcihSAvvbXsoVMFIB5lSsn+tYTteTtaPsutIEAlVZLIvLN7WPQWbOdSzhnbk9NcPMm19Nln7u1+omO/xogDRMAOq9aD8Bews32pPK+JQaGZwiJMvsFYeeHxmlWmh4n/S+6qPmV0Uf6L3v6KOkHxlQBDRMkHA16rDJz360tA1Nv6M7ehvaS6i6JcGLKOcNu7a+fIKtp40iiBFKPApValALj/VppQpkJmCaIsUsp9ZOeUoW+ratpVroJrBZFXEr7sp/Jjh1ID+D72biHuJJrAZ8hyU8EhMcoh5QRgSMIDYrHEiWNJK4Qd6L1cwwjAghIqUBw5hAwcGhg/czEPWAZgADu0YJyICLIwBFsWIA6/Ulzi9OcfTgmQaMzS9SAJEsaFQKShpBPGRgJFviZKGY4VSxcY5LMiUTmy2u1MY02KECGP/fmIPT8SrKqiky+RGWelNS6x0b+qoIeY1adasVom317W4JtMlNPJkKJLK2pCRBts2NgrnExOl7ARvDGHTaM7787W/w3fOvcfQHPytAHRQwyC+hSj4lECXI6XzCGEasLi9wevwSv/3tX+D1yXMASySswRAADSzunGOKuEwRER3GBKQUEThh4IRViAgpYYQD+Xt4+OARHj56gsdPn+DZs89xcPAQfX8Pvu8wc4eIASB0kFhETl0EDODcTvVIb9HJ3jllnqITVYKhazp1HbdJC5s/a/jS+JTFJyNirMc1Xrx4jrPzE3hH6LxY6zE7xJgAdOi7GRaLA/TdDCkyAiIcBwAefRcx+gDnejhnfMi17o+MvJ5s3mWwl5Ctg+x7VvCnth5KjSWRgCLlvpo3IvdVBpy2rO+6P3K9yuNZ270NCmnaWS/qqr7S86qYMyt2b7aOUmfiBOKkyi4AeMVBJObSMKwxjgNmswPY6UQGb8jab3xReerWTHu3QHWEeuLMU66izMepWJtStqwp/WNdxvp3MoZ5xasYiMQw0MhtWi3c7AWt1Rksqt5k5xNEV8st27KnvRvarZeUOdJebYaPqnsrIKzagd6ofpp8c5uzs7buu6PraGMne091vYfqbkx3c+ZD048IKHrDiXQdmHRH75Xy9r9TWNzxRONedEe3R1eJ3/rNxDptl6C12+S9tkCpaqbWyicDPlrWVPDlfPxNW4Titj4TWjixxqmReC1iRl0Um6QCMKuWxJRteQz+QSuibSdTQKwpcq8FQVZwwhGIPHw3g3cdvAUQJot14xHjIFm4xlEF9YgYWQJfs7SXOjtJdui6DuM4YhgGMGsGJg1ITBTgqAO6DkQdLN4Oc0mzLYf3BrZJu8M44vL8HOM4Yu5rRUt+cbb6cXksmBnEIyhF2EEyZZBIO4UhrgkUVUKtlEwGElNWTMFFcd2gxgqh9Het9DrnVGkvCmPr5sC5jdIJbTuvGemdn8tpOhki1NQ3pRpckCC8SdOLR8QUNGhweTdmRtd1ePr0Kb799lsslxeY9Qu8ePkc/+yf/9/44vNf4v7ioAK3rBmsLjYM5yWYceKA5eoCx69f4tWL73Fy/Aovvv8SMS4BWgMag8apskicwBQxhjUoDQADMQZEMAKAEBLWIYH8fTx98jn+2u/+CT752ee4//Ah+n4B8AyMTiwNUq9zqAfIg5ONfd2fbZ99DOy/uKskhBgRxoAxBI3DU/NKe4KuGH6LgWXB7GUuhnHAOAzoug5dB4AJKQIhBHQemM0XmM8W6LoejiTeGpKs5RgixlEsi/q+nsu2xlOek2JRxPp30nXkULvl2LpIXFsTMSxwegZjGvZra1gCSmc1oAFyyjgX3inr0NYQxwRyBmxVbqtXyHMtEFUaVUHkyEG7UcW6ylZXnJkKm0umE/hqHNdYrS4xm91D36lrJ1uGONsfU2kfb7FifVNSQLjlhzZGCSULZl3XZh/V75fHLj9jQB0j78dTJY6rNTBpoLn2lfEt45RbtBOo2YKSoMzHQpWlpYLdcigUM/D9/mV966MadqmuVUBcWY7cNLP00pa2V5fEPW3X+10NDt0uXVXqtrm135hkA5+3GMKP/5D8uvn/Dur7CPvkLqj5x0M/IqBoioWjKDtX0d1c/OhoY8R2jlGl2O2z1+zl9/vxMcz3TluVb7r2NA7Y/H7DLaz99qqSKghh/0VqgMa2qkzYl5NFO+02i4wSMjhbSJD8sAptSTdUyaijAFIDEJVYORtUGRK0Iq8Fp5RyUlLQwokbCXGCdwvM5wA5jxCWGMNKTmhBiDFkHQoAvANiGuE9A/DqntLBe49hWIE5KvgQAI5gJy5DzrOARURAjhtQuXJpGnkQgRPj8vISq+Ul7h8wDDAAOYnrpPcBAJEHkc8WMAlRnAPIF45twp+rrItIUlA7U0whbkdi6aJDqvxd4qxwFYy0BLA2ha7rfAZS0Lh+Wda0SlU0ZZ6TxN6pldNULHyyHsa7uMaWqwYO6e/tbq6VAjtpVwwxA0VJrVRqgNbKe/r0qYBhHBA54HK1xq9+9
f/h+7/9HAdfPIKjhWCj0Vw3GEwJ3gO+c4hpwPLyHN+/+BbPv/0G56fHku0uDSBnrkYGBjr4CkCwoN9jjOCUEBgICUiJAHZ48vjn+KM/+FN89sUf4fDBY4EU2CElD04OUOs5A2Wtr63LeUs/MVQvpzZM7W0pQfu6dZiiG0LAMKyxXouVSdvyCUBBBF1uG7UCxWKQwQhhxHpYoes6GSsPmZ9JguB732E+m6PrLNuZBJKXOEZeA1VTboYAuAqOZ57ocxsT67urS5oBRkY1MMacCvjKGjha+a2lnXcs8DpPXtYcaBwVN1jtGhCTWM9Z0GuuYhexrhFX1gC52nqzWq8oZRrvyFksp67BzJIpkkudRZ+y9R+LTEFADMB6vcQwrCSBgB0HOJczoUlRFv9IrGpaK7O3ILOg1TaW95HcfxbL7SphyVyZjN81a8wshzJoVJ5qFc03XHF5z6DJ5QJoT1tbXLCsZrXCc7rHI8qeYfs8Iow7TGNitSVvOgS/MW2Ad5YAIVWAfUK2Op5iYmWruQXp9GpBuYax3o6mdVC1b1uf2wb61pXdmMpauOY+fIDmUTVGufL9wbQfB90p5h8T/YiAIuDGING25+7oo6A8IvuARJtP7aCfGrN9S6Jqs8o7ZqvE7hPPaTtAxK1UsjXlB1fBL68CnHa+wOS3nrgyIIJjQOK1WOWkCJeVDQ3IyhExDQACyDGco+2H1aZ0uF2nnvZ/EWoJlZIGyfyVWNJKJ80Y41ynB9YWKNTBayDmcRwxsANoBMghRlHCfY6h4eCI4UhO9iVVdof1ekAYo8r7hBQlQLVLHcj16LxYJ7FaWSULCF0rZzHh4uIM37/4Fov5QxwsJCMV2MH7mSQWtAwm6sYUU0DQmCUEjaLBElNE9AwFb5x2qCplJauFABBZwWzGoPTtdHisPgAYxxEhBAUKq/Gh9n79Q8Yihko5goJGPJkHXE5yr1wK+o47vittkDIzsAXk+DYhBgWKkqnWAEwZdRk4Ojo6wtHRQ5AH+t6jn3u8Ovkev/ntX+CzZ78DP5tLH3iS2BGCS2rw6RVOTl/j+fOv8P133+D07BgxjAAinBfQgMmr8qzWa076LjEjpATEiBQlEPaQgHUAxtQB5PDJo8/x7NPfwb2DJ0ixR0wJCZLFCVDrIVXqWAFai2XDTY/pmOuCatVaG5m3p2z0te/9mvlrHEeMYUCKMWfXKxihZBcT2m7lIPeJRZGlmT89PcHr16/RaxwzR0BMoVi8sADDst7ncG4GRz286+F9D+e7TctPMrU/Nu0T3Y4AC55LZilosWpq4Mt4epLsYpnL6Rxml8vKq5RTUZZVcWMUANWaJ0ZDTvt1YolAVAVStn7LiE5joTIFBRxJmVkJr985g6A2NlqGpFGTwNim/LKArIyEMI5iVdTP4VzfzAr7TXDajtpS8RaIIGWSJUMo14sFI3Kb23YVym1rVtQ+spXx1e337nrTOoZSMxZVezb5po0JVUy/ZRCsiCMDGWjNABebtdj29t6qmtpsMIBl45Rm1G7P+vU0y9R7RituT0Le3GPzylUXTru2t6Zm43xLVObbZG/JoNatVbUXlanCNhnKYd0d3dEHorcGikhsWv9PAF8x898hot8H8GcAngD4pwD+fWYeiGgO4H8E8C8BeAng7zLzr9+2ftkcisADoAKJrsHHN+67ox8W3Y3be6GrBJVWy77CdLwmbofOBNcJLtXcDxXxbrxxZ8lY/2cV1Aas12e4WJ4ipkEsbFx1ssyiiIvuIKfl4nDmSjbp3AVeLHzIgeBhWdZKsOJ8Y9sFqnhJzIvCxwgOnevh3SxbhVDSlPbOw/kZCAOAAYHWIDeAXAdKayCa8ukl27yLeoId4JwASakX5YcTYxhHrFZrjONa7k0RznfZZUXe28GCuBIIKSUsL4/x/PlfgRl4+uQz3D98iMXiEJEjiGZ6vzkdRQAREazZhaScnLlI+0VsGTQYqd5ngJFzJLGUti757WeDdgBuj4QQMI4jYqAMwpB52aHNrkZgtWII4BjAUVIrF/Bm4mzIRRF1TVauorDuS7XlnrmUpZQkMHUQoEgCBBfBOccA0foPDg7Q9z0ulxfouhm63uPi8hRfP/8rXKxeYb6Yw6vXBkXpX6aE9bDEy9ff4cuvf4NXL7/DMCyRUhBXRYhbmqTw6sDskFjSnjMF9ZYQa5GQkgS4TglDBGL0CFHG1PkF/OwQzB3IzwEkVcAlc5fgbQ7JQUEpp5kFzYqpXnyVOisYVg1bvPcdQlgZI8UobnchIEVJbS2Z9jwSRwWsrYXUHiLnyVzAT0ZCTCPOzk+wWl+i85otUa14hmHEcrmGO5wrSDTDfCZAEaEHUQ/vO7jOZ2CltQKpma8if6h4LnHRb7m0rQEIiCQYPhHM8M+cy1K2RBI+R5rpkZwF0xcrENZ4OoySFY1YgFAJYt7GecogQ0og7yuwqB2Xdk8qgK2A9IZK12u12KRxql9ZJhlnt1pX9x4YCcOwwnpYYT5z2m6zcStgEYjVrfc2NVGbRFyBQTZg7WqweiUJQNs3+Y4aIN9Cze7Gbbnb27Y/beXnwGQ905ZvjQlQ/pxBRx2/Ig1cV9s7IKrLnko5V9R5Y7BoCvJdzQlrfMLwz1unak7mzKHXtmwbXc3ZbxJnKkFkVsV/qyVaBWR/r1QDvDV69YHAom2s4Y5+cnQbFkX/IYB/DuChfv4vAfxXzPxnRPTfA/gPAPx3+vs1M/+SiP6e3vd3ryyZJxs885a52iqqG9fs8wdZ9LdB9M72rDv6SOgj9A/eTUViEblr0vZ9QKI9vpWyW0Vpt+/9VXVMN1gpbRhXePniW7x8/S1W4wUYcirPpsgQwcHj8PAQDx/ex3wmrgTOFHOUOEDlx8Ni85AzFw9dv/UhL1UKr0on8nwuGYQOBA+JTdTDOUIHB6IOLs0AHkEY4GkGCks458RTTotMKcKCQjOPYAWwOioKEDihH9QyZBhV6RwRY4AjL2U6BrwEwBaXFGlviAmXl6/x5Vcjjo9f4OjoCT777AscPXgiAAR1YA4gUssQBLBaZJlQZl2SdA45ECRWkgfnrFwEUksHp4DcxmjvnFJlv0gswMV6vQYnl0+pbYZtyGR22m9xV1JCRMpWO7V1gllgbJx5K+BTgzg3JasnBMloli0qKusOUeqKgBxCQNd1mM8XuFxeYhgDUmScXhzj+OwFTi9e4P6D+5i5gBQBxx5hSDg9P8Xz77/BN8+/xNnZa8Q4gJHgCbBg8uTMcooAeP0cBaAkiDeODjIxIyXGGBkxdRgHdeUjAT3hO0QQ2EnmKmYqmddY0pHXel+64mCH6/G7sVJ1u5Td71LIrliOZJ47p0BDkoZmBxdCpdxOlDzNbpYSsFpdKPArljislmbjOCLGiM53mM1mmM16dH0HRz2c68HUwbkOnTd3NF19igkRrM9lHJAM9Ef+XYDJ6YEAZ+XSQFJPBYRIKYknr1qd2TAaRCSYjIe5vjFxsfNhBhzBOwFXEicgWUY1wHsn8xLI1nSdAWEo8qNzEuy7XC9zyNZRmTZljslYGnCkwFAFYBM5wEBsR3BO
C4FHOq/Nfks3JEJp+nv+PovRPpNP0gbYJSuWVmXTMs629U+1yxjh2IJVNX1HJtv9oOdgwM457YxqCYBsazZK7z5CV2DS1B3IHQpvXsEWU+WGLghBhBLuJiucDLVy/xxx9+AkXCdrfBZrfG8qLBi7BCDJIxLTIhICJEvSkPEYEJQ2R8/HCDbpCYWE3bIHqJAQTWYNQxIkSGiyIXNU0L5xswgB4DWteg5w7sIxx5mArEDLhGwFBbmw1bHCsbJcJi0eLly2ssl0u0TavWU2ZZaeAHpfGS/dgiRMk8GfoOd58+oXUOr69fo2lbwItlECAZJCMzzNkTjuG8KICc2sJJSRkGRt8F3ScMRNZ+EczFlWG8lYwV5z4RpT1qcYoIkIRvtvZiQAhAYGAIkiFOnsrjw1z+ljIb5+ChbYkRcaJY2SIjiKNwgypGFFtWQOGd9W3GdLUfUxwzID62irTzIKqNoQ1SBLmAwL2uEQeiBowegaKAojpvCQSb1qq/C7fc5K5XywwzLRaXIjACD4gUENEDTtyLyANwjGbRoF00cB4IYdB9vUUf7gBcw7ll4uOpjez0PQF0zUU4Fn1xxPv5FJWXS8VDJSZSWEAbwKF4LGjMZHGgLhspZgx7B8tVrw5HoMuNpRHYcozmrIOqIzzJFsVvQZKKZ06urqhDrB+Tm3aEbEi11OZAIF4AFBGJ0sUhk66Tggw8BoCg85K2ld57GbYlM3hcjs3g7b49aJ+Xna/HSsopn6+fqw5OlXe+JI1dV2cvgPWZ8pPDWmEtYXw5Gs8VkCWzc8d9Rm869XunfC3Jkvej7wMoAr5esOFQs0wirwR0nu/Lsf59DSDRHO3rz5hKRgnUyuMj0IOYlCah6wHoa52/o1Sg/zNK8VklJQU+vzcuK22Zovok7qqVjARcFasLRs7kI7fFqmrol0hv/bl4s77otX8LoVqjPxONjru5NWUNTsCBHRgi6YhxiY2hBWbMgNHsOs3mOdh/ilgHp59MslRMmpuVQlnjEWC5KY9cKAOquJtVjcXkGdVWFJ4VUiuXzHy8dBfZQ+OhEIU4IhZBnzPgyoXMdpoyNbd20y2juhXl7Cm1YGdVORVMrTxzPxFTeQuwm79rrx2oACGts1qOBREmUchTQGHLztS08j4PIEg2p1cvXuFieQHnPeADWrfExfIS8Ctcrq5FQYJdSjCQ3H5wOogzpxKesPdrF5z71JLfp+pFUceMgJeEVfXHtLWYMKd9/S6D57MoSoBYmzRto/FuHHI8dAUItVDTk5xzWC6W+Pnnn3FxcYndbovdbof13S2AiMWiRd8xBo2zIXxLeIRZLjhHYDg0aCq3phTnSi/NTGkhcvBNC3IezOJm1TQNum6lCnXFbDUjWzGPjATK2Vpt2xZX1xfiduZ86ls12szpd/lj+2foB/kJAYvFCm27BMED5BDSPpb4RgRkNxJtFCWBndB3AVvfI5LxrKohiadV+MqorcMwoOs6dF0nFkXeoxk8QghoGsl6BgQEtgsHyKVeXhmYW4jMQIysFlO1ulSOx5QmXDmVf8jl6Ry5pn5U288GHGQcyoIEM0dwZMRYn1GnUVSQqazUOI5x1KytV8B60f909tg6JygvdGhaD996CezOQGR1Q0KOESfroHblJkUF5lgXT8ZpOmaSYXDmo1FJnM5T+XuvM9a+CkuQcF9lXPw6mYeP23/8i3uPiNEFTCpxJKcdKWVE2dbHmsfMiCFiYAnw732DikUI48rlF0BQEo1Sm3j0QP7a40nlR/pNM/u/BNseBSA63Cbm42DZScRniBhPTNW6LRdcequQLffR7NScMF9zZZqJ+iF6AN3x+wGKfgeUGRl/vcDXE9HXykhKKpXqY/S0mRO+PqotKnjyXvpMPqieHSvWDHP5ysJs+i7yWDMUCIpZ8M2ZpBRoMEuUXMNIHsvCrARmVrcfOoGBz1KW5Mwdgoq7P07v5/7P0dQSJgu+ORrHfqrjfOh7xftklgkhIAS5OfbeQdJoZ2sMMyuelnSIVIjjCLlFD7B7//1pRwWgI7Z4V9LLEIJYYSTB/7xDc2xevxfotPVGlEBsBkZrU/h2sixiLl4TUGRiyd+ZkU4p26ulT1QZykCRxL5JsVMgCl7btLhYXeD1y1dYtiuQc2AXsGgucLl8gRevfsQf//ALWr8s6t/nXnmMSoH/kahaDly92qfEJT6w95P6tYHIU/fDQyWby4sEBGcWizkDi9q2gXcOAylA7RzaxuHi4gqtXyAOQdxlQ8D67g5hGDKYwcqnGJPOSOwfhybtuwx/GMCRYiYVbokCHhHIeXjvsFg0iCzB/bO4QRXgU7qwOmReLCniPZrGwzsHjlN+XvInc+cCzF1PgNOu67BZb4EfPC5XLyA2EA5RGbsjzZwEAo1cvMpb3X4xwLkNbte3MKbECgiITG/tmz+HzZKo73vsdjtwlH0U2wbgiBgY5CSQP4PAjhSncGkB7nNzEZAl7xMuDpcq6PZoD5Xg2iFZ0Kyq6vIOk9Q2rTNXrm1FTBkabW4lRpW5LjvMtX0/RZQWWBrlvWjV4Ranphn/G4HmTdvCe49eUzgICJzBJVaLtuz+N7YMkL6eB7adIwPEQkk2C7wx7be1mJ7ZI+hzgg7dhy8fn4/aUfmJiEx+y3u573uJJ9Z4OPE1A5G52T7B2XRPKvGeJMPy6PPZJCtHAIpviE6R1E7wRXkmAOfx4Hl6Boq+CZphvM9g0TcBFn1/pEzpESZHTPxL4TG/r6/AlqqeCsExvbLMQubeZCbl6q5FLlkPxFgEGU6ClipeVJWIWiBTxTopafcTxjgJAlNtsBSGxt8rW2YCPCdf/TNaYACcFaXSSraOKRQVBsh5NE2LgNqSyix4YjSFsxirNHSjWxoCxAWwTHFbz2T9h2UvK0aJSRVqToCNxRFixPtNi5VP+dY+6zGUb9QKy5/8HCdrDgOHAKS4J/l7dT2TukdgUeqwmqVEIINFWpdzDg15cTdYLHCxaHC1WqJ1otg2vsX1xTV+fPUHXL3+J/zxx1+waJcg20tJlf5M4tN2RAIa71mHKFic1uhki+7Vf423mGZtSnDR/pnvKhQAu0202ocwoO936WcYerAGN76+vsb1iyt0/QYcZGCur1/g5YvX6LsBYt3j8OHDB6zX6zQm5umSfuzN1DV9ztLcG6gCybZmwbUtO5dl9xK+5kEak817yoCrWT2RAUUzQeuBBLg759C0Uo+M/Tg+WFbQ7flyvRrtdjus12sMQ8DV1Uu0fgVQk2BgAuBh+zwUc5XnMIaA5SoA8Ph0J0CRs42SAA8dvwNgsIFFwzCI+5m6eREAjgQ4BfSdy6CtB+SMqfn2xI2E80iynp35mRl1iJHWZp2ZjorfxZJ/ADmxBAXrhuSfPGQEs6o9n+rMmKmeQ+5PBUgvY6fnutPYd44kDpcHZi0xKioAhJTpoXxWL2/G1sIPTg8h35drAqjXx+dy9HI+yrKK91TWehIy1q31RU1WIrzsEt57hBDFbczaNBUlvwqqRmzPMkiY33h9jvvzLSpIp7b5K5y7p6enmeBnoOirp5mdbyBRoQR9l
RzvMel77POD0anjdi4TKgGVxwGL9rU9AwKlUGSCf14qObhlo98zCyRT6h3MzFzAjenNTVLRmYsYjKWSkf/O4NJxs9xpLIAS8NExVWClXvoFSAEDkbJSct8gi8kqK11p1ZPpnNObT/nUebFc8ap8inKr8ZsiQ+LrlGXo2BTakuAdDGjWEkempNam5TIsecxRgFYMpPlMnxffze5B543LvnhFVZPSc4ddLEsLCnNBk6HgSbsttktppWY5/IyYCrfKApBqnMfCL7D0XuZlNeCydVgtPBpiDCHCuwVevHyFf4gXWFz/gsYv4dR6Q+rLQWDPp7xX5K8D61HfdgkEO7e+LCyby+M9Wlq/Y+s/TXAJeIwUJY2HJFZ2AX3foeu26AfJrBhjB8QBkXuAgKvra3z8+B67XYD3hOvrl7hYXYIc0O12WG9u8ObNG+y2O4kFxrUtIMH2hsYAEtRC4h1xXgsGkBJIs+HJD2nIK0thLxnYvPbZIWfrysF/pe9lQI/CysoB3omLZLq1V1mlAksx3UMlSGQAVoyM7XaDvutweXmFi+UrELUSMBriPuQ1jlblPpRGiWFpsnddL3uoAGb2K7vHSQBewjBo1ConcXvIOfimgQeB1aDoGFDDxRgyuNprYtE1bluOc1Sv8Ry1bJb3PSRRvV8yQFc88ED1HCpyPDRsG9bcLvV7Oc5UTFDupKpibXJRYl408yDTPku0L0dj4BDIa2UMHN23/LKcGSAq3yh9Zl2nt6gku+jzPvMq4ZOuOCMftUkTOknimACTe75pl3j66ZylNT+K+9kzfS2Uz4yTnv6sup6Boq+ZJifxiBGXn31VB9XjkylDAEwr/LIN+lYoKf0HqAQ67l1JCRY9/txM/do57Qu5AcwATha8M0AUg37PAh/C5XGYuV229Zfj0ph1UlbmtYoz1+b0WZ4R7JLooP9YuOlKwH0wijDLknJVWPYmcx9pmgaLiwsgCsAQQoB3ItBYfKjcfqrWYqUMFUp5esuUwH2pYMeWSlpymebaglyymqCfYreyz2XE2sxz7dfmY6QUm3BqY2bvJaujmXIAkQFLxd86R7qu03tmEeGy6xlZH+CwaB0QGzQtsNnc4WJ1AUdLeNditbzCH5cXuHz5j7i6fCXrmC14tptRBE8k3UNUQURzGl+e9rSwT2RByYUkcrVXaM5NsRSuxx9FDWRfHqeaNtsMk1IrU+o3VDwuBpYA1N0tdv0d1psbbDa36IcNIg8ADxhChzD0ABiuIaxWSywWCwl87RYABex2O7x9+w53d3epsdnCka1pAlwVWruA4Pa6sCxDzrTm1DXKa4r3cm8SQW/dLQDxmPfl0Sv3hWViE1wxr0lyJCHGDljVlFS+75wExr+9u8EwDKCVh/cLeEeIHKUqFndX4X1RAABCAu0YA5j7lGkMefSQ+P2I9YzJOYfFYoHlcokYI4YhoHENfKMZMcmyVUrwY/nRNaI/5bo3HbpU9uT9ONOAMUCh34NZdAIgA/Lsy+IGlzs7msOZPtblY3JmkW2Msr0V4CLjSUU/zyMa/Zz4renxDIudVzT0AO2BrZmBSVDa8Zic3MwnJRoBQvO8e3xpcy7NjdoI3Ei8qjibH5uSlWLObieZJ+3MLUBr7D/bz6vSEn4cLisi7lttM30ozheg5lHK31DsNmYeTQmBXGmZ/ky/S6rcc/c98/nVPANF3wqlw5smb3/39Gxd9JXSSXco+799byuGaTlGJiyU7luCJRQ7KW0zPYwTyISkeIs46hV8yMGEpzvydIU31zB+rfVpvSk/z1g2gInqLJk6HgQsksw6UlpWQEwHkvTrHt63aNsFlqsLIFBSlKIErZiCMpX1VC2MZ7XKxrMY/+SWRAVoxDA3ELOyZIjiH4Nlk7s/pzwYHBaUgmVDLWGIeRTItuimKdBU3/rnW+w9cXBoClpRaVGF/Hly5dEqLT4OXAOQlwDWrkWkBbxbAc0l2otXuFr+Ae3Fz2jaa4CWMv6pzZ+xj1GJtNMHZraIuYHZf8S1q2M5BsWITFUSrn/T3GdAcX5Q1gDZbqaBaOnQkmdMds0Uxd8Uacanmxustx+x7T/i5u4jNts7xDgAiAixRwxiDeOcZAhbrRa4XF2CqAEg2epubtZ49+49+j5IVjSIi6KsbUpnnuA2xQgT0q05kC3WnGbBE8UpuyRmXmbrB/DOGaOroWe2TGL1epOxcGmfpDkwIN3ttyQqabwnnPMIMeDTp4949+49ri9/RrtYgJzXSEUMYgeKXnG7aKMDIANFMaq1oyv4s/nwjZozlrCsT8vlEn3fw1zQnGsKEM3GsgGRWu4VYzvX5eqIAVDGwxlbXs2Dq8pXNQg+TVpv55ws2Gn8mv10+pFb7KaKP54SBW9fWZ8r0doasJDySKOjfmhnSdCU0mfVFrvflOQ9y3YfU14ejxFjf2zBBybKspjxkLaVzIsxRL07FMuiz3BwHlX5GPqH7vokexkXkDNnKo6Mg8c/lPz3TM/0DBR9W5TAIvvjmZ7pmWoqDsjCyki2jaWxp+pph5zs1z5P35wVAOz2iJPgMQ8kjLTUo3t25nOzgpl5slZqEsyQa+Xx06dTjklR2IFkDTBdFjrvxdXCy026I1ekslalZ0/fzKWqfMfSPtvtNXMZ78mAj9zfrKcUFiScRcAqBhCK7z0AmSVG5QKmUtysgxVlwGQuCDsIGQxBrTBOnjXA0+ozkIjUmkiVw4iIPkY4ACESnFuA2iWCu8RuWKFprrFa/Yjrl79giBdo2tdw7hKOFpAIMGqRh/vL+qd+bU4VLl/vg+rABpoJOCmeIpTGe3b3cVWCxJHluZtZWfd29Jb8oAzCnFzdCNhstlhvNljv7rDru2Rxx1FiYzkisPdYrlb4+eefAbBkSHNLOGqw223w7t173N7dSdY+ACFmnkZqpSfZlBy4jGPDGcARo0exIPIaL0iAogwS1dZtSJY3ihMBmvExxWrjHDXHlJb0sAE9JWAJmYtTgCKgzI6Wb/43XYdPNzfo+4jFwsE1LaCuZ8TiIpm4Q7pBd8jWVp0+U4CAQNpDYxq30TITSmDcRvriPJhDUka9a0BowU7iPGULphy7Z3xEyFJjxBjSZ5GmGeIOUgJTo7jgkZ5OTyIeFpnaqjEzcOWJiMYvFU61GwNAAEU7tbk+dfYNlQVyz29ogd+a6F0lk6DR7yeiR8YtRsZ5cOTQti3ats1geHFGPnz/H7a8sVxX/z0eTAsAUH/7GSb6Dkhuio4DlZ+5PJ+Bom+NEo/YLzo/0zN9zyQ3tbT3pBSlPMd5cI6SfpHCIqvCbu4ek52WhA8DCkyxGT9oAvOBvap7es6D1JkLQ6LRvtc/CZp2vRSO9fyQTGw5Rfw5lESqQs6kQiF0zqHxDZpmgaZpy29U1+YZOrGb7vGQaJBQmNWIKabWf7PYqoEn0VFF+IuE5KJlVjShyHhmrho5wPHn89BS+UzAl1k1FaDP5Hujic5m8aqIs2U+3QMU2YgqWGWKv9d4NQkoImCIAY0jDPC4WF3DLVaIdAVqX2N19TNevv4jFhev0A0rtIuXcG4FoE3zgrHCdCqd
qFN97ilmK6EYId1SBMDBjV16Kp2WbUUW7xeCOJsyPDdfo/1k7m9MiEEDuMdiQyq/IfJwPsKtLrC6aCEolQO4AZjw8dMn3N6uEWLeMYbJSGNlzaUdoIpvSl2vezRZEzkDEDOAU8YDsrXnnNdA6AXLMbAyBe8dKS6U+QyQ12mOjTS3dvfT+HMiQt91ePPmN/zP/9RhdUHibqZtk93itcE5Ro/xChBA3Cg7GQMwxdwcIWauxg3OpT577+GoAVwDZ25nZtHHOb5TWZb1TVwmY5oza1cG+qZnQm591A85AUaWIdPA9MeMnWO7xNi1VZW3xNPKqOmcFhPAAhKRBRojwxGyUs0Fj+Y5ZpWtPFP5qVDGk1nJnEkT9/d0Jj9le2fh+QxzsMU+e5g2cdrKnGQT71u9wKoB8TGvul9947Y/5Vq3DgDFoVA9QfWTz/S7JRMMDq2908+5ffTVA0VxfDNxD59LAt3na18nzc33IWnCvpaUr+P0pIH5Trk9G7e9MFEv6as7tk8d83F/VPlLPAAGGBwrS1MNy0vVhWcXTHr+cwatVoRjsTQf7njKDjTyOs4q2OUb8mSKMs3Q20T1ESeAsZC06ZoOV0Kx9MmlglmULuYBYejAPAAUQZxdoQBVvsgloGh2MCm3f08HTbODgUom9CQ4QMGWFM/a5ra4OWWifIPKEVmtMxsjl9ZTjq2zB8SAQTulXUwWRqK6Mwis04DcEot2hcY32m7KCi28Cm5jqx6bI65qjiTKNUWJE8VwiBpwPBLAReyNcQ8MVyFAU5FHhBgQOOT+lhYjZwj6s/yGBIiQtghIxAw4VpsoprxHiqrGtVauJrBOjK2tZtpEsq4lTbjEoXEgODbTe6/DLBmA4BtQuwLaa2DxCpev/gtevPwFy4sfALdEu1iB3ALkGrABb7lR9qLiL/aGyKtJAi9bmVcOj9/F5DnrMkWk/cbkBABk0vhXZfwVgiT1Y1BkUNQMf1H2hWMB3Cr2CgBs1kCcliJrem5xeQy22hVuyt0qXQBjAQTFyOi7AY3zcL6Bc62kWoouVSyxMwgejazlKC5MzhE4Mra7LW5uPmEIIY1KYEYARNkhdeN0BLC5UlCySEsgRQKGJHuauJ5Jm8ssY7kvMhYS/Fn+Tso/C68xflBG2qBi2idB2LnO+HffAMshAutth/Vuh1eAZBmD8RYPglc5UdcJKPFymZcomeQQNG16C+IGxDGdkVwALmlNpPUsaygSxFrSrK5YlVDXqDucxX/SmEl6BhmvNh4UlR8Zv7JxtNPOEpklV+akBZfqn4Hq5dxljl2CNgdduCnXnt4igKu6it+UmxQRAHWJpGLNOafu3FyWRYBa9bgRgE7uvPUw7gCZ9Rg7Aco4gKCgOdQiVy99pC6JJcggxEhgtv0gp5q0LbualZcLkVlF1kmezQNk7tQ2y8V4Vu9YAG67rCq4rs0Tu/qrsyNyKP5QbkMGJ4/La+eHABitH+K0KJkjoNZv6bwr5aejy6E8+R2SuyHJ/MQgPExiE7UC4pZnMTMcSdKNkpdnK+qxTjKWOXVdj/haPULT8ayPrLoAACAASURBVCpW/OHe0eGyMvssFkjBTOQtOmEcTyMDnR9Srj9c32n1PKRKX7uwT6mSb+Y/zWWN1s98fw7rXmVyg+o5k6/tmVJo2lvWpIln01cNFE36d9+guE+zvp+QZhbGKcLXva6Fn4AOtX2uzXuFngfijA9KJ4x5YV6RddjamLQUEw8XU5i67/3eWAg8n8YuMikd/SyAd4yRjQ/CsWCb/bRL8GI2gHB6lTVZYrtdjiJIcIO+CwiB4bywwCEOmqbapf6EEDCEoVD2szVLBoesXTYeo77OmjlnNasUykmRoKycGVgzU2wqZc7VDEirpxojBYtGGdKmlK0BTL0xQTrf2mvgbvJomiWaZiGKimaNs2dI6xzXVso6+R0qxsLJrb0qS5FIwA6q1y6XepSV6tS9B+LWwRwROda3x3Ur9tKc8lv+nSpU8EzKLyQ9jX/FNq8HSddVke58/6MEA7qcKkQODo4NCJB3GKRWDqLgBnYI1GJx+RrXr/+Iq+vXIHeFGBswWvhGQKLy8sGO3TE/2vfeHH9mzi5LY5CoAi112/okuIlLIeneKliNrk8JERo5IiIgshcgkwEgQHWSiuXl/cIJWIQK1nnMbZ61tWyy+FixyHwwRsYwDBKzy6kbZrKGI8CRZJMjgou6DskAA0bgHpv1He5ubxGCxAZjl5WoqICjLC8DEWKyFCpjcwiGSQpCKYSYgCJfxCeSPuVA2ZT2iFlIUqVQ1us4DSvnPIKUxkYUt3K6S+Co/Nte12tGxj5Gxma7xsebj/jjzwFNo8CEuaXquTe28hCeHBGHAWHoUz8ETPCCtTEjJrAXEABmrh0s4+5EWSetS6ywvPyOBvTIKFBqhSwg0vIZERwHRR99am0CB4yhsSQRiEX8orw2ufyWlu8LJTazytr6YbT3WNZenklT1vdLD/ZWZOtTPZ81+JW/RJBMejQZ4Wm7jlG5VjJYakCRgMf5c/ErjZHFKxryOTsS0JHNHcnGdRx0uBibzOT03D9+0VlKCvtd1PXsK/zMyxmRt4oEGwfroz31zLXsaYgBCZIPCIqoc8SQiwC4YsGOW1nNdbkm7TAoLEhZgFgBg2RvWuDuUl505OGNVwKFJXLeWal+acTeNs2N9azaMip3vrPHdwKPmzMSpvK/8zvtfLq/nvCY9GAGfYzj4zQn288SzazXmQpRz+HseqlTdFrxFQ84tVWfu9e/aqDomZ7pmyaT/A7R7MFvLiyZGT7dkV634/BnSS3Q/+efTzcv8sdJNWf3JqQbwcxgawXntLLkMGAWAbcfHLreIWAJQCwvIgI8eY2A6cBowNwgRkJkuaFMt6PI/Tkmk1W3p8WtmrRLDxbDFyJgWXIKVdngmlJq1AKnY3YIBjlpHamwYkZZk4812xkACRRJhMZLIFdyDhyyskgkN/2nixr5SbsxST06tHbGyquORQgxKavlGnpMyiAqp7/BSJYI+75zjotOCXjoFwzJsDfkX+UvRAQmLxZ5rsHVi9e4un6NdnGFGBdgeHg0cM6r1UUh7CgfG8d8B3IbbL0kQKV4314rlAbwHKcYL2wduxjAFBLgRuqemJpCqrIxgzXEcazKE0vAfbPOif+SWlFFMMWq/fKbRXAz4GgCdkVEDggIYBZw0hTU5PYImVcBFziDAsQgjuj7gE83N9hstpmvkVqTkhOPGrOYMk6k4GmKv5FS3Y+Vmfz3GCQqf9vrQ7e6h1Zm+Vlypxy9NwaLMPmOvRaXwUg9QugQwg7AAHIxJRBI98olvqEcEwqCMITXem8xt2xAzBqB9NkM5+TCWAJhhwAiB+9bbVsA4BQQFKsmwcQVHCzAL+PmDAU1pADEoJmXnAWi1fES5ql7SrPJ1aNU7TvDDKkCytTC6Lheml9wKvEgJQsbSn/ky4dy8z8Rza2n+iJLLf/KtYXCUmhEbIysOi6mmjkpWP9wFvhPcz59CZqMEFUrWJcfn7Z2ElIylZvTK+bEE8v3ahmABJ9K655GxVDND57pmZ6
KTPbex1u4fHF4jT4Ef3oGip7pmb5K0gN0n7b+mHQKwGXg0BHgpzbPPVEQsGaQXa9kgc/KPFzvDJJSKpSR4ZorLFc/IMAjYsBFywgF6LRaLHDRXqBtL8FoAGoQ1blHbsFMsWBVTSndzE6bRklDqBUhyyKk6hRnoMcUmgSwmTLLnILZipyeRCzpqSPQ2NemGInyWDk4c4XMNBHHijlpmkZiFDm5uYtB+0LZjN6VJinm3jeqw1x/0hzZLb6BPJFPmPdMkq1KXM+sHfL9KWB0btyU8WelZV1un2UcKugQgleXerK+wDClbTxTtmr0llQtWwITmBosL67hFxeAW4CohXceIfq6DFvfXM9XroGPmn+P52uvKjSuIFlVAOZiBQ5yG52sF+URigREB4qSAUsUbXVRM5Aoaegj7kB5v+ZLQ8lyxBC3FKBen6kEgrqCRcQYJKNZHND3Hbpuh37oJU4RJ31aspdxTAoKJTArYrNZ4/bmBv3QCZCsFkGzuiqQbhyzFZEpzNY4xUOOyJJzivYcgDQZvKJNpYtwVcaRdTyO15Pfz/w1hB677R3u7j6g79e4urgUvpIUOU6gjh5KOt0Wm4ohvozCqQ0zEKtdSq3PJjhVagMArFZYDSIPYIgVH+DERRlmX1q4/FT9LsGbUf8K98FsBZQZRcEtUeIXIFZjNT1z9LliBlFy7zy/taKcfs+zj5lny3eTlp3qSCnl72v9fw867UyYkQmKsRFQzQZh34aRzygFTz8OxJ1HTzdmX4JmYGF53zCfk8bS9hdXf45BovL3+HV+3tapAp2cG5Pz5s3R3B56pmd6QiIuBZZHp2eg6Jm+KJXmg98vyx0dYin9sH72jcsP+26PH6myJEgDWbQr20Hk8F//6/+Gy+sf8enuvSh4HOzr4DhgsVjgxeoCq3aF5fIKoAZmGRO1K6IWUJYvxk0xgMhaYhZSVAA8zDlDjip4pjjEyKoo0mwFPJKZTjHFTfLYHuX/tBKy4OW9ggvssn41cifJ0FepGJZqjSgY8xZphUB45hqK0VJZzZR3Is0F1x0rt0SmLOYbTAeXFCefwL4C2JmhtE/2LajRs3O9MhGWRh8ygCGyghserlmC/ArMCwANQA7k1EXN1nRSBxUemWl2rVvyXje1NNPjQsby/p4asidMsRcUBCFmEDNcZDjm7IWkoxEr5XvUZrYBcoIgpCC4lP5zjtB4saKLiFWMFXNtDHFA13fY7rbYbG+x2d5i120wxJ3EyYoBwIDAA4AAR5zaLVZ3ER8/vcd6c4MYOhCpqlIp9wxy6vLCBIo2nscUXExu16spKBSrY89Myp0DRKpmcLVuxsBqCXDZewboWkwmwGGzWePtb7/i7vYTfnjxA1jjPjkyS52s9BWtRsqiqChM4jlUjJcywpQMYNQlIoemcWhbiS0UoeuEzMVT4+QAsMDiuXxrR/GaTTnltP6S3lta0sKcmqdtKntoO5RGn0HHMuFfPGYrxcGhfefqszE32cM7qdyjX4+wss8yrrhWgVm9iRsdpzGrgQAFD6oD8zQ+fb+GYzrUX+LC8EGJ09oYc6lsMYwz+pjlp/xWPWjjWGlVa1jnXn9YLwnLOHCHdJNc7aH1zl94O8zX/2Sy+DM9KM2fwc9A0TN9J1Qu/++OhWUNSv8sBJji4CwuxL8peuhD6ZTbQwIjprHLriLOkaYfJvzhp3/E9cufMMQdhthp0GMGQhR3F47wGNSiqwFDAx/WnYPFytgvS5dSH42eUXW81CdSQZZeHooxZXBiX7H7x+P0Z1PL9iqNGRwx1zmnLnnlXeu4HFO+SzCIOKk3AOcYJ4ZKyG19rQafSpVbYj1pVUnnrM+x5UUVa4UJgFO9Qer1zkuwVLtlP6EP9wFVTeC1sY1UxLZiAJ4QWFrRh4gBADULRHYgNAA8JOaNS4BX0hrpcJvn9M5R4/Z+VJYx6bEFOyUCw2n2MLX1YYvbRWCKyV0skrqpkSj0TAMYTgPgG4BXsNWoFifM4EDgOAhwg1LBZLSNxDAmMQHSdSXKRYwBkQcMw4A+dNj1W2x3d9h1G3TdFn3YIcYegXsw93COwRjgzUmOI4gimAd8unmHrl8j6rNMmiCAdA9FcY8j0rg1RBrUWnJCCBBge9OAFwJKS5c0Laets7nnTl2dFrfo2DOH6pI4T0CIAR/ev8P797/hx9d/xGpxhWSKYBNKQJkKnBU0NBDJAByu3FaMjyEBODXpdQAxmqbFECNiCCCN++WcBT8Xl1sboHoNpR4W/47GaeQi4wo+NTeGqaWljABBgoTfR4AcknUVZcV6Ou3TQF7irlceTMVnhxDjE6wzT7UKfUiatZbTv0sXtfy58fghjWG5Zqag20M32Br3iHU8JU22FSX+xWBJROCoXmp7CzI5IgOuJQBbzt8cQM6MnCESeW+YJeZpssYJT3wVoOl3rV397umpZvQZKHqmL08FGFL8+X3RDGCUBMFnOpP0qC8xFbs9ggBBgEcYIvohYIgRfWSEMBR+CRHsGN5J/J0EWLjC3NwCKVJZLyWLIUIpYJryrQo9ATm872jFH5j4WQH1HBBofBM2I0ecqkQKLKR9m6SeLqosbsgTYJSAL3kvAntdnJJYf5bgJYGFI5tjyOdRKXCOBcD8mcuXmprlTdKPU77tHCG+x+K07P9snyYh82Egi4MK5CRZ6ogIQ2R03YAYoZn7JACs3KaOQCLO6+RQ2NakUh4R9Cdjp/+Ov5as0JKFFfK+KPcbScDzCHMXyOHW5W8HYNCn1V2IIfNBEcAA5oAwRGy2n8AXpdLM8J7hGwF3oFmemCKgIFE/dOiHDsPQY7vbYLO9xba7Q99vMAw79P0OgXu1PJL4OswDHHXmxArmHn2/xc3NB+x2a4TYwaxgzKoou2GWirrtIaTf2QIpK8Onrv9TXSz3fa/8PMVMsvnTz+3nWF1J2UuPMTa7NX57+yv+4Zd/xmpxjayw25otVTy11HSU6gQJ8ETFgGUejXLZo1qNzGpV1KKNUdm4BAmHxiQqA6yP58iUWtaA+uCcWUsyI7pqHPeMStG3PCbWD9kXtvrF8tXR6LkT1sFpSvK+b45aNwOAPTXtH1Mq+JutyQzkpWWbQIfME5gtTQMVZ/nnNrQsRs9JLj785snWppHXIbVOyro9WEK6dCgz0+U1xagBvVNd1c1CU+ad9SysrrSOlrGvrV8XHUXhnulboDOX1kOsxWeg6Jm+KvruWZmgGvryux6JexJNlPF0+0QiWEQe8PHTR7x9/xt2wxZMQFATJIJlQJYMKVcXK3gX0WgqVQ0sAkoAkcY3KOvRNMBcPJNi86iSOrVCGjHztBGO74jzhHtOS8wBlXm1KUo2ZvuEbAviaQJ12eDz9q8Fbc/uQrXyUz55Qg+LgWAGQggPArQeCvxrJICQCaYm8MakYGcFPh5t0j5AKrm5JYE5Ww4JGFXGSFF3GFO6CQgcgCjgmfONwBQRKWsW88g6LlVc/crtHL8ePzcWUHRbxjIGjLoOOa7Ll78Z4ABiS1VPEAsczvuNHYgdHDMcRziO8CwuXZEBD6AxyxzWcWKWgNUuIHKPgB
3utrf4jz//P+hebBVcigAFBN5ivduhD3cYuEWImkkvBOx2W2w2a/T9Drt+h81ujc1mg27YIESxJIo8IAQJcB2jxCJijmAEsItwALp+h7vbT9jtNhhCLxnINL4OkYNDTJZEaT5H414p5dUvszDhtKcyyFF/bw4AzVN5xkbSNnrvBYhEBmssu9qspZJZaKpyTuV6BiOEDh8+vcN2u4HzpAGKIyywfsn27QRlluyVMWYkx8DtCd/k9IA8WliuEQT0apoFCF4tyTJfT+23sUrnuP3k2D21knsgGDLlWDjSnuKjdKbNfJc49S7H4Do0f6dw7D3f5/HHCqgUCnv65AmsiU5as8IwZ+4dpidp6fqXeCT0+/aVB6M5IPB3JANW3RmNM6EY6QNFsAVOzwAfYOVy2venWuVGncfIkgpB9v8caPU7modn+vbpwVK+nUbPQNEzfT1kF4P4nbDlc2Si30WHvxYqIDaCuJjozS45CT59e/Met5/eoYs9mKCuag6OHBov6reARYTGL4B2ITdZVjbprSNn0MX+trpzIOqyVWxSUaFIFM+Yno9KtytId4cCK2fpbzNV7lt2Un/tplApHPo6Kz0udWDiJmdlJUG8mJ8Eih5oS2GJsI+mQylBy0MMn62cCCCmGe9ILcag8aPSRKGejJlsOKLQnp5HZaxYVeMJoEw0VKw6ues2ywkiceEjDQ6OiO12g123U4uPUXpu6wbVitRYKRp3V7ucF24qK88vYGs2Jh4va9whMlXjYpYXArqKixaHAIQBTq21iBmeezRhgzauwfEWbfRwFEGxQaSABq2sHYhllwXgDhwwxB0idtj1t/jLr/8d/9e//Z9Y/+lWjEQiEBHxH3/9N/zt7X/i0+o9/BroCAhDjzgMuLu7wWa7BnNEP+yw2W0whAExdogsLmngmCzlssUPJSCPnACam80GwxAUJBInTjsMo87AmJ/AgOhqImx/psmAxWEhZDAmlXBAoSqBybNcNFHs2UL5JqIRoFrM9biMEvTSrHCRB3z4+A4fP73HP/5jALhU5kbudcUatvY75+AcJavQemVavaiFcC72PsSd1LUOIXgM/aBgUcnTjC9w3RwDPslOIuOteSzqMUEVhLxq5RTdQgJnqNhzlNdQBkHG80ij37OVzJMCr2yLG/UP0QllPBKVa2xu5XL5795mzn2T8CjdMuDJ/gTN1v6t0rQvU9nn1G9SWuMsoE7pNjlT5Lz7WeZPttfsok/kO3VbfVT/wmd6psM0e1n4xCAR8F0BRady94eYhMc+IKflTzLsTB+Y/d6UDgiO5RMnuaaMm8CTB6JIG9VFQyn7njMbR60OzlGqj/TvYF3VR4fNaYUs48LhGk+iYhmIrDzTUjoshJxkrlvcqJVm23ulrs85cEcXfXOCdfnwVKimJMgSa2yR0KHxLNZExGicxzAEDP0WQ8cI/YDlcgECsGpXwGqVb2nt1WhLcZXWnnIDuGpMRTWQVK67UsnjJLDk20xOzyWhl81eZR6MsAw7Of4V181LFRRKivVRGpdabGMAUwbJgoFanBurMz/PirCNgSRzqaHEB+r3xXIkKpo3pSmYBbVcCIm3GHDCRYfPAWyqKXVknmWy/i2zlTEvawuKZ3g8v2fUPeqbFSpptYV3uMLapMyCRc6C7Mo/MUZ8+PgRt3d3+kxRLpWtPoHf7NEtK+yC8x/GMzgpyLbENN6OBfFnyHwjAJERuUPot4j9Dv3mA/ja1hQDsQNvf0Xc9IjDLeLgEDzDxQZMEYEkqxvDA2jB1Gh0oIjoewTX49PNR/z173/Gr+/+ghAk4xh0yv/jP/8//OXTv2PzsoO7Y/QOCP2AzfoOb96+QddtsVg0gFotOe9g8V2IJB6aWDE5MAHOMUCSPbFpAI5iQbTZbgTUtH0Pve1mgxW4GtQKQLTFaUu7YEwMAaqcKvV5Oc2vwxqYLJ8dcyp7pn5d8maOUXW4OWWv2OPa0npvFOdJsuQM6Icdbtef0Hc7tE0DgOHI8k6WbU1MC468WFBV/DqvIVZwiJQ/xYJ/Go9LpatS6h0hOpZ4duN6q75x8R8ygEQyN5EJtvwpue/OnSO5PdbP5J5eUsG3Zc1Q9UEGb6QkHo1HWdHc8VrMdnESIj9sZ0tqPyqgP/GAmf4c48jmzj3TGIz3xFRGsOyPmUezyQYzfS82U+qtrFHOcaVn9tLBMSvcMOsHqH4uFUap5vrTY7z5qDApTxnAeeTRU+TB82NRqjxWrOZSJpF2jdaWvs480s53W8t21uc1P5V+7SxXsHafMKB8Z2Z16McnnI+PSOeC909L+8fmyYet2MMnPfoEVMrr87WXfILrT3nyzpM0/HcAFB0/YKbZMGYoHRoPQfxwRY2pEuaL+k5ZLCe16dhYivJxNpNSRbB6SxWXVKsOWxwNn92MH6ZjiZpTaXiYyTm0poo63BGgiBlyMzpuv5VRaFzHpqZ22UY6KEffIxwfz5MOQoq5/LE5zbTE/cXscX3Qxsrh4kSg5j1gQVmPKaKiQxPEAcWBPADuQQA2tzf41//3/8bN5gO2Q48+EPq+Rxh6OCZs1wOurq/wyy+/4Op//z/w+sW1xLdgM08WlxdxwRILDul+oRyY4gsACDMsZizs5zXAzMkKpBT1YZYWxXsmDMXCrWkcvjbFxyjHeQwuE1JtZgru4CSVN7LIakpVluMiLP26O7S0KqVl9BEsThHlkWAGogZVZQl4OSdp1DeGMmbD0CPGHg5RM6o7gJyAR5RF1FPP2By8m6UsmysbU0REpwnUmaWesn/MGmTawikfrqv8ngnNptgwS1Y1cpZZjXO0KyfBxb0XV0nnPcg5eJKYGkNgDP3/z96bN1eS3HiCP7hHvPdIZlalSqWRRqPRdNuatdnsmu33/wz7X88etr3dI/WM1GrVkSfJd4W7Y/8A4EdEvINMkskqEVXMd0V4+AGHAz8H4BEMh8jQpMq+9Fy1Xd7I+MoAxJEW1HK8zqlTpFg1hly+sznrzZhLBDjAccJ+2GB7+2fEm++xXv538CsxUjtOACUM7/+A4fYXGLo1ODjsOodldGAaMFAPn5wa4R6JFkiuB60uQBcLwEfs9zf4uH6Hdx9/AA97OEREFh7ZDB+xjj/i5maP7jpg8BExMNbrNdb7T0gcQNzDe5eTuhMlOEf5REAiSwzMICcnBTpHIIrY7a+x3e1xfXuNwHuwJeIWCAORypwV8FNOSMuGvyVRz0cgKo9YnxKrv5+tJz4PZ0ni2oaG2dibB9QcEDvhj+o7A6ITJzCVMLexnDcwrXhmJr229mYzzysCEJF4j832FtvdGp1fISvVli8OUo5klu4A7uBcV7XFNZ46nI3q7LeVOTWjwVn+dbnuEZLviB1KOJ3Kzwy+suSZ4pRy7jsZLckFFpPJa9ZT1Dy867QP7UxNyYYjg1ltA7B5OUUAsvY4PYEtT7YEZHCfMPLwKbLc+tk6hUdzXS7j7ElpfETwaBJe50dr2CCVMEOGAZ/l1FDjCSutyMZD0sWknK0W5X3J+TQNpcwS2rxLc5W54r0SHkzgbDNwlLxkkiB/kLxXVB+y4GdUn
vabrBYxmsWxACGm43q0N07UOeTFeEw8er0THdJRz7CbziTTfYjM41XWqcJ3GoKcbLRYbI1WiRH9j2S9JctMVwHxmeX1tEPmCEaEHNpQwlkTNGdciprRq+g90Do6VEDqc8Vkni0d45uH68yTHrEoYvwwKZ+frNZD2Y7tk6HypoDu9muESaSmChNF7PGZ82cAFL3Qz4XmpuGLnH7+9Bg7Gw+7Y6MLge5GpRTxpz//Ef/3//OPuN19xC4mbLZ7xCQ5QzrXoaMlXt1+hVdXl6psaC6Myu3TFMCJmsXVG/NiqYT72J+r7BwXA8kUWtFbuFrLHk55myMDT3JyVrJvq9pOFDio8mbKOilwNt31OhVy0hSYx62AMtN7aqlRQk1SstwkKRty0p7Wq+gcahR6dmLGk0PJlSCGeM4jUyue1XPr0KNj/O0UiG+vofbPAAF9dc0znIbZuAzqZ48qNpBADOoSAjOlU3NwEmKoPFuHfcyG1VA9Yij3gqqRFwObOGHY3ODdd3/Cp7/+Ef82/Avc3w1yBQE+rPHhD/+I5fVrbBZrLGKHvfdYRIeIHYakYC4DTD2oW2H11S/x6ttfIaUFNmmLt9/9D/z1L3/Ad9//FSlFmY86VtefPuHH99/jHV1j/fEjYg9414EZiGmA0wTCBvkSAewI3ov523W+zAOSnFUWAgUKuN0MeP/hHXb7LWIMKL4nKMYqa39UXhnnjIspnzYfgATnvI4ZV/xY+LIdq8+T63V9azC39iJyrv3O5mqpl4AHSQHjYQi4ubnBbrvD1YUZ+pwN8AYKYfkmP5vqC0q7G5lb7h41Rv6pPVK6zqPuM+aU+5RZQAaGyIYKEspGMIjE68rKVWDNDJyZWjR9WwOuxa9U+47r8UQBbqsxr9t9ag2/G6w+W2Er6Enp2FzJ61W9SUdAvbFs8vzoHtgLnaS8eYeClUleIAMgz+tfIgF5MqBX8b1coM9oNrPHysr43YE6V3LlhV7ocWhOn7+rjn/OtZ/Hxy9A0Qs9DVWGT2t2ztO5C8cLHSNV7h6xM5/K/ZW53mWe/Dr6XO3aVSp0QVsYiwWD3B7b3SfsQkBiPUqYGMMAUJfg/Csslx2uLldwxJDTitQIJz1lKSvh9bN1Dz+DJ+qpkQ102caUvmthpqY7ieFYPfEyVsQFxRm19b6UTQBThsm+F2OjNiIngAXqfB+1Yh7z7l02rswbq7p2kodHPWSKXlmMSkIV6jBDXP878pC4D9XGbf3cwvO2A+5gp1OVe7wat3P3te/HhlpbTlMjlP4TT4O8a6aTnKBhaFqO03Kcc/DOQzwQ7sczh4yu5vsRSDQPDI4DAqyPa88j7ReOoLTDsH6Pzcfvsf7wPXzeaQNcGLD9n3/AcH2J7WoDjj325JGiQ6QdIgiJHBgO5BaIfgXst1itHEJc4eP2I/7l//s/8U//9H/h4/AJPV/oeDiklPDP//z/4v/419f49Otb/Gb4DRZfXeHV1WtcLC/gyKPvPYh85k1Szw+oV1EJAzQjXoBA5wneOYRhj+12rcmRIwq/nxOOfHS0MiI4hVBaukuS4cKSNdBymi+Y2wTaVsd0wEPUPOkMfBFgTfI5bbdb7HZbpJTgnHnyAOaxg4aLWsCKYevIFMDMhmxpXkMyRjp3NQG8gEVAjKGSk2Wt4VTCfQ1YTmxeY9OZMNsXx77IIBFnkEhAJPNsqkG/FuzJkPaRNTyDSkDTb3fhy3kA7jlodzbeRwyzCuDArEx+obNIZZl5wALa7zS55CSZLGVmPdmPmuErG2sOBDnRs8z7SobleWmpE6gMd77mBSx6oaemuwBF5173AhS90E+F1M93mBERyAAAIABJREFUdkH4jM2qFzpGjw8WPS2Nw/RqDcE+TpmpbToDNICxB6eduCCjA5OGdyRGiA4x7uE7wHu5pxjw9bMp/ys6CzW/2TsGJAwDEoqSTw0D1BfBTAaqNoP1WzOA8jHrtbL9MAM7wqcAsvC1MUg0ukOvbY0SbUelgB1iwXkjszW2DE6beL80Jw7V31fGYfMMbl7uTiOgigAir14oLntpWCvdTNjpXJhl69UxfW2p5DGRkBTbWS2AlqsNGiI454t3ESR8KcYSYnQOne3FMuP1MvEuwsiDofYmUos0w18poEPAkgYseYcFB7gUkFFNduhvtug/BVDYwIelnFAYCERbeHJg5wDnkGgJf+GwcAJARY74tF7j+w8f8OH2I2I3oKMVMtcT8O79e/zhj/+E690G15sP+OVvf4tvvtnjF29+icvVhQgHC0fJfW7/WDsrsFEEAQjAbr/Fze0nxLiH84wSWclN12SPEzDakJtjY0fNzwV4PDyO53grVVefeP6Utww4LZ5oaIDc1vvO5glEXkLySgEJMQbsB8nt5F11L2voCYwHp0CM9WMBb+z98baaV1YIEeQJBAsRk99VtSnX5zok9WpUoCgnlbejEUq9qBYXpifVlSNqT4a0fq6e2oSejdo98e7M/9Z8cWBM6w56kGXnSyskOp9sojfgYtVWnvLPC1h0D6IiubJEpPLnKrXpeO+aPqHrAwOUT/gEJOR0usYCU3nESI3MMzZolqbxc1/oAeg5gcVfkioZ9Dl06v5mzbjfw16Aohd6WpqRwrUpmr+of39ZmD+DRgbZT7krLXzAjiWSL2fAE6AYkpVpX+9mOTG6JCGiKheMXC45gFNE5yyLUELJoVABJ2ReRaamM0CuUT6YOZ//XVVBwR9GMs8abR+TeiCZPpQMXLU620kfdZPHIM790JAGA8v9WBSvg6FTlZFMeq1TVyML9yKufUZmnq0hE07LcSytZc3Jcjwe/TAIVYNFk3qPdgznlEvz/Cr2v55epmAeVS5VBhSNDYpx+M2k3dXr+H11JWrQgfJ11Xdkr5RD2MhpuJPypRiuMf/WJA05QDamYwBCGqe/pynQMPYuqvknF9MYagrGghATa+L5AYh7eI7oOJVMJTqf+rDHYhjQ7TfwQ0RyHo49nNsjEcBwiPAiJ3gFchHsGYEi3t18wA8f32E7bMFOw54q6C2mhNvtDX58/xbd0ONy+Ab7GBDBYO9BXQ92DslZTgsC2ElKtZwMh3K7rC8uVivc/PAjrq8/YBe2iByV16UPJMdQC2xKV46sdMLkFJR5QKjwmOCXhR/nQL3ja25lWN1xQRk/y/ix5g/hW9+0wa4jTThhIDCZd6d1x7gtB6rXfM31tzz+siovYQh7cGC4bpH7rgXYEpKCQzwMSBr+mvvVG4x0XKbZBkEBH9sxzaFn9cZbtSxBAbCJDJpI4DmPsCk/zPZJZu85mZYrMv4FbR/fXyERfrHnTWV448Go624ZK+mHGCNSrPPJtR5ntZx/oftTFl1Hfj+PEwyc1fWYAQl11/xOLDm7yh8w0VnyeFYhrgefBp2rtdz9KSvRX5rm5tHfan9qux/bLtP1YPbZZ9ILUPRCT0aTXWQiuMbAeqHHoRE49xPtawFTzFglaMbZigyFcZieYSsgC8BAimIKKijkiCSkoDIYzDPDEwGaCFr0xzRrgNhOdg0a1EpndrhAyXdAKLkrALXXFRxSe1OPaC0K
Cmf0qLRqOqYPo9SeBGhHFkoDdKBy4z5iGMyGrgiyA4AnallzZZ1Itfm++k4BidkemXEjOATQSDMN+LKaSXvMuE2ax2rOKyN7oxkgOcb1UAAeKNgDqsE5u7CUMxeGR5VxWIewEZGcyEWMEAcMwwA7qe6Aj+eEnF6deNxPjBrTmHgQTYCI8ll6sTw/AwD2CzNiChjM6B5ieRYRHCd0aUCXCIvEWKUEOEl0CkQkR0AnXnzsEtwC8EsHdIxAA262n3C9/oh92IFXeRbntiRmDGHAPgxgBywuL7C4uEC3WKBbLNAvFnDOy7jDgRNgCX5pBBTJn4QihWGPT9cfsNncYAg7xLSH5FIqyc7HmWrGXiv1vDvmCVRyV5U+LmCSvC+ePZS/O0ZmfLV1akf1NPEkh5jVI4OcsyBEycEl1ylnZn4r5d+N5gETCzE2CiEiDpsGKMrXkwFHETwICFFvGkgusyQbFXy8hqyNmWuOeVA18oFL+FmWIZlPtG5AIzfa5yuAojJ73PctN/Lk+6YULvMgXzfLLLXsufu6Ncf2xz0y2zWYU8qnC87JQPM4M8DoZdPykcn4/Wg/F69aOcZelCXZqDD9iPK1zKRhwEfkEk0/zl/J1RUv9HlUyaa/pf6sGWsWUC+6woNRFrH36/MXoOiFvgyp0nCUVf+GZMfT0M+rQ0tS3Mr4HO/+USsQa5U0JZbTZkDqvmwnsNvpGFyMEYhSKclga8MXqpTLe1OiJxCG/VZfbz8YdjAL9nCeKwZUzKIMD0kKBNC0Ui2dfPS032evouKBo6ZK6Sw79YnK+7auNvamYB4Aj+a+uovSb2CO2Vw6LrUHhnCJByM1p4fVRTj4zCyNyjkCdcwIn6/I1BMEQB6vAgyU02VyAmVIku8Q9gp4nt8H2QMExfPDPIMO5yOa+9wCS4D540GNX4BTBHiQk/wI4rXDdhpeqbNn0vAcD5c6+NSBKCqY0CN50tAwD3Q9/GqF7vICWPRYr9e4ubnFZrNFSAGRy4hY3pfICTFFwDn0/RKr5QrL5RLed/C+g/MdLBcXwTXhRKPey6/MjJvba/z44/e4vZWT0+SUPEmOzJbnKIN4UBCpBpyQvzea8ETDU8dONqLRZxw0nFtqAZS5co4SIwOelj9IeKAY+o4kpERksIGyytN1++zxRwCzzyEJK7V5lxBC8WwqYZ72nZ2uFRFTBCfOcxLZQ1XN3WPdfKwtJvdqTxqdPCU82vJcaUja7FyfM3wLD8wCdSgSt7E/mtvvB/rcSSSTgcknr5yUS9BnGe80v7T6w308517oM+mE+LGNFDpbHyqy8Vw+Ow4WvfDDw9Dfal+OQHx2o254nHWsrJF36/MXoOiFXuiFfiY0tzs5MqpVOazDHbJxygCY9JhhOXLakexUgSEeGc2zisZhT05mOE/kcJXQlg3WUJDIXOgVrDJMhGzXky33hD3loRbWOeNRjTSGeklMSY6xVqN2rljOpTRlTp50SFtTQ4Mr0CeP7ATsKX1JZIZmyU1UQYjNrTn0abxrftCYqnhrcoS8GRuarJfa3SDjPTF2nfLbuAwUkIjsOHf73cpBLmeuT8tJdcLvzjmQdxp6Jn9gAYqGECTkoju9a8XMVe6gUu/shcLt2NSeKzWQlt+Px7BWXFjRIJbji5ki2BPQdWDfgZwvw2DmKhGi7yBHVTskF8XDiwjBO8ReElpH34H6DkPn4B3hZr/FX3/8AevNWsLcWAJMmW3+MlICQkroFyu8evUVVosFOt/BOw+vAHLLYVmQYI7zmOXI5tvba3z69AGbzRoxDuUae2GDiWSXXE0c5QHrq6ksaGTdyCJqeJvq78p4jWnMp4e9l+w3HPAympIjB9+VcEh7lTBJr8nXIbndDCjS/wT3aDOo3ZuyRThnGhb+hQPIUdMPTe4nnfes89u54sVHTfF1nx7wVDEZcaAzmVnXpbpsXTQq8KSAWIcaf9hoOOlVZiAfWjFKbWPPpqMeHyfuO+RRl0FEqxKKtLYWHCbO804X7YPg2Qs9DOW1Qz9PQ9wxI1/r+20xEskwv2HxEEb43yrA8Vj0t9aflb6T9aUn6oFmHTrviX87QFEtG2b6htt/jtKTLRR3kWdPPMeOubr/HOjB2vdZ5fD03WdXS1WfB2neKOOBKrUztnxbgyMPb347p5JZccg3jd5TPlrZQBkwI8WUfwcncIpgiIHuvNf8OroTr0aE5dABj0RFjYdw7uG8BpinjJZSbtQwBbOyiV0BhJhBSAB7U3nyg+ZMe87auhmrc8pWu/ua61S1o+wS29hS00zOr3fZabU77KmUAbC2DZwvz+pcbtI8Lziy5M1SakoJ+7THMOyrjrG21zVA+X0mVGEK3kB5uzIzsrGtxqACJ3PGk3zvLbNC023CH1IrZ0CRK6dljbgmewpxPchNW+s2qDlHgPceRIQYB4Swz6cvHVK6668OzdkCSlSjPF5rZ5luxFmVdynB5o2cHpYUDGIizeNTyUVKiD4iOhZAiR24B6Ia4KEDggcCAUPHoB7Y+4QwrPH+5j2+f/c9Nvt1BhUlvLXtg67rcXG1xNXVa/Eo6nosug7elTxRBlg4q3eeH1VSY53XMUXc3HzCen2LEAcBj5AA1j89zY5JvLfAJna0h0uXT+aRtUEZQOpWhXFJUnNqPGQAAf1iDE1yafHKqoaSzINIvJtacMDygRye1+PQVJDkMSMvCeE5MaJ6Z0k/OgWE1OuTHMBUQoWESfIz7r+kWX9pjxooUJVoIcLee/XOK0CpyAcbjZIbzEEALjnszBL7c+Z9ziDdiOGMx5mBlM5rV+Zf4TU5dZOqn6dj0D5zttCj30sYapLwrVTKN7k1Ncp59AoAYz46Z9k3HaY93KAejxyeW3lVFsldVoByPzWy3crnVM03bpOW03jY5t6Pa14BTQ2ILl+carjW9MSaO+Ld6saZj6dOPDzGN6doLPer/j9Qnq2bxe9uvF7bPzzqr1R9r96YbMv7eH0/sN7N11w+z66t95E4x/vxXLvjVLhxfd19n3FXun+5zQg/0TPbp4OO14AbcMWIqn+rss6mih+1C8Y98TgjZSWf3+c/AaCIRu/HZtIhl+r6rnpSP17XPw6dM5hP2CaWBKOHBH193XThq01Oe3u6fTxxD34udEatzhJkp3f1IywsSlVLLq93pYdaKGS5dTqE6nY/o+mdi5SzKsbzRitnw0eAn6oSk5PQatLyMvhACMkJT9ESzq+QKOrJVR2c6+CoQ2Jgu9vi5uYGMUU4p0YSOyTuYAYByEJCkPWfCpuA5aSp8CbUqp6jOpOE7S6bIi59m5hywlqGm3grsfVd9sxgYHSiR+7filzlkVCrTomAYEacgQ3KbGIYc+5vR75RRhL0BBOSPB0GfJHTPjddDgd4gkpJDklyvsDBWXLKOnG5nQLnBjB6JA4gROyHG2y3WxkMRxKBwZDEzVyMymbJH4FEs/lRsstOqaiAh1KY07AzsTVdZcmXMhljF+PScCI5ncuReGs5NYQJWvf6RoYkBz8yvrnOxHCdQ7/w8BSxXb/Ddv0dEl+D6RUsdEoYq/VWEjnDmvad1Tt
CcoW5VFbfBOEbzwDnOccSFkY6VgzxgkCQhO1gkFf+skKY4MgjImHvGD0HhGGDYTOAd3s4PTlK2tupB5AD4MHOY9c5AB0c9disxIPLsQOTQ8QCvFghdR3Www7/+m9/wtsP77GPjEiU28DZyJUPq+UKy6seb169xlfLr7HqVtmryDxdpLkJnqXN+SxEVmOaE5AkdxJxwnbzASHcABQBJqTkKgmmvW18ylwxq/EBZ4Mw56UZ85POHGIHJDWa2YGS8JOjkvA1xih5q5TKUfbF2GZVlO1zGf1iuOena3JZOxK+rZn8dXCSAw4iB9kBHdlcAYABgIOlLxc561UaOBUFCTkV8cSNs53Thb9H15g4UZ6VdkQworbV1ZYCnPPoHYHJIVFq5AHgM0jCSTcgKJXwMylB//WA3k/EIJdALgAkfOIUMHY8gJFagEAEscgTBbdk+UzafwxL6HvSyB+LOS1rtIqXVwU7bIpETgJ4pghiOZnO1iAgwbmSiJx1fuTSKAHkQd5DcMHDHmtllaoBCJNdclS6eBV2AHdgdpDT6TwMhvZEGFQmZbDUMUADiuxjfaLxhoBvrPkJ6/DgmptOHdaQqzzySirerSdvPU0EBUrG5ua4XjIXJOTVaDw3VGP6XJAIKo3yV0m2TBhwieGgm2JEcgBAA8LYmq0hxnDw5jVnC6zGcDpRloAYgRgABAAdzLtPxpuRUgB5zbfnAfZA0mXZOSnZTmHjdMi6uq8O/TCWzP092760/fu0llyeW0dpbl2YUqKYrx4fINE8c1L8RMBWRkIl16pNYyDBjsF8HGDvbuPwrIEi2403OhQ3f3LCNAbU0Qtny//ydKpOTzP5798zY5DO+pnOrDpPJ9sXp4eqT218nryq/XDM4D5V2INUn5r3WUmqx+ouQo7zPw9Qn+pb874wgxUAYQHvV7hcfY1dHEBuANjDux7eeXBMGZSSZlSnLSFB8olwURXN2LAF3E6Syn1d8miY10WWW6PFrAZ4BFyS8JNc/dGOZLnP7jBFt6raeBx49m2jUnJTTuFTLlVAnRcjk72VTNwFYAGXjxNDZFSZyghuC53eQJC+iDHC6Sk2IYTmvrxLeULBGv82vbZBfybX2sl4nJVdyxckgFo+nWkETMmry2VYPpZmN7SZV2KmlLwL5bXhHS7P8E6AthB32A9rpLQDYwC4Rz4lZi4PjiryDqKwyxjqnm8lvooZZ0nXZ0IGOalxC5gXRrXCV20RoBApABwEUHGS+ynvxENCxYauw9B32PUeKRG2fQ9HHrdeSt91HhsAw8IDqwWcd/i02eHthw8YhghPHqSgJo3mIhEEFOqX6LsOi96h6xw69SzJY5aqxOCkR53bWKhnBTn1SIgJt7efMMQ9UopIzIhJsxuRySvKxjrXA1lqNuGfsW1oHkSO3ISXyPjNuZxQOurpTwUkkoJqz4cMKo4BiCNiewLCcpHJKVWnB2qH13UtsnK+7S0dTlzfhONVbcrlZbmqQId5MaEYGzYFvZd5wqzGKQGAQwmBpezxxE6M8RTjpGJU1Zh1oDl7HomxX4n7E1T5y1Q7FQZaHZJ58x4HQHuYuZXZAg7MSQ1vyeOVUlIPozgCIHJjq+dWH5Xn3cTzo2o7p+pmfeHaz9a83IpHF1BtdACZr46zUuETbv6q30bUakH3U6xmVbK7FFVfmw9nHevdoweeV4sHoUOrqHwwA5nyl4XfLP+blVKtv/o285KCeUn5r06U38gUHdOkPDXX6rE39QvdlZ6bvXYGHRW0tdxDi2XOXnWkzFwA8voi7+tCD61kD0nnj9GzBope6OdA1WLVWsMoC8Ep+gkKnQemcQ+89Mh9SBTnvu/x1etf4e9+/w+43d4ipEGP4haDHiw5WZaLDr/+9W9EgIs3P4Da8FXFXr1EzKBwqvCYgpLzEhm+UClEUuBIvTyqxB1S5sy4mJr5c3c1gNBRZmrn7SGjo3Gpt88jxY4NZeIxQFDKqOUFp1MLJTdeF0ySoDzGKMcep5SveYwZU7e59EnWdGGBakQE78VLqL2sMohU4xXPh5KPxZL5TnpL769zpZg3gdUtX1i9JWLENGC3WyPyDon38LioUCXKDEPaDvsjTuU389DIQ6RGqbPKqbHcHKZlH5zOCb2h8lATSnDM8GwhowmLVY/hagW36IqSxQDIIa5W2A8X2FwA++iw7QHAY70AHHtcLx3WKSF1hG7h4Rzh42aN2+0WMTEW/RKgDhdXAY4Ax6w4L+Gbb67w61/9Gvimx6W/wGLpsVx49B1lsEgUPwM6EkAROZiQkD/LpYzdsMN6e4uUNNmx/pcMUNIOE74+Nuczq02AlHLKT2UgV8YSMsBhZXDDP8eoANx2M1VzjCdVnYQ9qWw0GFjqP7rG7jOQn5E9Ag/1wcy3zaekYVz27OY6NhklvFiqwWKAouRfImI4n61xNNJUvVQSiw8QWPJcgSTkcIKtVRZHA/pSASnm2jL/ec4cvw+1gND8dwq4wbxtWgDpXs8fr4N1GVS9LzdUYAFN+Wz2GXerUqnGIxttD7lEHTr4Ye6Zj1qREzQD0tUhZ6U+9cWVAU2wRQAAJJRaw3cNtBUwkyZyZTqvarlVf3dY5tyPzj3J6kXDfyg6xdHPo6fHIJHR84AqX4CiF3pEOgASAdW8eMKF6adMVV/N6kzPnL7sKKuSq8pB1/X43X/6X3B5+TX2YYugu09hCGCGGIBEWHjg4mIFB0msSmyhGrrLld1D5MXVj0NRWEf7YPUtk1q2+MEcvDOnSmv7qDI22pY3JVgFmu9r2y9jHZwNuLmdkeyaP2MYNrvwEOM7X6gdcYonxr/P1mBkDCdOCCEgRgu9q242gENb3p68dV8yj6GWF4AyFuXkvHL0+SFyZDCfeYHMA+ljwyiDkjWQZ4Bc3l0HAEYMe+z2G/FyQEJKQR0YKuNXXBF0nAQkohr1qcLeDPhUuAP1HLGTCSXcJAEUYKFU5dRCG4sSsuW1+ilFAIx+tcDqzWu4q1V21gMY7ByGi0vs0ivsVgmePXa+B9hh30cgeew8I7gELJZwyxW2IeJ6vYZzHb5+8wsElhDH8I3HJ18llgbwn3/3n/D3m79H+M+E34RfY7noseg7dN5JDhoDFzhBPJ/kNeW+AwR4kHZtd2u8f/8jNpu1jpXTI7q1uzlmj577cGYNCDk6FnZ0ftlT77fxrOSGH/MTGoOsfIc876oQJG5BzlIegTVsZM5rrtRrzpafUTqqi0o+ployF5Amh28RMsZpMqUc/T4FKMx+paq9xaGpzqszB8hUv+Uw2/lrGnBu9pKHWHFbg7rUv4DeIuPnKsBlnam7+IynzX+uvf1qw9/qcRxM/Sx6cMBgjo5pB09BtZbyhGDROY/KO23VVw12VPizVpDNo66EZOoJi7WH2kRDGmtsX2I8npNdVPfPc6nTmaS8UbZfjtB0x+Xp6Y7Dbtr+YydneQGKXuiRaQ6p/4kJm2dAVCmMNPn3S5Eq8oxsdR+Kp5Ul95BC+9gkRxqjcpNfLq7w9VcMv5BExFHBBXIS+OOI4UgM1973SJHgvYSaMBNAmv8A0zZzJbjH+v
EhgMhe23Wi/ubYCqIKeeWBw0Bx+NBbZ4eGRvl6mkdQ05a5ejeBQ7VXUe1ODonEs4+5JYfAIi58XsqZZx1mRooxeyOlmNSbqHw3fzNlJeIuYRnzIWOar8pOwaKaJ0pSVaF0YOZaWQ7OedDIyJ8axuV4+tqwnhrT4skip545vYcw7CM4MihRw4A5iS7K2FD+xC1D5dtYI9YY4IhiGbuq3xMIAcyDgkaaearKv6XpneAAdCzpJhIYXe/BncfgHfbkwBYaBUYg4C0IkRkfY4CPCfsY4SAJrGNk7IYBAxgU9/jw4Qb7cI13P74DQHjz5g1CEnBr/SZi7d/mOQEQvvr6Da6uXuHG7TR5dYJ30D/WPFoRMQWQZExC4KD8R7meRISYEjbrW7x79yN2uz0SE1Lymh/NacLkpKBJzSPHheYYTMxhioRZxTeDwix8Mfl9BtyZ4/v6GPuDcr+SOwYeiz5+3nPFY+UQcGYCRPlxHi1qrm/A46bIsRyzeWUJkuUESJtTVOWPsLY0nqQw6Vn6oS3f6n2MjrdlBq/TZ1F5xoPSTH0KSoPjwFdVQlWvzOGVHJPiindlkUYj/hhNDz7w1zzoZA2/FJnMLFxzvxw0D1CPc1G9z3qC8Q2KTjDb3GodrQ8kwbR/KHtSGcfY+4SUWvkiasXo/oOfnhK0qW2k52QvjfniMev0kPynPDGv4FaXCUhU88jTzb9xv96h7XWowiOOyQtQ9EKPSCNl5qeMTH9hasCDvMZ+eZgoK6RmZM4R2bWlHU9b8/qELlHIQggIIWAfIrpFh67r0PcSHoQUQUhAinKEs1tCfBwcUgIIHhKiRtk+GT0O9rT71VZfG/fo3MnZ8MxtsysqT5KR/S+vkzWo+nBASZN227Vq/J9BU88D44+ZkDP7t1nQp8vlEZgsU0oJQcPOUoxyGs9k8XejHcUD5Z4FImniV2ZNuA2A1YU+e/RoUlm5UZpK1iZ7Y4BSSR5sz2k9LObBqrn6ZaBIL5HE4x6Jgc1mi/1ukPFlD6i3nCnQrO2wKlviC9uZy2oJa0ZrM/SYIV41cpUBPwRJZhv03DJnJ2olDdFjgJMkepYd4C1CvMaeb5E4IKaITQi43u8R4PKY7wj4w+YGX98Cn4YtuuARiNGxQ+wTYnLYhy0SGMl5fP9xjRCB9XqDGBO86xARJZspPMTvqShdt7drfPx4jbf9e3QBePXVCsNui9XyAsvlCn3Xw05NtNOw2KUcpsfQkKeYMAwDbm8/Yru5heSXkj9CByDCOUuMzLmHT8v41rC2/CwtWARJ6u6Kh1cb5tnyzJzHzvh5QJvzw/ikWeHJEv1XBhrK89sAsHmvJCu/ADQQ+ZZS1YYK+WrvrL6y90WOjq9tXusXBiS8ilDCrOowPWpKAAFJT7mzOrY5fmq9SDjOhstyEyHL3TlqcxpZzrCnoFbcVGBUs1ZJ/9SfmFDyts2sj5WEbh8yG46m17HxOLVDT+0f2zXGn/MPfgZU5iSduc4+Xj2ekFQm1XOiJZPKVCYkZtZnHVyX5VJCvWaOZeRUz2qf+WVtlDFw8BzspXM1srtTLUsPrz13p7xCEGf18uCaWj/3CUHa4uRu8vF8sIhsjQdgQPNjTN8XoOiFHpGoRQceG4V+QAHz3Kk9tPz5Ux0S9eVqLMtGTBF//OM/489/+RM+fvoA6hxevXqNq6vXeHV5iYvVApcXK6yWK3z16it0XQdHZkRXyatJPSNmDJy7tnHMuZYjJQNcVCtJVXLukWEDoAkjq8udmx2n63molNN3trvocks+8aYxbrgt7sAiPZ3eRQlkZj19J2AYBk2sOs1FUMKlZgu8BxVwx428fAzQE2DRDGtu2jc2NuW6A0+qwKNyLTUJO5s2FjRKj0XXRLxgbDZrbDYbhGEPl4LBWGpbUR4SUhCHzKBtaqvGO0SVh4anmU1M6uEhZzOpcc17pBBAvodDl+cTMSPGASHsAY5I4Qbr4UdswjV223f4/rs/4/sf/oL3/lMGq8gRBkf4y36H612P67hBtyckR/CREH1AQo+EIdferYd8ChgAMDl05MBewV/zztG+G/YB+90Ww26H3e4WQ7jGdv0Jl6sLXK4usFot4MlJovBJ2BLDAAAgAElEQVS+A/kOngDnZQwkBC1iiHts17e4vX6PYX8rHiuJ4ZzX62yKpwrQOIMIGWDMIY5jr4wTSu/9dk6LV83kl2Pziuu1oMrjhJZvJyG02XNFXtp5lhsymtP32EpRDzc7pVJOFoQ6KnDm52xyUAGEGTKmTKzecDqeKDlT7I/ZoNckwGpuXCMIj1dVOhNNDOcTUQEBxw/OyCSAmMGZ2g4ak/wmJxNSljoO3NxAowJo3oFsph/G7P2lN9hOUQs7Pu+6PhjVC0sFBrUgcK09ThmJba6SL2B0A3zbGkmlOFT80Xj42fz+0mbFcwGJxvSQ9aLR68NT0Tlbvhpz0/kQzWORgUUOoHjy6ik9jtx4AYpe6PEpy5THFnjnlP8che551OBtzwATq6txrDqN4WByEE83Emawsy76IQz4w7/+E/7bf/tHbPcbRAAhRCz6JS6WK1ysFnj96hW+/eV/wP/2X/9X/P53v8fV5Ss5ktmpoWDH+potPtnZumc9616h9tfpsnb8QePTxUYOO3eqWfvs80du4nmDIgoEN6l/P3WU6Xztjb/stLMYAsYOQ5QfqM8imkyjUwbzfCha+a325ijKaQk9G3sCzYXgGJjUbqrPPbdNUDznbWTlU+X94DyBOWKzvsEwbEAUQbQDUQCZcUZyilfuRD2ZKhvI2bKXJM0ERkgD0jCAiLHd7xBjQAgDiOXkGWdAEgOeHPpuAe+9JrRmpDRgu7nB+vYjtrs1sL/GbXiH6+0nhN0tPr1/j+ubG8QY9cQ1uY8BbEBYug6x6+G4A0DiRdAFkFvCOwMHCY48evJyNDkIcJJ5KUbGm9db/IW+b3jwcrXCxWKFngjECTFuxRtr+xHbvsOy77Hse3Rdh+Vyha7rQV5OE6vF3n6/w3r9EbvdJ3DagnmQ/jPgg0hhuOJFZN44hwwVomL0zoFEd6E64euhsLAmv5cBmmZJmSY+Cu+YD8vNH6pbpiBRbmT1lgGQc1gsFgoWAVBgRmswati5PVA/RCCcnJMrAyI6p7lqi4Gzem8C5AS7lMBJ8nGxngZWgFUuNmkOlTlc0dorqvoWBrWZlHl8OmfFnwOP7viIynA8bKTPIULzV4695/hEfz8Psjreo67PQDc8Te2YADjQzHH7ufJc1H9V9lheNu+7nGw+TXIRyf0p8YHntWt0ecqXpOcKGj7cPKrXgIem5uDdSoQdqjmVCk1R5kehB4KrquX3oWv9AhS90CPSYfT/8Z71M6Znpt+0KuNxI35835dsBjOjXxBAA27XnxDB4ATsd1usbz6BAHSdw/t377BcLPDtL7/F5eUVJNxAE/6S5a8ox/JmGsdgnFOnyQ21wSXGSYY2qAANczS77DzSAnIuFR6ZMUJt71h37Gd7Y7axdjKYGJeJJSF5qrwMxoBLNkQpZ6dtfhtfW0J4pgpknTC4ub6poYeFu009gipllGtPI
jMqT9OcF0abo4QArSOz5StibHfXWG8/IaYtvB7cJdzskKKEZqYYNRQsoVOPKZAYwlDgJ8U9Egfsww6b3QYExnpzi91+i832Fo7EUCaIstZ3PZa+x6IXQz+FiP1+i2G/xs3Ne6zXH7Dbr9GnHTbpFtuwgwcjbBmwU22ykeHgE2PVr7DqL0Arh2XXA+ThmJC6gOiWcOTQdx2WiyUulhfonIcjDwYQwYgO6Mjj4pdf4V/6/wGCeAIRgF/98lt8+4tvcLP4iGXw2HsGhwFD3CPuIraU0DkP7zosFhfo+wXgbcwJ5Ahd1yGmhNvb9whhC3ISZuY8a9J1C98zI1u9NRjgo7ygz2iAyXr8C78dAo7q3DDOuSrBc/ndEsFafh/Ld2U8XNchc/AkJwjnVwubMhRbyuDmupKjBuAq6zyRgGLee71Fw9CyT1zBrqz8u5DIWIcaaRbZRHLse7LAxEqWsUFK6sUI9SEia2/Ns5a/SOpHZFAPNx4RVu7TGCp3oWPg5Yy+NwMWNncQAUzgRGD14hrnWWt5t+0PA+Prz/NLRQFgzQMPWfZ+eShgnh5QU3qWTbxfpRrILwPU8kpwqqONeaIEutaypYSwvtDPmnRNaKG/6WbhT58ex7p63kARo/EGrPKUlUvOCOWd7Gx9JmecJ1h48s5kmpGenXRGOQ/AyuOHHyJ3doee8cC6z+/PvMdzJghRdRwwA+IxYIKhbvs4geQ9F4lzRuWcvbW8QTlJmTKXQ+XuAMR8vcxY1jrYAoojitPM+BFU12IJncjWYG1UQ7wTirIMIC/kqnTWri4HwhmyOwyrkm7GRmVk1G0xhVz6TBV/vSeFgBBukHgLYCvKPwMpeYTAcPBwvEQMEd4TXAeEuIPvVyi7yGrQVUtP/W3jG9NsaYz7VfudpY3E4vXg2APRAeRh4QWsyi6xTUGS/DPqqcEoIWtlhIxT6yWxfH9SiW+/QcU0swZo2Yhpud9CJWnULfmEIasPqkCNfNStKnNNXhIpVeqQAASkGATE0Gs8S7gRE2Vjswk9mwN2DnhVzH0WY8ON+qEY767hZWr6pDawp/3sprvguV+r+3Qec6pBLwFzzPgmBPGeYQ9OgHcOnghpGLC9+Yi4+4DB9QgxgJN4tMUYsd/vMAx7WP4dUnkKSgrs7BDCHsOwwz7sEDggxD3CICeq7XZrbLcb7Ie9euot0Xc9+q6DwxLeO6SwR9hvsL75hBQGcBzAHCCeSnsE3gEgbBmIQc4YS4jVXJL5v+pXuFhewvU9VrRA9DIfol9ggQVc77FcLnF1+Qq/+vY/SLhXEtAs1bz0Cw/f9bB8UkzAatFjyQm9YywcgARETohpQAo7pBQQSIyT3W4NIofEXTVeDsvVCovlAsN+B2bpS+cJrhfvLlsD5BafeYSJxYBmS6g85/VDMIgvh0CxhaGVV4Capb329KvLrsMXD8mGMQBU14nV+CIqyeSt/lkPEzRETnobgTmZv1HlZ+KEmBI6L+B83xOcCyAKIPL5xLwcrsQGoqs04aTPTll+2zrB7YPBmkvLMcDsFfM3+Z0URmrzm+Q1gOVaGQYCsZyJl1TONbl2iRV4JUiuKtblXcKuHHmQ82B4AQ01lKvIbRsHScTOuTHt2BjI147dOUrEOEF6Kz/lPUmoXYK0ATqmVOQXMVkKM0nwDluza/mo7eBq3HOSe9M159dQaZrpSrGqq+YZZDn50DxKrcciCEiMCIViK5AWU+6wp02+S9VJkOM1L9/FrZZ/trefLdt3JVGAqy/q0zZLGx/SnjxpE5Hl6rPrCRwLwE3OyfjkMW7XTpkbbKIjX8Nsyf91TnuIzMyyTHQDeWYZZ5FVSYEieQYnWzs1X1/SZ8/MF2vvaJacYWad6nSr88PQ44FghzcfjltHcxucc9eeK6dOkWnk8rZsmEr5WXJM9FWTAlyqUw1LLUfvVctzxsVCILTiTNOcmYfya022Wif2291q/byBIpyeVncfpIdH26bEM+/uUdcHr+qZBZ5ayM4VPq0uerc6lIfNFTJDflTvorxNwLhcr/sLonPuPHudP6saD8cMRVmrizaFfbo7fGj8TF0WQ78ItcliOdFJrGdMEayLPtDOfA3Dkr2NxfohMgOGAcRoRksAURLDzQQrM7zvsOwXWHQ9EicMYRDlwQM1Um2sJgblXJ2rRh053aaYe5T7iViMjGLw1EoscvvzY3L/2g6J9W/dsWNu/JykgdS8jsOdcouOypF6MbOdwWpppipAYGxwaleY+QTmnPCY0+i+EZhoeXMOybhDYV7j98Wwhhp7RXkou5putoy6fw6BbeM5mPmtAggKXt+eCtXKO0Y+qh6MrvMgJAz7Ld798G/40/98jdVygRRD3o2XEwADwjBgCHsMwx7METEMACLCsEUY9gIucUJIEZGDgh4RIQZc317jw4f3iCHh9as3+ObNL3B5cYkYPAgeMQy4uf6AuFtjfXuNDoTLixU6b4pyBBDhkoSMJurVc6RVIBkO3nXw1IGYQegBGjQ3CmXPIfIei4sLfP2LXwBwCCGg916BG4DgsP5qq8a5KmZgeAdQCvBgEEe4FMEpQsAsOUGRkRATI9Ie5BxiWohhzOKBaODnEKJ4nCjgqVyNmD0o6vEt/DrZ6Gp4qjXgxWvMwbzVzAOnTjjdloEJH87xe8n1kZDSlHfLPQZQCeAY9UTCUv86UXo1jva7Ts0s8Rxlpd52fpwDQAlOkRcidYlrVIX2JEDOjZ8BwFgBwywnGNB8OXKH8l0t/1GHjkrQZszPtXmZwBQBimAFMWymCsZMYHKyy6L9kRRUYCo+Szy3yWbfqVdrvfyNwwQnjT1D4Rjnn2qB6kp6M1DvAWWjG8VEsw27LO8pD0XhH13Py9jPyTLMqgdtHroSJisbCLpusAJzCuzZ3Jv4euU6nad3lnBxy7d1YF2ZA5DOAozuofcdzR01Xhueiigzj0wjlSsWAlbPSVtXMcODhKwHZIZh5PEkQpb98q3mAKMCtuc+0FNwkb+ptTFX6ScHiFutqsyNE/1wqtB73ffYNPf88Xfn8NNYDz10zUPxJutYVk8nblRyLsPe1GBSJ36gUThH76bqTQUWnVWB+wzLCXr2QNHPjs6ZJz9Lqo3Xu3bAORPENJXiYFyUl2qZpnZB+hyw6CdPqhgTUFkQJpRG1/Edx2328hq4qD+fSco+hZNUoeB53mqGVr2YnBrw3vVYLld49eo1Yoogclj0K6wWF1j2K1xeXOHq1Rv8x9/+Fl3XqacAy+48VWpm5eZsT0/Wp2f0lzgbjTorGyT2Zw1n1Ml2LTHffcTJ+TvL55ZlRLPfHwt90XfN95U/Qbl2cjflbuEERD3tTMLRtG/HoOeZdKy+TaiZG3k/TBTxtpwp0HTYgDhlSIybVV+fwQHqNNxMDG7vCY4Y+7DBp+vv8ed/Ay4WApiIdwBLiA1DvGWCJJhmHhB5C3CEJK1W046E32MSgChxwn6/x7t3P+KHH9+h61a4WK4UQNohJYJLEdv1DbY3H7HfrhGGPbrFonQZxJNM8iSpxxY7OO6mCj05EEm4mRjkDpxPjCIktjoSEjMC
Q7yaGAhIEkJlQIZ6TTkQojIWK3ORHMsGRxEg+ZyPY2aAU9SE1ISoiqmN/hAC0m6LIQxIHDGkIYNsnOpkq0UZzLMiAzzV7nvj+VOJ7uqevOOdQ8TOkxLH+L54p8hpglJ2hj4KcAqrowNRl5PLW33nyp77bKAuU5nnBA0XsaREsyb4fLkamAKgCtPUfwyOIjh1irV2JfmFCzhIcKqSpAwWJRYgERCwFJzANAAIkKSkqaoniRx3ZrRG4Rf1PCVAQU/lsc+U03P5y8Z0Xzl54IHlbfWuhl+yBqDeJC3flBMbjz+mXV/a+2xjoUomfkDecl2hOxNbBe5bwMPT5Djweh16Jvpu3vC7K1X6QjVwNRANCFBNhjoqrEnNRl+BgV/oqelL9/mXfv796HT00ePRC1D0hJTF0ni1/LlT0QqAWXXh84mbf9xo9T+8SLIqo38rQ9GSKTloXo8rjMcAIzXjmCunl3kgoLklX3eHUVAdm6E62ljpaJ6thoC68KfI+Oqrb/Eff/17/O53Hr7r0HUdVisBiS6Wl7i8vMJydYk3b95IPgxIHgo7xcX2JN0B3jp8wmVtQragwjw0MuoX/Xi4px4OADqXpp4vY2+YE+PK9dtz6l7Kk5zLAnCkQY4hp8StrnwmnfImskdP9Vsz8tsTs+aoeHzMe3CcSwYQENEkr0wLQIl7vujLCZ6ArvMIlBDjBtvdO8Q9g+yUONthZcC8IDhFJOwBbNHuwnoADo6AxBGEQU778gGBt4hpwGX/CovVEr73cE7d/eMeYdgghi2G/QbECZ56UAqKSQdYOCUlLy4kHEHkZb6ZnAK0z8vMKcdwA+xksqbECClhux/w9v17eNcBDHTe5XnsAVz319UJcoJKxLAXex4MObUtARzAaQCSAgIJGhaZ1BdK8uckApzziHFQIC1iCCEnXE+RERNnTynxROM7Y/J3oXHOl0PeSuN77HWaM2Z6+llt9Hvv8/3Jcluh9pQsZc8+l0iSqXtkAMI8lSS8k3Ii68Mk3pkS1utKXdjmHhV5wQIWCfAlvJFSEi+7EJES4EEgirbwwLwhiSIcRzAHASfSIK8sIWNc18fAQCJJqJ4KcJXDwjPfPVfD5i71Yu3v+rYxozMsxK98fXgyTL0uKYNDrd7BmXeKR2JbZn313abeGIR5xMl7F5oFi+w98NQ8Zf0u6oHWYZKm4P5EBr4i6aYIKRBfhZk2XpWHN2pe6LHpS8mz5ypHzyXKcuypRcwLUPRoVDK/GB2yLx72Sc9D8M1CQaIFnbjqnvVvgIEku0f5caeBqWeyvD8NyZZlfj/OUQQY0GPgBVVjN96hKkUWvEe9XcZgUfPZFLjq9xM0nzsJGo/OuQXlslJPZoYjB+c69P0Sf/9f/iu+/upXIEfwXYe+77FcLuFdh9736Loezjn0yw6L5QIWxpLDLpwpO8iu0IQ6ISllZW2+dac5TpwhGGKJyjiIhwONvbJzmUUtKt+1/fHwdEzZmgOMpuOo/WThI4eKG6+RDIAVJIqpAUzM4D/HCJaxquT0yOsH9e+1FxEVHjD0qIQtzCnn9wOFzqn/wdAkEvCBpOpY9EtJ9BwSUhwAPaZdbDQCOfPaYYC8AlsdEjpE9mBO6HLiZYDVRb/3wGrhwX3CYj9gOxAYl/j2l7/Ft7/8Nb558wZd57C+vcbuBkC3wD6Jn8XCd/BeQaCUgAgwR6QYgQikrpe8J9SN2qf1rMMLKIEpSegmZCfZgZDCgO12ix9//BFEXm1J8VzynuAZ+G7/PWIIMJCDQNhttxI+FQM4JYS4R4wDUozgGCUZdQRCFKBNxmSAyUxmhdvIIcUoAJECDomBFBMiR5VPqAACqnLYVHmKKl6qx75+HfNHww8z/AOUU9PGoGN93zQMjSe/6R3S91q2gUXFK0pPHeLUlHWI8vTL/XHingaMqD3+JKdOSsIjitTo5ZUSjiKHJC+KypbEQGxlr4FJpB5HlhvHQQBEWWadeqpZjiPzbpDyk/UVZRitaoeBJ49L5+SjmqU7eDwVEOyEPCQgz+m843ImWORqna+Wv5W+0RTFqK/+WeiATZcZSPYAOvZnkQX6CZOXkENk9fJuNHdHyX+TgwWIq+Vb1wr7HRV/2xc/CZqr6M+Ccx+J5mzOEZ3Sy1qE+/FJ16YWVAdyvlMudttT0AtQ9BikhjiZ2VZtQT/KdM5CUJn5MxJHPxzp0jsWwOOqTYCc+y7XtWLAmghR1eqsgR9IEjf13b/H8386VE5/mPnO4MbxupnZ64y+qfRtEIlSTlp21tlqY1rfnznslrejrZvePANCiG4qijgRoet6/ObXv8fV5S8QOaLve3SLDt55mMtQ5zt0PeDFKkYIUUMDSOeXfB+TZTzQatTgmlTWzCar/XxfYcT5ZIaD5X+q2y8fknqntANV92nJ6/PgdHCsWkN27v2YJGwDZeyOsNgkzE2BuRgiwhCRohmQZ85hqhNfogGEMP5OngiHKvRM62DePfP5Qaad9ZCAkRnNkzAjdb23ZMZ2rPyrV69x++kTnFvi4uJr9OThyaPrPLz36LzX+ySRetK5KjlY6uSh9vwEcgzqAO4ZKQFff7PBbz6tcXX5Br/85lusVkt0Htjv1vjw9gdE57AJOyQEeJKksCkFWTGj5EiKaQAHxj4msHNA1xVjM78ymAck3oGxB4OQYoT3ncwd8iAmpIGxDbfYYCNAEUtyXUkMH+GY8UP6ATHFUjazJEgfAlKK2A/77BnE0Y5AZ8QkwEdiFnCLJfiUnJfkqgwwCQgTh4A0BERmxJSQUkRMMc/jnNyZGEmFcLb3UAwgjhoGppGvx8Cimk+mwFLhR+99zik0vmcM8h7OmWT82PJmHWqZkhpvCkzlMDKeL68eD9Lv5wCtuozxvQxSRxWZB9DE1tmP08LTuRiSkLuQOAnIF5OeFiBhYxYaB4LmldM06Ek0DZ+cfl8S7BOiJuyNgEsCgKZURIzOWamz6E/3lRXF0+mx9MF2wKYhyFPj6mRNMr/UINBh3aDm98Kb4lEk4UeQMW2VnfzasrHBdDy6sK793Hdnt+6JaKRr5Gp/Yb22qgMzRC+3ZfYzuq4GC7OHYE7iX0IQYTOeNDTNqlXrHj8pegi76W+BzhnbM/ruibGipk628Y5RHZ5oyF+AosciQ8wZeqpTO6IPOb5UPc+UzGcBFlVeKy06Wl9TvZ/sfNzhUfm4cqDdhbLFct5QnyhRP8kF43Eo943tAjbDM2MoNL9U/AieuaAGiu5L1THGta1B1RfjuhLB+w5Xq9dYLi6QwPBeFX9WxZMFTHJdBLmE3X4Pira7qadzGFgw0jY5Y2FyGs0ppjcjBVAW1boX7Ksag7xWqDI06b7aOEqNkTb1LPhcPj80TwuT3M1AuUt9WkWeE2PQcB7WU8/OCT2b/D4DDNXvLWRsdFMLDM6ULHLJYa7P7mvE1SNYlzF2p+e8gyon/3iSY+KZCc5f4Fe/+jt8/foNOufRL3oBTX0nnnbe51AdcgskeMgpdWJkJ1XAiYDEA4Zhi4EYKTGGfcA+JCz7C3S+R4wBjiIu4oDFxSv0lxfoVh7f/WkHF7e
IYcCQVJVPSXL5xIQYEgZHYBrQ+XKUceGyhNv1J7jrPW67LZZxhR0CFv0Ky6sLXK4u4ajDZrvDerPFZjsA8Bm0kakg3jz/vvsOIQ5qqMszPn26wc31NbaLHRZ7j8ACGoFZHEi4eMkw66znSiaorAgpIiRG2A8YwoAAKFCkxikXA9WA8DKMRaKOgaAs6kZsdCysYgKicPH6sbCuuWvt+nJfqe/4uryjP3qutIvyWt147XGZtI3HQdNYFCDFlGcT+vl54xC58pvcY64+SX+i7IEmQrZNUZxSwDBssNvuQJGwXCyxWryGtzuYxYPW5C2zhL+mAIYA4XLi1x7AgMQBiRwSeZAXb1WQk78kCdBLR9RJ/+9GBrY8hxCbmpVrVXX22omX0nzd5zcfCujWbBhMhf202Hupnyr784L9DPTuvFFE5a+xCeZ14cekU0+6rzbCWeHjWuTqpol4YlqYasobKipfnsBT72mo1qWfA//99KixoefoS9qFeX0zA8DWRtMZHr9uL0DRY5PJ6LJ39ejPaw3xL0wNbD/+cubazwCLcpuP2sHPpF++II2T+LXJeGdvqMbvdN/N6V/tu7HCcr8xObw08uhPv60MOMYeTo9mT5yAyJKDghkcdfeJJNeEcw6OHDz1MEOirvukVc0crPrXnq2/K9yTL226uc6zYPiFXVsdhy0GgBgWxYOxZf5DyWI/j4rB2O7o0tH19mw6IiJqSqwnT4Wo4WYxAxlgM05H7a8N7zOMKMojVQAwAQn1ZKRxGQTYGNVPzaVRKcde7zwmBNRzdi7nERGBKcC5Dg6dAEU+wjnxlFuurvAP//C/4xdvfgNiJyF15HQn3sE7J14xashGlnxHIQ3iZaF5eRgBiffiAUSEmBLCEDGEJIDRboBLAUQM30VcXnUAHLbbW7z74d8wrNdgRMQY4BmgmBBiwn4ISPuE4BhwHpEGhDBkyI2IwCng/fvvEa9XuO226PcL7BGx6Fd4436JN6+v0Hc9Pl1/xPc/fId37z8ihCTeISwnITkQvHP4bv0jhhC07+QZb9++xdu3X+Fj/wn9mgA/gKqExQLUcL7HW0hqgoSn6tgKn3LutwhkkKgkVi3AT8uSlL245HH1nDNeaOlYAuM5ryPZZXdTXj5QxqFnlXq1PN0mm63r75Bzy1TgkHxHbdvMGK+myqFpMxc+6vIz1cMgy/FYyc0aZNL8RGnAzfo9fvj+r3i1usDrq1fgYYO+76WeEM+wlET2yBhHIO7AzAgxIcSAGHeIvENIewQG9glYXrzB6vJrXFxcSt3IErHX6/PD0F3lzMOsE1ZYpXWwDWMF5OGUHCZIUvD2KynOwoerhNk2kfK1FWiSby9rFVXz6Kj6ONs2fSAbi87J/ackRt4poTqhAEb69Tmc9UD1b4qpVi4yzeVznyTrsZHN9RgDhkG8R2Mk5FMuvzBw+vD0Ytf8vMmEHX+RYX4Bip6EHhckMhfjlr5kjvQZokqhrP6dp7HQo/NWEbH89C0duOVZ9coXJLqT0FEfF9CdFyQ+8L6oBvdRzAxUmVcxxiAR51qbAbDf3+Dm9ga7/R5MBOcdwjBowtIEpIREDOcJX3/9Na4urvSAs3I0aw4TgSZGmIAFpjyWUEiRBVovdifaPe7rFpJqHGA0LM76hXE6P89n0ahq5VkW6nf/ecZgoDrmfizeuHllDf/RY7itLhq6cagPsqo8AlcOkYWY2XNrRTf/nj+PUT/Kdx1SUu8MFhmGOOstVr0CAv5Awracc/AdsFh06JdL/Po3v8PVxW+RkuWRgRpMqnSzGuwO8BDvCg578Zhw4vkT4x4E8fhJFOA9w1FESnts4wbDEMGxg9eT1wiMzu3R0xVS8tjtIsJuDQwBFBlxkFO1QkzgISE6D3LAIvXYbrel81me/+7dWwwfF9gstlgMK6TO4avXDh6MRe+wWPYIYY+3b7/HX7/7EevtHjEmEDM65+GI4Mnh7fYaq+HrAv0S4f2Hj/jw8Rq3r9Z4PSzRLQZ4YjE4FAykarUhYsB7JALIOcBL3qeOCYgRCUByBBcY4IB8GhaZqUM5Yb5gF0WOzYXZNLyHlreYy0lldS6iSUgXSpnn8OBcrqIxL9ZOVePnVCVVItNl/qvBV+FfrbdzTXk5f1B9HHi1HJChfVV7ffPRQu/Ma6eSzRBPoJgG7PZr/Pju3/Hnv/x3LDuHr6+u8Hr5FVbLJbpOjjNIKSFpHisBPSOQ9kgJCBEYYkBMBhQFBBC2AXjzi4i+X8FdXMppmSknNpI23kOONp5UwCzPPCjldegY2DPW+tp1reaX+XpOs++NSUDr+pYKHBrxablk6g96F7JQp9qDv63+l9DDVWYQoEGQlY1Q88apmj0kv7Setxa6bNCeeNXdBzb+meQAACAASURBVOyg6UemLItCGECU4H1EigSJWH2xAV7oJ0xfABN89kBRK/pr46/69oQ8OwQZPCo11T4AdNA5i/cZC00FkBysw9mIwJkG5ulOB3jmMrOt6x2zUztJZxAzqhRENLqN9f87QxJV4fejp+S8WTfEGb6ojdxjO89zT6g7NjvFH+BjM4CygZ3rQqPrWqUl52eon8wljw0xtzw0qXoBZTJTMAHkkDhhCBu8f/fv+OHH73G7XSOxGCIhyLHVAND7BcgTrl5d4eJigavLSziNCCh2Rm2gVO9L/JmYkNnYM6VyVF3tRFZpl4EQw564/q4FZOCouaGc8NF2TJ2P5C5kdRjjYKw/WjjJ+PrG4yHfwbNTSXpFDAE2XtKdedJQJjG/kuby0zZrWEZkAfciB8jB5pIjpu6r43Kt6kPzWKg8F2pey7CAjbH9NI4sm+mTtj+K4T81tNt5PK229RjyffLGFRa078kr6FN4MEbJP7RcrhDZI6YFmPv8O3IYlDzH2p8oScgMgKheGAlOTxrzAAcQOQGPEmMfGLt9QogMJAcLqfFYIKUeiTtsdxHbISDsA9J+D0p6mBiz5AJKjMhAhwRKAfuwRw431f7a7dbotgO2aYcUIhyWuLy4wNWrSywXPRiM29trvHv7Fh8/fMR2P2AYAjgxOu/gvEdHDut+hwt+I+3XHfnbzYCP61vshn2Wc4n0yPRUgCLHhUMcHMh3wKLHnqTFzmRFLzKJYoTlUjGPFwJVwEABC2ze0FjOknFkxROVsX4sMfX4/dx3hwzrSfgb0eT7AiDZq8mJmXXC2kEAmPMpc0TK0s7BeTlCPaWE29sbfLr+iP2ww9ItFd4Ze9SJVxwo5UTdTAnwBOc9nLctDwc7lt7ySoLVH5AIcB5DHPD23V/x4eO/I+1v8XGxwKvFJVarFRZ9j77z6sGQNFl2REwBiAGpymHFLHmJomNEeGyDw8XFG0kqzxFyiqCGQCcGSBJi16tdWbtNOLkiXLLMouZPyrKxa+X4IZLQZaDmPVgtyPiirDcZk8s11XF3JictvBLlCqs22z/5IHsrIbecEGcNpDmvtRq8ZBZPuRQZHBlwokeY459XT7O67tkjtdZRs05ppyySHtpFUx1rxOIZTBr38ejz/LBMG31SP5/8bHKgzqboqmeeZohTzzypP1YyLn
9myIYQWc2K3C3627jsIh8n8oTK9Z136D0w7BiBA6KLqgY4gD1Izi5U2TXuAx37E91cX12vu/l767NcZZERp+nUeNzTjnlQUh2oFUp40nrx6Z4aqcH5O/P3bkzzU1V/xKbNzy/lQ10XhUZ9/ETd/ayBIlssMlFZpJ41jbmzWbyrL8/y6JhfZJorHrJLHmrXaW6Gngn6nH/d6JG56ikrRq21JwrlncvVwu9Tq3N6c5JX5FywbvZpc/e1fJdle6OptfWxekzLqepJlWE/0zm2jlM+isSXH+vpMVIMxAC1Y6rLKTc+lauaRbg+hYZJ12LS5pnC6FXpjhjCGu/e/hV/+fc/Yrtb6/HUJFAERTjXofMr9H0P578F8KtczxyGxIR8HHoN1libElUpusyAmWm8frTd0JQS3MTIAsB6DDTX/Cwfx6Barfy14MP9iJlmlN35ud16zFAGmUTvqgzjA5RITqICJ4XNovxHCWGSryMhUUSEhnvwAI+EhIiEDoYeW4hUmR+uACkZQBNGymNV9a8YRO28kFtl3MlJ/gNXG2oT5bad38d230vyTTT3FEZ3jbqQDeORt4js2nowCAlB5lUCOneF21vJ1cPJi0cGS7mzoXg6pQIDgeUUrwSAKUkSalXuEwBEQkwO623Eze0eu8AClHgnXjYMDOwQySPAI3AH+CVoGeB8J4BSJMnVkiKQgI48HBbgvgcteoB30idq1DMGpEQSQkoB3eISr1+/xtXVheQY266x3Wxwc32Nze0GITLIAc5LexMIkYAIFDCbZP5uB8aGGeg8Ot8hsD2Z0cHyEVmogySk7tij7xbgi0sMauy7SCAfwF7GlyIDe62HhvpxNkZlcC3Jsx2d3sw5DZuV8NkyP+3URaGS08lCy1qemo51bWzXfHSI6jxoDctksOqAgpufUZ1KxMiAgd0iYE8x3JgTtrsNNvsNAiJ6JEhycgGVCU7yDWWPOsplsReQBuSRT75Tgx8Upd8UbBWZQYjs0C2W8D2h63bY3H7E7ZYRux6bxQJ912G1FMCo8xaGJnmsOIqXkSRNdwCThCM6RkAH5iXCbg0OOyApUOQUzEkaiucg8jCv2vZnfenyZ/NrY27BbFZwp8ic1jNrznitD0FISY8cz4Nkcli86sgJH8tYlpPfMvAPyQEeQQ0HFPtcT+tUXmKnSysxyJe1cY6mwKbxb9VtcIh2KiYziJMAt8xwqE+hM3BMvLqs34S/NbxbgSEPa5i1oWrTuI4gaAcdoc9bo+eeauU2wfFkkIzKGprPnSe3SifmPjn0pJMbvdQ8W75lzQs2Y79kva1e8w6NNeey83sWoGjZ9dgwIez3gPKp/N7BeYIjZL4uG1zSVs66wBFPtrHad0gHrj6dZ0PM5UI8XOpD0CkZP/1SZo5MVoYdvlKDkgdKw0PV/wjnZtK09g2WxbrhWFfJMUbSqaWHnp2nSZ+o4bQZiByNxTn+kA/hSfqsgaIXeqG7U7V41ETQhe/Ugq1lUHk/VXF/PiTdQicl7uFQvselZsdw/BtG9vjoKlOf5T0BiOAUwGmHzf4a7z98j92wlnwlkNPZEiV41+Fi9TVe+dcIYQDAI0W5eha12oKx2WPh2SWsyp5WksLa73XIVdllf/CaoLFqc33KmOk+v45DC7bMlti420DvEoOZ1SAfL5MMBifJUxRT0kTIdr14GqUUkRddIljIUI1vSmss6XRpS12hsecPSSKaMxbiVh5NFvsDwCxXDTbvEDLPBbich0G+b5+YvSgU4JZ+kCPBd8OAzXaHmABLco2ZXe927okBjBTEk4YldMvp3FBsD4kZwxCw3e7UawcKpADm2UTk4f0Ki8VX+PZXv8d++wopbQEzziPA5nHDQEgJw07yh3WrCKZQ+ofkmQzk4+kvL1dYXa7Q9x1CjLi5vcXb9z9is91isfBI+wEpQUAWgiq5JgPtpCspK0Y52bB3XQa02YDi7CZWG0MMdh5uuUTqF+icemQEgJNHjE6BCYcQpU12KmdiybnFLOGUTKnxCir8Nw/ojCmfLjarZKo8G90+9gg6BADVmxkW3va5IZW1FKllrR1PTwJLihzQuS4M5tuCVAgTWc4lMwLFsCEIUMRIALsCtrMYFRZezCkhxYTOe3z9+goflgvwwoP3AQ4BlAQw8BzQEaEjB3BESAMcRyQOQBp0vmgOlSTGt6MOjhM47sApIIUA34n3GzE3fVrWsPvSlFdOhtrS+GQ5A9x0XKzPgNzPLRiFfN3s4jOjolkdST1P7hOOm/mzYmwLwZSwwCxQZR5Xc2O6vuPgRv7T0QMu3GwhXnWx7dr9VHTWHrlciWNrJwBkUcxF/3Guw3K1Qr++xXZ/i8R7EEmieEdLOL+AQJdHnjsjH19oRBnF+Il31J299P626G8GKGpVpBf6eZKO8mcMcOaTWqk6eyfgp0gKAR1Q3AA8RLfei8YG9ERWm6fHBBSUb0Xpl/xAAh/KyTMp7RB5A7gdhnAjJ/0kIKaAiAjvOiROWCy6aqfR/CgsjOf/Z+9demxblvWgLyLHGHPWaz32Pufsfe455/r62jyELPEQEj2E5BbuuIPcxYDkDlg0sfgF7t6WJcsSAgkJEB1oIDpItMBuGNwBYftyuffc896v9aqqOecYmUEjIjJzPOaj1qpae629K/auVbPGHCMzR2ZkZsSX8VCQoBZO/SDsYY0eq025Esprpe79REUQU6g8uwjsRN9qTz5mJ7RrQc6QyfdutTaDM6SAKAILFgxNPU5xMMVAFQSvX5XCLFlqFVkmTJlfxibpzgdULDHsBC2XW41BDfLUitMSSDSPPeOn6/PTudGpOcbKeVGwNYtVDRTpqbkqxW2zxtWT5xj6hN0uYt25Il2WgZEbBwQxDQayRrN+cIu/qn1isUaSmDWF2EGjKehmrSFI4LDG+vw5Pvvxv4gYb5DSzpRMVtaJdqpvFiTb61tstzc4u4qI9MZWZLXQOD8/R/M6oGmAVXOGi8tLNA1js92AtoRvXn6D129eIcYBCYRu1Rh7CoQYzNpXYRW0fZRHUDMiEkOQsNsN4E7fl4jhWc/1dsqjkAJD2lYtlKCuToCAI4FFwCGhaYH1eq1Brh0YMEMyET3xTDRXlpeU2SmNY9QUxV6XMtGg5UCeo3kEZc6fh8gV8Mxney1R95awfCmzNGXATpAwpGh8rP3iGfnyXl0954XVjkxKChQBAZq2ri7D9g0hSHIwWBAAXF2coWWAZQCkR0OMAELHDVoeEJKeWwsiOPWIcQDFARh686VUoChBzBK30XbGAcHBawNfy3Q6YVN+DzR26Zs5hReMuQhPunZZ9rhcDhZa7Qutzx9ZWOP9+QOvXB+OoB5zKtZCltZ0LM9I/sd4uOaVqu7Fqw9Htis9yE5+WpiB+6cpN9d78l2fBPxStfei4iFmrM7OcXG1xXa4xs3tNYZhA+aAhhNCswIMdK4KO1jrIy3TdwAmeqQj9L0AipaWmccF4LtKx5asU3yEZfHjd4omArleWzjmybLXhzVjcjbjfd/nTxb3Q0RPlDBAZIuUtgD1IN5B40MQmqDxMSRF9P0tbjevk
WRQk+V8oFqLNqq0VMbR84bUINI9kG7KRSAq1RRhfskS4OGoUrCSlICzNTgkCtvJCHg5Xu5I7t/zpFsQ+Qm03ygGArm7m7sVKKBlzCMMiVFjoLCZmhfNXy3t7LlxfwpG7hsGmJXMTc4jek8N+NTF7FOoxU9GK8VLqbZ4ciCpmKmrOb+MVjgi0nTdHMBsAaKhSuvt7Rbb7Q50VfWd1KqgKloxJcQ0YIi99benoJY8Bt5tkiL63RZDr/MqK/NUgXXEIG5wdv4J2vYMIWiQbO+75FZzonX28RZff/FbpG9+i3Z1pgApACEFvz7//McIYHzVv8A5r9B1K8SYcLvZAElwc3uDi4sL/N5PPsOu1/cREbVEIoZnrxuevgZCtn0DEWG9brBen2GgG+V0KbFWSujjoikLARICNilhGAZsRUAhIQ2a+aqPEX3fI6WEEDQ+Did3EYtgA4j280YNzhV3nSUqwI3eWwJbu8tFGvP1wnqyVF7dlhpQdNBo+t1+8u/rNhCSxAwGZXBCHBosQKqX4ePloGZp5oLCnwFehU+9bt0ifBwpW8kw6fpwvl5j1TBukJBkQEMtOha0LGhJQGmnPBsj0tArP0ede5zc6k3XI6HqHZIgMIOhsXJ07hYrqbsY1Lw9zVW9U8C+wov1vb5YHwEz/Za66gR1NfOx9DYsiCXz8soxgB4o1KBltH0gRyap2lq7JH8YMk5ZUx6ofB+3/MoL8/C+65z8fbp4Uj9Ji1fHF8mshgLa0GK9WmO96rDb6UHgMOwgDKy6HorMywFWFZMdT23rI328dGz+f7+Z4DsAFNXnD/sHs9Zzc7w0fChbw3eUTgEX7nsARqdxCwreyZLXd5UzqKyJfppWyU56iFkENN98vy2aBW8tWMCh2V7d7IK/K8AJKe5AEgGJCOZizaxifGAACKbwqJBJqAXihVpHfUT+/+gd7nubuTsO9EDnPrlYDyAlqsjb9ZqV7lYslbKrl60OhFEzbYlZVblIUrLvEkZPSUSJm8WAKXIEArMqkGPLDS/flVcGLGC25gKze5BAGRwCKCsmdRDZotTmZlbAgMcMWVa0J2PoCifqk1mxYLgJYFYLBajljQYGhgIlBFzfXmO726riOjlt119m7SIxK7jJMzJN/hUAQ4y4ubnGy5cv0A9R+x6+BptdphAiAFDA+vwKhCuQZf5KBuLpuyQMGCAy4Hb7Ei++foFNH7Hre8CDzRqA99Of/r7GE3v9G/BmAHHA6zfXSCHlJf/Z8+foVucYBgO+oobjdsUCQoifMHYTL6aLy0ucX1ziFW8hHACymE82vok8YpSBMSSQpoF0LYaUsNlukKBuPDFqNqxhGJAkIgR915T0moItNR/smzUED/YeQtCOs/gb1SqEihHzOKk7mvLYNAOasxSDJnNuDjxPLYj2xTa6i4WSulnpuLqFSCCLN0QW5Bk6fsMwIKaSLj2v+DZvK8wY7qIIAJzjvadRe+t/iVjniiQIBQgxVm2HrmvRNg36fodAQCAAKSJFASSaV1nE0PeQFDXwu1h8NJOFkmi0sAEapD2EBk3TqKuXiAKqQtVacr9r9n6XxaX17jAVSx1g796Yr/tPZSXpcX885prkb0ZYzl23upofy74f85wiZmuGmPuiLRLfevzTWpd5YFLTTyDxSHx56AOmsTo+r8ux2onUDt/nRlT8Bqv7lBgBRC0u1ueIV0/Q968Qh2udj8MAGQaNA+ZxBt/91R7pkb6z9B0AisaLm7jyO6EaGPIkF4/0kHTK0vsAgzDeiRYoHvrSyAWYuqDv2lZSHSllZcKuVWbgHwJl4bbWggxAuMuoqFqmQiEBCMRo2wZswYgTogkp6o7SNozAFvQQMDeaqk6iSkmk3LSHpdFKloGZQ3TnWA9ekwvWNL8+EihHOP1kAmb940Abl5pGBdA/eM4jopmuBBnoyM2RsWI0VmJNsRZSQMmU0ZRIs9v5eNI8W5NbDmkXRFcvIcLZaoOZi2ArAGWro6JQTd0GS+D2sSK3f+xqV4mJdYkkUO26Zt/FNCDJgJh6XF+/wosXX+Hzz28RQmt32r15CVALimS5zkSKyxlZbB+RhH7Y4eb2Bi9ffoPbzS08yyAQQRQyKEFEiO7aZ/OJpVH3L2LNTAQAlDSvmkQMlvlss9nidnOTFQkCwCEgNCt0qzNc4jkQdtimHttBA9MTGOCA9foCXXcBEQW0NEtepXoI4eZZjy/51Yidnz57iifPnmF7doMgHTgIAhJIAiipBRcyvqbM1ydBv90iimDoB83qhoTUa/r0flBLKbK4ZwoejYGLEpx6MuJmueP2KZr6OYzWpdkyCeezIh85ryzxmceIcRpb9ywDR9My3ma9KWUoT6mVj9lP2h4lAGIcsN3tMMRBo3W5UIfq/uny5HO4BkVHFno+V6QAZaQ8zNSgbTt07QocGov/ReiHHprpjA0G9TmmVo5RogaRlwRCgIheT0TYpYhm1eLi8gm6bg1iBZxFHNjyfvS2vw8yWM3dck+lvCXR+FrW42ugCNV26cGhyyWyeIn6+W58NAb33SrM116xYMa2h5NADwSA2qoox1167/JeQUfeX93VmJg89b5qLTLU5Mu8eC20xeXUQzE1M8DICNQAocPTq+fo+2vstjcYegVid7vBDkXpyGt/IELwIz3St0jfCaCo0PIGN41w/l1T+R/JqYj4e7MKHRl8EzeRAZR89SgC9RHSHCwaO1Z9eJtklXk+095RyWiwvyMA0aw4TAGBA1ppEEitLJItDiKEwIyGQw4YnJ2JTFCpDurfr1BZtB7sW+/utToHBE6cAwkyDS37gESWjlqz2JBpWu4eWDJAiSnYFjWGNGNHbeFT0yyotJTse3YFAEOSWHyWZGVaq2r3v2oNKa4a43rmFkVAdmXL905lZ38ncycywMh5sW6DxxARUuugIe6wG7a43b7BNy++wGb7Ek3bwlIYlj4ji+5kCrsr2xpqSK0/SAiSemy3t/jqqy/w9TdfAiAwaZYwEINZ+5zZUlVb/5c5RDanDAQRBcCiJG3vMCBKxPXtG7x+/TpbdwFqIfPVV9/g4puIHSes0Nr7NqooioJzzJYVykBLCYKOCMKkVjwCrNernLnOZYZutcL6Yo3VxRpdWgHMChSlqCfSKVXezAoORCgYNQwR/XbAbuixTQNS7BESsEs9+mHIvFH4gItVBZXVJWN/FbiTcgpvIDjoubDnHbIEWvp79OyRpWUKQJ+q1B+fX4CbFkjFz37vru/R9zsM/U5d6GCBoqGWYeBpO3ztquSCPcvYKIG7Ww9yQNud4ez8Ck27hrQDgB631zfYBcaqa6t9QnlSRDQbI4YcZ0l5ngBmCDW4ePIJnjz7FG231mvZpbEGPY67yrv15f3sBHfZU3yM6kdOsEZa2kYsIrGu25jdcAycHLkd5+ongn8FhozvN+AbtvZ9a2Let1FxyZb38DXVb3j3d81rxKGFKYPGBKEAoEXbnePy4rnGKeoJu21Cv0uIg4H8ssT1tczzSI/0/abvGFB0GuX94nEN+G7Swc3+lKxnRhl3Ol1R/vjow5kIRwVMP6Gsn9EH
R4w7YV/uwIpIVnPlV6LMDoSLqfgHqmj0AZ5L6DYHUGis5kyK78c3eLMn2mM62hvLmVFZeymItVlgFk9iyGzFPetGLpXo7U/I3wYIVa5ey8k0jlRVaBcibJj02TOlc4W2nn6jkjP80pR9W+UbvbXUNuzdtPcE7gZa4q/c9ZaZgwFwMae2WifLK4X4whYBgk3kyIjI3zgFFYiMrx58ycrVE8DRj9IMeYk0u78AJiuWSJUAJepzSoAF0RAMeIkYGQggLriWi+83AatN2X07RasKglfa7rOgyCbOXGaF/br2i2mm499tYhzFMl06Z2HC29YdezGRccgokBZVwKgawYi0WNmJXttjt0XkSqSd8BWSGVk8TUOihWoMk4DAhjwHa7zbGLLi8vccEbbDYAmDAMI4YhIIYIdfX71a9/jS9evABIwKzKBRLTtZuZTLBOB2YP5zb4+tff4utff4N/vPkRYdxC5tZURlY3yZLO3Lifdf3Pu/VUXZt7p2q7ZPnFavqBgO3uA968+Qnb7Q2uNtfyfvSYndG4+W7mPrX8mXv0LnRINZ2zoppHhZZhq6MoD7f5UmVrQEGej0hY3cnQ8EK5r9gLsXXjWVqrp7x0TO0PWbbWGwJtXZeATczOUyfqmSdLH7tua6z66aA7HBL4+xSktM+RmvHxsZnmTEfS8bPYGSj66PR0JrNckkOTNZ8n4adKq3ZjHnFizyCQYg4NiHGMmD6r9Dcmwss0X+92M1GUvxok2ZvyCTeu9k3fs9fNzqw+o8qf7NxOBfj8qa4VC+yQd/oPMEvt3/8Uae2Ofd36gg3UDDBRRhKfiOtWRBijuLY4zUfidpR8GYgEpohxGLF1W8B5dASAi9tYNO5wWY/zBB6Lm1hUt5ZUri4dt+69R09qpCTWHzbgdYxNeVD6UK2J9C8kAGROCavcDycwztzvEp9qrTWDHVr3nbKW+TPlwmVMVHeTdZYgcwrUpbbnWGJAESEMA8ZhALxYh+Xg5IwcUwqIZWxx+YMFY4gwjiOGYcB2u8Xt7S02FxtcXV7C+y4FURfu6DqPZ5vn+K//z3/FN99+C+98TrtpgdIzlVtucvfiDo56fPniJf75n/8Ff/jTv2N8dwvHQWI6cHHVK+OnzDHWpdHOB9XcwCXWzBJPAZzdztSKybnkAsri3nR7e4MPN2+xG24xhB2IAyhewgIYE2NVLS40D0z6Ol+Z5RW7ybEAvPAdpKK5djg2DfvezHg9bE1EU2z3iExbd21NT75Joo7FSpFS/tOsSjxBkwq4adNFF7o5UKcCaMvhFk3r1GDhA6xj69I82+mfkir5bQG4Pnl+B+k+eZ5Q0DxR/vkAG9Zf6gbeAr+H5p852g/mftKkQl0Ftq947SPK2Geg6GMR2dgJJ0nwNMmsKFMOWHqmJ0Zr+OkjTDY8N+nXe9xrROxWjcscaF2CZoWCuXIs56KuFmOMEmyU5NSdHDQzlYNm1+677znmcAikpwPtK3JS0CJPqs6cAgg3zwowlJCh+cJPszDPWVeGqTXAoTqfhu/mZAyyX3h6b6IW0bKwUrW5ATZqK4dkYaNgW7IKCSGi8x0oxWtRUEHfjekYembGOAZEvgXIITDD9XL6lXMOnOLXxBgRYhD3ohgwDAPGccCYYiaxnqTFQNd5bPqNAEXjiK7rsOk3cOSrsmh97Kcq+XqamvcewzBUbXAcWcWLUpvWQbTvAhbdiRLwa1DqJvVksZJ+7lNEGfXR7goWaaphdBh3A1zfSUwi5TwCyHnEyCDnwF7TEHeq4p4ofar8p0HKQwgZNPLOw7kOXdfDUwffb/Dlyy/w6tUrXFxcgFwCWUApeLIR1hmABvRN10QZh+zSM6H3PV6/fI2ry2t8uHkjsZRigmCitqAODAYcJ0PR5NKJZm7Qz+SyBwYoOlAUa8325Ek7XojEvbK4XjJCDLi5fY8f//YDttubYtlSp1KlJyBIAiyMG+Ecb02vJR6pLk4tGiyMVJfgeLq7ZCUlaGOMrRlrGjR8fT76okIwhHlLHQ36LZZpVAUbz6sJOFJG93T9PUYxaus49zvbjZIEtBYwtwCrzrj7LgOGD6TCPoGNlrmx8CnTIZfCY9ahExUId2vV9p216Zyap5rZrUl+AXafprIWKDLy5kfFyB6MlnWWp0ZnoOgjUAkAenel8iGoxC74ZdJay4hHXVw+cZrs3uUbRSSPVTTi9W3LtMCxK+ZdUTxC3gtNm/lJWSzf6/DWsu0aSQ6DP/n0noLgF0Ogpj24XaqL0mMqBjCjNs82aowxP1oCXbJyp3puo0TMAQ2PRXfJatargtv6k/nT31w/aPijfgcJMAi4enYBQI5RHxNgI/eT8qMKcBDleTfsEIjR04VYIiWrixhjtioZx10OVKzHqpd4S1K5EANCiOi7rhzNDqBz/cTCw35asuk5ACE39vTZKh0u9ZukT/Xza8ny4Sqx8iBjLAEKJieeAtZqh+ConHJlY54gBf+OSSEfdjt4AL4rlmUKMIqCGisriRAkJlGMnEGtkr6+L0rsOI4gIvT9BgCh2/S46Dfw3uH9+3d49+4Nrp+/zMBcXe8FACyfZkMg8vC+x4vrL/DFi5f4+08/gNDDARKzCpwOf5LvznFlRaNK9rr+mAecST9Tm8na4FJbikXROA64+XCDd+/egn4DOOfBOlm1QHEuj7jaJWwrXSvLkOWEqRxAzeehWkniVQozskUGJCw9stIwAVROKv85OaCBkT7lTzHb4uLl5NkFBfRQiVq34CWQaX/dlmNsfZbEM18nVf+FtMWJ6NCct8pl7kAK9echeiigyKZty3R6/ZHSTsYhy/ZPj+bAv6dLZ6DoTGc60yPSjABOzclrDzpn6gStrh5WuDcgiZ6E1WIHums7tx19QuLq2/IiuW65aZXGY/J/KkSL4Naqt00/66EmxKJAckq7UNoBT33fdR6+c3BOlFYwgylZUAR1Bxtx0V/gciNWPbtxRAgCACjIIxYSklFgORY6xAAeCeQdvHcgj2RxNGK322IcBgzjkF3OQgiikKdiEigdZU9iYZQtXZLFEYV07LNUuhjfZegTBJeskxgxxTmyblN2J76l1vWsCMty7LooYKYfZoCE+XS1iAbgPIopOf0z8VQOPF4dKZgvF7SG0jHu2dqlmPABySJrGEdE5yGnBWoQdFGENZA1geC8WAXtdjvEwIhxnGAEzCU+T4wM7znFmor51EYkUOjvP/0NP/74I169/g0uL68zoKP3Z+cRGQClzpFA1OH581f45uvv8Kfv/4hhCABHkB55bqxWFKxUXjkEEs2BJa1ybgFNIjmyXOMUCdgmlnvb7RZv377D7e0WF5sunTlZTgCr4quRukjCjJt6LslgssXdJ9Wwi8HTmyGfDtH0ewaJzKYFUK+/FTx8wtIcsWg8eaAonqh8RAUxXX7oNHn9IojN3xyJwEFwi/Pjp0OPPAfqpH3a0y2eEOm68lh8cXw7noGiM53pTB+B5ne6CTgKhDnG6kDfyK4jgCh/KDqiGEhwViLy7j7kRKE5nOtBFrClJHntkrLwBPEnKf9x9ntZv2vV7DNnawLBH
xQtQnKHKNYiOc+0j9X3nbiUESReiifEKC48TECMYuWgdHFxgb7vU0wVTsGHB4QQATjEEDC6AcNOXMgAhiMCx4AhjuDA4n42juBxBA+jlCsEIMZ0upmUXU84c05OO+ucl5POoMqvuEeVgK4N/3BMhq2CwlFSqL338OTk3YVBVuIUAfP8xuY1qt45TCX4bBUBZU7Ittc45cHrwFxu3m3Je4/r6+fYbHsEDMX9sK0HMygyEMQSTMATnxN2Tl2qBBDsHEkMKCBbiFW8l90UCiij8a7E1THmVhmGAX/729/wxz/+ES9efIXNxTW87+s2ISD3Qb4YkYIvaW0B7nDRP8N/+v2/4t//49/x4eaDuOnwmK2hBAAsbdd+HgIDc3B4WgYfrVURQwBd7wFmDwSHt+/e4ccf/4abmxs4ukDvA7xzNSsksIhs2VCsUDM2iLrrNVZUvmEBy4cW5tXa/AjSUfLRTiIiTps9kxsglr+AKJCtjY9lwbxZa4U7FGWBpw6/tx/kfBp0ys5N/LzAMotT+mdF7SEV96F9IFG5/7T566nTzGbH50LHKzJ3yqKEuzkuo08MKLpPK64doCvy2AMaz983nbMv+XsA/PuDdZ6YjmnKii+NsGX9QY5FB5aKNdmSnSkPntZU89gLx7zCdce2zz5a96lDzQdzAkpr2ZO5iOun6q+c07QaQYwR47ArQFF6IOnJKYaFKjOoFTie5t3uTM+S9W/Ql2aoct2ZeUiR+QAAIABJREFUfwIgbowraHK/EkjmcKHsc7FcFn1X5PmYH7TxZipIZQULtO1EVBRhSmmIPqtKnXw6VnzLKrrpG1s+EGWJqdxzqb4agFgeU/cH5V/lAombIgWRP+c9+k2PdJhZqQcRiIu7BEcGOGDYbeGeX8tpZuk4e+cc+r6XQ8SIwCEihoDtdodhDHCdhyMghhExSKDrEEY5ASu5g7FxJ9OhoPGEnAGKHLny18R/UR6v8Y3kehTl2HX2clKaWslwaj8FkTShFhzIeWjPSaLIVh6Gt5esSKp0GvB13yzD0OJaJ1FV9rUOCy+bVGzwcP3YbHpcXz9DFztEGlJeM+V1TuIPESHEgGHYIUSPznv4Tttc3M+6PoldIQigmIAijUdUYhZZgAM5njRHiXE1DAP6bsQYBrx5+zP+z5/+N/rLKzx78Rxff/1NesuBXLI4AwAOpYIZjEoNRADgQdTj1etf4ZvffosffvwTxnGLSClUKbHExzLz4hrwrwWR8olmOiwpOwFXz4syJ+MsEsM7BrNDCAPev3+LcQwy5vVVU6UqRkniZwVCAVSe/wof2bn1EMvMjYEWaJb5utQtp53GBOexYgGOYrWW4/9MiMwfz4NExXzQXNoHyCTA3K5T1mouXzNPc4npV9rApq9Ma+BesnAd8qcuS2w6YSpKL/eKrCMOApO7PLdrXC4duwXYlliAtj30vrjUJpAdDM71QLZGFXl+6hZzd/lyjRx1IumVUPf1DEWSEzinj1hQkJr/J0woQ9Lw9jqh6b5kJ4QFUjmDrFxTSRf5scM9U+Sj2dLkotQpzafbWsE+JY3FUrsqn5p/ad9w3zsXmIfQcnBWRR+ApqNlJqOp8lKu73t36b1lqLf+Pp1Ml8vY0CcAFLXxMVo/z7mdjJb4MH9aCXGR2olwqaHJXOLqshVkDqdvaU09Zyhnn1e3E9C+ck4yLt8d0oJrU7La132omVgnkzIdaPtfEHGstd47tz0hm+QwJ03mqIKUTzKCblucipWm48+KA0Xtj+a+CH0EiUu03d0ghCgKCAFEUZRgApwDHMTCw/suu0s4I7DXc7UV5OeFerblXjkGyblUbm0PrUuUgNQLpIKu7QrbNjIVpvcXZn87umOyXMinZqXyZBWGVLlYUy8/o1ByAiAYkSOIIPFeIEIqMSFEwMe0ElD9LgBwTMAF65sAI8DBS0DhmBqEo0i/7PJ0yqCkYxGA5GbjAoAREsXXQeKdjtoiYBLe4WRGTkmJ2A47vHn7Fl98+Qp9n9yukNYsJ4FTyXkAAiZcXo3G2ihgGEZst9schyjGkOIGSUljTFYpLlkRGaDIAkbe+wxaOOMqpQHS67UkKU7pUgwB7Bw4jAn8EkWobnZuhH0uw5J9NU7yyGj6fS5ey17i5ekBtih1zQDQqlCATvnH4CZgOVXMbwTwGZjSeNBxVuYfUSdlgWeOGEKUE8PYoyeGeP8RvCOQp9yXCiICYr202+0ycIT8lwJTk0QUihQRSOJVjWHAbhzA9B7f//gH0IZw/cUzfPHFNTb9MxA6EHVwcMkSaUxWHR4yolK0NYqAk7EGAvpNj69/8xWu/9cVtsN7hABxTWPl6bIbb7+vIRubRizqUhtGBrkS8B1Adt2LUeJEkY+AJ4Aibm4/YLvdAS88HHVpNmiAF1LYgABOwd913mLkiTJqOTSQEZSnuFLa9k1zLXjGvlh+cQKf2aZDSHOIbEqoS2lpn7S+VgoZmb9cMxAlkHuunVMDW7dJeaBVpPdqZaXgpPOCxGVjpHpWy4BawOnGRoAOYoLLc0qZWUq7xXic5UWxbFRrNw85yc+BowOnazL9yxrBzCAXBBjIFlGc6wFGAhWjyAdaZwaInbg6chIYlkwujyFZ8NbU9u55HEnEANMUFCrFSD1IuotC888ClZxZLAofThhnigdkrcSLTRE4zw/lhEZmWtnsugbMZSdrJLVlWuTzKeD29Giu7KfiT3cwqQL/LlNkmaXyCOV6qK1S+4+iZk6dSZvM9bKBUj9cQHWb9AyO0apcMygYVd9aPWRdf30CQNFToj2L6hx+lK+3s9FJC3WAfiHoCM8tUnaCPyVQdqaPQdk9DMX6xHbv4nv6P2m8j5gEAhHMHTlEhPRIEWa6TlyOHKVAtEAWuPPnJ0Tt0tCKdg9NDlO5SDBGq4gZxZFZXMXyrjXuNHeKYq9uD7LTLLNiNA/EomhFI0Qy0Hc9um4D7zxCDFJuSpZLJMrFOA64vY149+4dtttb9P1zY5UG6LHnzskfAiEGFmDSAbvdgN1uK0BBHMEcc3Dp3D4kAKLvfHJf6sVFLP3Zo+2nirxtjdLWYq0U8tQYwogByR1Kd9FniRaWlnrxK2m4PbyWgAGVYUSbPDEdYpw0GlJgXa2cuIr1cM7LiWNE4PRX5iNAY55RdJIEpA1z7szoe+FhcUuTt7wXNy7tNwAStyidaMepLNp/EqsqAhix2+3Qdz36LiRAIuL9+3f4/vs/4NdffY3ffft7XPQadn8GFKj02wReSa2x8Ru8fvkVnl09x5u3b9IQiRjz+OSqn1pWOBSnSPnTBlDP1hypntIeJb6T8Im4l8UY8PbNG7x58zN+9fobk7aWpubDAr7wROC24AbP8PSaui0FUG5qj6bRZ3Owxhf3o8TDoAJW4THWrTkAyl5Tdfq05Zjvl/q+dStkoyWSKV9tdJV4lZG1zHybGEgnuQGmxgZAL5+flqwwT1YBngEvl1zw1/KbDku1YlIL1LnxfLA9qXT+fZt+zfsLwfTr8qxJ63PgkzOtoU/JDfEMFN2JZjp4cqmZOHRefRCw4hAq/bksVGtobsLW
a9TeONOnRAzZ0bZyPsEInev7tpqkm1f1a+ccOu+x6Tp0ziEGNoohVWV4UkR8xIg/rvD3qiqXHaDKwCopeNINlPrUWsGUeWzOEH5VqSdbR1xNzfZTlHkAyQrj2dU1nj27Rr/ZgEaxOhmHkJQIBwEJRoTA+HDzHu/evcXVs0t4r4ErWU6NihEuAU3jOIrr2TBgt9vhdrvFsNthHMf8fOSYj0snR/BOAk37xJd9108siiZWA0BuR8voldsMc7bsGAYBjiQmdkwAV9q74wI8SXspwENmXACYtKxeW+KeOfRw4dE70UolvkVS03VKgJFLbR8pJsuu5HpYxeIvAAwRIYaYLbPk5DTCALEEU+toBaUVxFbwJCQLt+KKVgqmp+Lthh024xb9pgMQ8P7DG/z5z3/C65e/wrff/C5ZTASz9ik83JyKyFIOBw3C3eGL56/x4vol/kp/EV6H3A+qg9ll1UCC+lepkQ0/2jhEbd3QPM+sPCzBrJmBEBjb3S3evXuDEEfU1jENA1lETwqT8tC0C340kc/UBQkNN9vxM/NZyiHzQ3qrTqX5mcGIdkZar2fnIpuCTixQ510F23Lfl7QeMQPqNVC0KsT84VwM6FjnHVHOJp0HkNT1mYzbI5GdK8p7BMUdWNbX7L5s16QEZtY53b+ST5LMepKA5kOPropPl4ZKhohIE7CtvIY/21noeDoGULWg456nDucxW9y59fRMnyp9SgCR0hkoOop0oFoha24hmhnQWRNa2lW6O+kUOk2NHkDofqKUo1KmVcnKxfbamT5ZIubsL66igwINIqsUoX5K9Y6qxn/IgyPL58XRxnuP3vsctwj5kSSYZrTjxBU9BbVm7Dw3S92x4NwoI6tfK20rlvwzfaUKW1IksxsCI3+uJoOTc1batM+VF5CBqqoYqhSAcHV1jevrF7jYXIF5FPefIDFTiDjFgJHnb2/f483bn/Hq9ZfoustUh4BxDAgjZ2uI7XabwKEhu5rV8WlqfvUk4JD3Hn3Xoes6sSxyfmJFNP1TgTS5geS2lZPUxOtC8hJwQuqtAZUJqMoCWN4hww8tU6iCnNp9rcLLdwUE707WoqleMzkruUQC2CG5kpU4Kw4cAmKanwqAVsZJDIxxCOL1C0pgZFFymRldJ+KYdT3iOE6sVUKIcE7KPI4jbrcfABILposQ8TP+jv/vP/4nvvrqV/i//69/Q9dtMrit1m25bvpBJQaV5N/h2dWX+OY33+FP3/9v7MYbAbaMWV/t8mOBnTJBlHkZdd3ss/rdsIhej5ETaEuZh4mA6CKYA96++xkh7CBMbOs1D4Qsx/vRuGtTgYmpuNzN0VK8riLrKViUfvPcMzBgStNGC/nOVmNaOiC5U7WnztVWhqcEiizQImBR+SvJl+PC755fCzSqu2BdDpsD19nZeasJRrPc7ok3WZzcwJh3Bz8EoHzyZHmV6+EHYK5dJzLIRLZKzxjRiqv+1PG0hnQe5Wk3zJxw+bSonQceCiw65bg/Ns/PeWx8XvSLAYrWoXhrVCc32e2ayQ31YMiQevl+yjFizUDb8qxCuk9PGreEQClA7PKzpzGFTm1rF+dsekrlGuz3M31KRFSECegnHR5LDOQj0YGiFLLyy4xewcxVfBfEBibmhx1aonzdPYNVG1X3oQSkFEDlnmmxDfqqgXc1Dw00vC6fslvNKd4RwbGHYycxljStSWwJRoVEpnl6s7nE8+sv0feX2A03cBzgPMEFsfxR5YRZju2+vb2RQMN9hxgl6PB2u8N2Ky5lIQhQJPFoYgoOjMoNBwA0ho26lnVdlz+d93A+xSsiA35WlhsphEYGIwoIpRYp4ziCmLNjmCOHgFCsiGIEO+u6VjNWpWvCzrezT93f/WVufVPeUUCxKc86vtH1w6zdBAApPgkk5oXrPCh68KipOoQYER1lHqWodXQmxo6AOgqqeAbIdyBfTlAjiNVYdIy+gwQYj+WERgDJqquAfOM4YrfjZFEm/HJzS/j++/+D//bfLkEgfPe7f8azqxdwroeexFZicaS6MQNOgRICwWPTP8O//uu/4X/8r/+ON+//ltZTB8Y4kadavrWfuS3Z8qKO61QWq2da8IgjmF1mKWlPktO0HHBz8w6MAFIACTMzJgsPFF6VxWBqOXIIGgCy+ZFziAoQ6f3GUkpYSazOWE9WzGIK5f2rHI+qAeDWUubvLP7Q9C4pULQm/ZkFcd+jyxAaNJ6RumkJJWAOLDxuF2fYvl+aT+apBSvtvCitkKyMuJ1/7Byu+aY+M3OLliewxq1igDdNsQnq2lxETl5bhU+QGIgjpujbBI0Dsgtfc70hOzYKL94VXNgDFuGea9FDUCXgAnMxOaff75unaeMHP4Le6sa5AHOFOpxSVPlzzzOfiLo3i0+cqNx3d5Uu9IsBik5HBI7LgrCQFTjtRGeey4vgKUqEec3w5Nrh8ZSFowcfrAxQRGVinwEqWwC/lMCZPgGqRPqM/GAVf/Hk1/KLIjyIkiixdZJiZeSVqWj7hCgHZy2XaoHoBAOyleGOpLLDbYRx6JyRQBgFiVSZWltsQok5xASwK0obSOaKjP5ZENlOpfKedz0uL67R+Q2c60FxECser65CARJvRZSEEEYMww7brXzf7Xa4udni9nYHQCw/bNBiOdWpxKhR66Cu69D3PbquR9c1rmXplK18TeCwXP0Sr0i+q0uTAgzDUCyZwAyPpFC5BKy50ib1bn06ycw2dEO1IkpQBWx6b6HrjAXKWirzgirdRrVgMqDgPhLAhKvUIpwjvHhxjQ9XwNubdxKUnKJYFREBMYBSQHXhWUrKaAmO670oqDEyQjqpK8c+M+3hyMvJXp7BXZfBINsPtUXYiGEI6PsOXecRgscIwk1k/On7P+D59XNcXz+Dcw6XF9cpL2fqJyOubnLhJaINXr78Cq9evcIf/wJgRLUnpXGE5qyKtJwlcPLMRJFAfrs9NwdASeBvtV4CvCf4jYdzwDBswSzumjlSKWsdiphlsJS7kbVcIeNetMDLAqkZ3rcnLcLE2GMGWAKJzxVuVXkZ+bTGvKIpoGWKyCk4f27vDOCccgUzaaUqcXb9K51hN0GWlpDWAupgzjM8SKaqeQ5qOF3vl77RVpQ/CxRp+goSxsS/Imaa2Dm22J99bMwWnm2+MwzfsTkx7lCyDDgZR3HF458T5TUwuTkWeihe0phzbX4PSQu68cqersD/pWceRfd8CHrYOeNY97czUHQkTdt3HxfaoGtzC/IDMMNk1aW5i58vZeVQf+v3T3bGOFOiLGYeyc4VFlB9V+mxKL6qCOiuFlUWSxLv4UkDRJlMoGbZTm7un2Y8nGJmqWTq/JnmTi5g0vTpYyjNg/n1BCxrDSY72kVRcK7H1dUX8P4C3nVw5OFchPeMGCAWJTHxESk4tAUQJf7Q7S1ub3cYx5iVn+xmxgA5pADVGpQ6uZj1G2w2PbxXlyRknnSOALWuMK5KGtupgEq1m5kCD3rSWowxOcYUXogoJ79ULmamx/LYWGhrVmWdjFvfys0R5j0C4ORGUvsOCj4HRUrMxu2BAHavr7+Cf+Hww/s
fy05m0j7VjTLrwlFOCyQddykd8i4bWGS3v8oSTANXS/91CSgahgFEIVvvaVVtsOzdbpfrSBuJJfT+w8/4y1//hK+//jWeX1/j4mIDRJfYvUuPp9OeKIU8TutnOn8L5D26vgMoyIlkUQAuzXufwFkHU6c8r87FK2pBgdqtTdzMyJwAphYgIY4Yxl22lqv7ez+gWfHmMWRAGEsTa7lZWdEqR+mh/SLk+mJVw8vOaXVyD2dBYeukYK3O3QmQBECThtlTnmMsqxqgqPDfXDkTKDSZ79vyEPaVj5sjFe0qVR0s9LmLntXywDMXW9qv5BexXeQvl8D+9cPVrucWGF2dwMenPF0wHt7SB7bR8XgN9UsYHPegCnc/Djg/JZ2BomNpMWhiy+zleEUQ17tbJ6ZaUKY8qdBk8Xvaqu39yNSVzcJQ3T/Tp0xlrSxRuchcgypvSmS/FqUlD8s8JswOY2W+nqLU5sEr9yKOkl+PplrRWLeQThYQMh9lK/VhC34PmkLonHeM6nnsGLKuDqo41kfDL7dvmTed6/Hs6jn6boPObxB4h8gBkcRiJ5hoxkSiwN/e3mIchxSsWq132ASf7uBIrIn6vsdms8lA0WZzib7fpIDHpQUq60xC1kQsv9BM/AZxM5OyKEA0jmPl0kTJugUJ9ChAkKp2RbBPWOrCeqZuQkld5RKXKq9QnHYD9/FiHpZN/+xZwmZdz1bzu66bYnlm13WCx+XlM4zPBgn9FaLhDkqclc7Q42JdoCUXEDHCmbhhU7DIVUUlogQW9hngI4owXSaA4ygXdrsh9acEzXZwcIPHTz/9DX/4w3/g1atXePbsObzr4F2f+Eme1ymRK8VaQJnLi2f43Xff4n/+xzP85YcfEdEfAAlT2RLrWGOKuXesK1p7vbjfqhWbPBNCABPgO7GoevvmZ3z18nfwYEx9Daj5uz8dJ6xbhfVQws0rR1KpobRDFb++Eok0HtNCVseIilUC3Hw/ML5X1HNfUebcWJcs2xYSQL1RNAcUKTkAwfyW9osck3xPyK5VjOy25p7oentymgUyltuzvWLjD+a4U/lmeSvzwyM0K68CvB6YKsXxcwWLPkZ+T51ofuL7SCr8kweKWt/3iUAhTz1aeYiKkoosmLRlKMubCthzQRLnad8zSwve0gUrJRzOez1ef6ozK1K+KwSvVbtgdrGarCa1MPEYtDqftc/dQeiYAghzIJp5Xh87NGm3AM1DkaatVgbW7J/LKMvKZ1VAzklkkcW4O+lcnIWU8hCyu1NBlaCb1rMtw+b60gMH2rTdbW8Tq8TwvTwzBxql/CurDtr31mkpY1XTXJb9pVMfruB7q7iDxP2nzotrVl0zlNIuJoFwsbnExcUl+tsNhpBOrDJKl9ZDP2Ms34nE/YuZsekv4H2HGGN2L+u6Dl3fy4lmXYeLzRU6v6ksMmz6Wp+A5HJj70UqPA3kGEnDsMUw1JZEmjalOEhElKylpnH4SC0EmBJI1MbdaJ/XolE+uSbVBhrUfImHNaaI9p+MxVLv6XJX+Kd1PSu7+oe4W49iJ7O6yXHsev3Z1RU23QY3uIVjIESGI8BpsG6OiApwUnFF8ySxfyJDjNjA8J4RYgAD8M6lgOixmnfVqkjBIumyMYNFqvSHwBiGATGdqOddh85vMAw7vH33M77/85/wq19/jZcvv8Kzqw6dv0itk4DYZO0j3aUSlbTpdrfDd//0HS4uLxA5InJIbZJ4B0U5r/qzxQxg+LhM6hMrIvtdwQxmde8E9KQ45H5mDMOYeKDuUdv1OW3llSovTGgamNrIoVDXI5Nnk2Z50pbH8GgaQ1Y0m05JK5Qn5gIY53QI6V8SUZvOqOYTs8YBUFCOVrjoS/qUumJmcjXFN82f86znj6auTbuk1p4WYLLuI1vk5ZRn+0ZzTXwWGZFitoQsZanBofZ7RExrjbxDSC6HpPLKStlVCrruuTV0KM/1ov5hYlrIzwJFtXuqbCDoLKvzffMalw8C0h4PC5Zvy7+3jI3us2JIVfQ4qsI027m5rKn0g2GQ2YKpbeTTNMZeWe8IWq37H9vnc0ksZjO3eOy9e48yLMxhj6DPPnmgyO4AS3vUO6Zr/BRPSRzbHWlgwoUSvGD6cqWcEepFyKY3895eP+clZuXl+8ekNSmPxgLa90wrgDwwzWVH0931J0fMwMRsfoZOVReuG2o6PbvDc3bu+riu7PekrF6wGTtW8jTguxVE8zWKaecvAl52/iOJWgaSIL/tWhJZY2P4tDPos1hTwRJ6xLWZhzifrqVlCQasOVDTKIq7OOW71O91jJc16czHd4j2iazwywWqcVZOtZjw3TR/iWjiZkGgQ1RAImlXjf0gCqx+d6BkvVPKAKj1ivNBBPOEThBFgD1AhEiMEQEjolzPbzsQ+YV5rJxSxExAdHj+/Et8/fVvcHP7D2y3l3BR7ZxuAUQjSCQ+Q8TV1TNcXl6noNESj+hic4XNZlOOtPdSTudc6hMPR34WNGwV8S5uEHhExAB2oqhFkjWKmYEYEKO4wQ3DgDHsMIYxu7whKd5EAhJ1zhs3KLWS0jhOHm18t7kAxhVNeJ6yENryShvnRnrSWIQJIpF+GSU/P29cQm1Riia/LCwywAgAYjosMOb11rGDDx4dO2CzQddv0Hc94k6sBSIA7wCJk5PKwpRBIHIEJgdyHnCEmAEOUTBj4n8HiROlkW10zXee0G08uujhR0C9zWJUQMABHCChpiKAEc7dIjLw7Ooa7ICf3/8d/+//+O9A5/Bv//m/oLt0cNQD6AX0iQRwGsGRAPIA7cAY4Iix6Z/jd9/8Hn/+6w94/26X14fSU4UXMriT5kPHEqxYui/Cp/kkcsxjfwkoFmXSpfmUEYKAGN73Kai1WmUxxjGg68RCj7UPeQTRCCCCY6zinbkJgLKfyPCSuOjFEsya03+TnX+u2J9JlNyY5L5ocJoYIwon2/EkayxDD7KUOgTVnGMRDZyCHDl7cSXknCoASu2e8yhl1Xl4HtioxzGD4LgHeBQxNgAOHh4dIhiBZfwSIwMpgSkb7RGC8IcrI5e1HFriDDKltp2TKRUsIoAc53oUntLYdzbWHZAtz8zJpRaMJirvlu8zzZJKRAn4SCtQJerLUQGH5LdTyssrZMXcBgfI36dMqU5lQjM6nByUQHBAp/ypfK0ySflg89uWKIt7k6rMyaVFsLU8rps2JRFXv7IGaThR1y2Ld20ZViF4dy9ImzzPHhNwJ5rO9Xcr55rS5F5tsrhTjksvUftMU7JKF9f7e+SRJ0ifAFB0gB6xsQuDt6O5KDr6e36xrTSxvbdnaY+gC5jJr5VXjqVD762YN++V/11ocRB/QqPxI9D+CfOAwHgquWYtGb5TUIDtpDsXvCjpprpaqPBYjU/zyrz1Rk5qWpCZu/lnm8QRrDgNPL3+lIK5rOfI7rTXVhtCkVqlZX+G+6zQDoNINRgwn8l6WrR51Mah9qJSPberxU3fb3B9fZ1OjrKuCpRBLG3Lcdxht9sixoi+36Dvi3VQ5zfG/UxAogwUJSulEptpng9z3iQAHciBERGMghSTu9IQth
h2OwmeHYcJ2KTuEdnNTd2Wtc+bJatYExlluFFMFvmFmk/b6lnpvt+kMq/7rxXKuPlE7g8igu86XF1d4f379wAGIM0+3jtE9nLamQEjohlb5Dhr9CEGxDEFG2cGR4aLAfAER9GAdQTnPTow+r7H2I8YdgExMuSgMBZ3tiiWDZKdWI051+E2AUabzQY//eMn/On7P+Kbb7/Ds+dfYOM3CTghNZxDdjlMAJRalTjX4fLyCt77Yglgx7sZTxrgWuPbtqe/RwVrZwAiBQsr0KmyehHwXlz4SlvudjsM44ALA9SIbC79EaOeLGiAAhR+PoaqWZmQ86k3LI0AZuYbtaQrFj9cEprLa6FsrPdIAQku101JD40kC6bUc97MeCH7peSHxDP7rHbKwznjVFbpALXq0em/NXqeWqK1SRcLxfkipPrl3ZamanuZwLZL8575f1oi0/Vr5eVHp3WC/uHm2fcAGYZNr+wbd3dQm9bRRJs/UP16I40OdOLDGnUc01iW1haq7uPpBtXKZD4GrRBN7/LeuhTnGoYwz7D731vjLjs3Tz2Wd8ynDxQ9OqVFZ9Lv90Vn7k4PF5zwTGd6BFJBekZpACAKTVYkKYWr4CyHA1OFok7c/orJUohm527rRhFhlL2k9FA11h9ukp6LtfBQdG+bMNN2J6d8cuHKx4HctzlUNZk/hsbHzP25v5WTwu7E3UwAlWTVMO5S4GnVSWW3fhxH3NzeYLu9xcXFRTq1rAOzWEWEOCIyIcQRBAfyLsUkEgsmTjFt9p0+orF/VLnK8XmYweZksyGMGMIo4IRxN1Nyrli/5jgbMzE/rCXbFMTc03on4eHltZXmHluVlnl7L0CZHiGHfuPx7NmzBPI5xDHKyXNgeJ+Cmuu8lH3vOPe7G4OAg0ByH0uubgq+cUCkmIFEAQII3vfoO0boI8ZNRBh3CKqs69iLgHMSD2kcR3i/Q4wRwzAAzLi9HeB8h9///l/w61/9Bn13CT0qnHMjOEko767LNUcOfbKwnH9WAAAgAElEQVSC0/rIG0f0rZnjD/GEBYzkUYYGq9aT+wAPRsSwG7Dd3SZLbyOkJwVWY8XEHIDccNO9WLNYMEbtZ1h+tFZzc+MGcJTA4JUTXF5/Cp5bARLGOOaoetx5HZuZmqcuaGywBGVWVUx15ZkCza2b0j6q23UBTNqzjJzl5/uSVY4NMlzdb2UyMt/vl3MWyVZ3o5sWYy/ZcWzreaYz/TLoDBRNaM/sYRe79vpHBovO9Augu2yBtkmkz6fCoQf9brkVo9P4JPtbP7n5jeYeisKrinWTdGNwoSEY8pW0x4Z54eHU9LC7BbIjTnfYMpoKg6cDiebAvuPetq9q/5W/pDBajGCSigFOINYbBI+LiytcP3uBd2/fIwbG4Do47gD45N6wQ4wOIMZut8WHD+/hnMfFRcCm38CBcLvbYgxB0mU5Nr3rNri4vMRmcyHKuB6zzMDkRD4oqJDiaiAde89iaTKGiDCW4NVjHHLg6raPnEtWLQtzigWJKB3nJW4U1q3Rxiw6zTiQtlS3VioAQxm25mEthYl5scg6SwCvUVr28LFzHpeXl+j7Hrfb22z5E0FwjuG8FwshjohMoEgIzAnoFtfDruNkPYbaoiayPJvaW4AiAX82mw36zQZgJFdQh9ubLUIoAIpiXSEwiAKIduh7UYJ3wxYhAj/99Df88ONf8U/v3+Jic51O01PQBWrchlbx67oO19fX6PseU8sZqsUiHVcJ5FJg4xCgZzcGaouisuSVJUGAUE8ew7DD7e0NxjAWa6dmTuZ2UsgA3n2W02KhRAREc2AJ5TxKPKXi5jYt3/E5m08qn9OBcSgww3SerV1wDlDbr5pqNgVi8yhX0FlOIJfjDsvQsWTHG5eoOGeg6L4UYbYtjnz3hJ2+alipQLAQHmQh4cKbac7Lr35M3nlI2fMp5HeIGrfaR8y3/vz86QwUVaQDYa06bRgl2yGnOwdXPV6Vw5nOBAC601viuQidWsg5NT+uBRDmdlXLzrWhWbCWJ8pENY5FkhfFy8Tc4YQUacwGzTO7N6jiMpNjXZ6HIWvFcWprHdn5jXo4NiYuR4u0b+fwJCXb26brmnsZfMw7+KyKi4JC5r1cNar46fLiKinXHt7LsfUhdunYcAFRnCc47xDiiHfv32G73aLvL7DpL+HJYeQxnYBmg1lvcHl5hWfPrnFxeQnvOuQYRQtrCjNAMYrLUQxizRIjYogI45jiIiVLolGtiUqgZQWJiptTquUe0IdV2UsATnb/rJbBwwp4y8pTVxMtqfJ+iheVyzAt2TzAaOfKuVFcr/dz8b9yqszwTiyKCoBB2WVGAoFTCloOUHLFUYO4KL5i+ZQzpHbkaNxUGCAnQa8RkNuRWVzPfN/hMinyzIzt7RYxpnhFBNPuFsQgOSUMI95/eIf3798hhBGRAzoisCMgUC4AI+aW1nJ2XYcvXrzA5eUGNBn2VlQv7U2GB/Jda51mwKDS9tP5x1pmab1iZKmTl8/tdotx3CHGEY7aIMxmTE+v3pO0TDK2Igy4BUDjfDHp+laAvczbhs9z21HdXvMkqJ72N6BTOOutPSFoDis6qyy/oN1oACFq5q38Vcrl9EWCmSPm5e1qzSMsGpceC1JrmXXcTufAwuTzsf6OI11v9lGaET450IoBwElQf6i57iLZe9XCtno8zqVQTUmTuWkukbkyHtPuJZ5XmfMeu9/U4ng/eHMadkoT2oH8VodGONWGUjq05FAbnJbM3DmbZSvcYGbsf3oA0xkoqojNrHOIAa1QSeUjC5uf1oR/pk+AmLNAUdwDjiMC6lh9h2WYBycFbNrqFIGzCPskNw4sSi3YW551zlUjd2KvlAGCsjddRnQrptgTku5JrKWpY3So9rfWDP+Y7DjGUnZirI41k++dVqBuSod8mo9t92OEDPNoHZtH0tZpXk/Mk5+F19R6RTBah+vrF3jx4gtcXv0EBsMNHTCSxIvRgZXyicwYxx3EFS3g9nYHRwIghSDBrcWFqUPX9djebjEOI57HCO869P0GXddJO/BM3zNA7LJ1RYgjYmQMY8Aw7DCOYzoJSyyKWJClrBR55+CdR+d8dUKQba95lw7dPW7faQGZff00N+batKbKLM18A8RipxgvcHXvsFLclLUK1lCXyTmH6+trfPnll3j77h3GMAo4RAI5OsfwKUXHDGIJ5htZXFh5DICXsdJ5AFFhmVhAlVj3deSIcRyzJRM5h4uLi9SPwO3NFhYc8N6h63yOOaWuWswB3gUBiVIcocgRBF9ityMFgjftpW6Ml5eXKQg7LZ9fwCjyjzcArLE+Yy5AX4ODrnA54gSIiZIUQgDnOkngdnI9iCSoNThxazIesFYAejriqcjGetOm0IMAZH2Lk/GbEaU8lRZetO1g2VCtNiSYdwoObd0cTfKoLB9Q3TkNJWmEGeQUuLKgi7UhqkEwIr3H5l5qO146jOGExAWY0bFS63QTRPQeeVkwcIEIZlycdq1/SDJdXXh4UuxWtqj5nNlEZF+b72xqM4DR3DKUgWpK8+ZddTU72uwBJo9FNv+HA6uqe
IUqp64JhP7ARKDCNgdByvuSwQYOYqGtrPmAxXokOgNFc9Qs2ssPNZQBps+AM870ZEnYkzEjBR6ZCMouZObdT5/ybj8MCAVAASC9lK3Q7cu08IMLlGBx4YcAhR8zPlEtDt+lLkVZuAviWALMLovSeTf+2JI1bbgU7LTIF2xErqRZZoDF4fLyCi+++AKXl1fYDQOc81BoKTIBrlhLOObkqBURI4F5lywhiiuY9z26LkrcohAz3/a9nOok1ikMpOPIK5ecCBBLoN4xSuDqMQTsdgIQhXFECHJkugCC6RQqInhy6JyHTyBHbivl60XeM9I3qbDIlQL+NCgpnBkLOBSFax1nEREuLi7w4sULdF0n7k7m/RzviYAQGS7IqXvE6QBolvhBROLuR14QGuHLWlmOCcSNzAjjIMB1FAs07z36vsfV1RViZOy2AxgM7wld5421mMsWOJR4eBxCjtEWxgDnPBy5BP6pUFtGQWSkfHts+s2qNpIKKAhi4L097GFBota6qABKJQEBiyQm1DDKiX4xBnifTqyjdGpfspw8QSS2vVQBG6mYvCdPo3M1Fw8ArYTFe+L+1q5PD0e6qSHWdIcVfRsP2PQkpiU97oSlNYFg63s1EKMgIsFB48y1gdXPtJIsYlOBNS1yY/n8AM/vycqmUKeug7Aek3cjyWVqtf6UaB8ydkLKjf7QwMyR9Fh6zBOq8mPSGShaomp7YYk7rCL5wOU505kMPeh8tZeXT7jTdiy1UgEwd6E8bmWPBgzQa/UObBJZMtZrIBQiILYxMOS45k959ciClS7+VmA7SnbT49SPJGaAQynPLM5ukL07ZKGgYXNVU4YRK/Ocz+YdIgdyDsRO4tB4D7BDZKm1RonhSIgpdlDnRREZxhGUjrEOgRHjiDGMAANdJxYgkq1Yfux2OxA5hBDg3JgsI5LrlVqZ6PH2ERKDKIwYw4BhHLEbBoQxIibLEQ5RgKukP7pkTaQ7/9ndSoFjbY+JVm/XwghmJ9Yn7KBxWkrwYavQ3xX05IXvNR1OuX231cyPIBYw6Pnz57i8vMQ2nWzHBuTQto19suZJ44ojwEwC/IQI75y4a+SNpaSYpqwEXEwB9WNE5BHMAy4vnJy8Rx2c36DfhKzc2thH4j6b0k8WGojAzc0tbm9vxfURtZtW5Aji2qKIKLlaug0ur56D4NHardiJQtthzqVsonQ3z7UWRaZkxbo0TwVSxxgDbj58wM3Ne4Q4oEugkAKfAn4p0HzfdesR1j5annbVAGruXlJlF5NdOw5LPLK7rWnLwEri8yjWR5Xl2J1yWqI15Tb8OqNgf2z3r08WnGL9kA2Wciqigpf7+OpuQMf+EWlBqfvS07CieRJUodxPp00eCSr7RdIZKDopzWo5ZzrT06SPiPncmbLQYamtiFVUUYSWFD8ixojK7YBqXJjVVSTfTyKPg1kg0QS6/rRpiRVWz2Z3FG5lN7e4pBhozjxzd6WlMo2v75pPAht/GkYylGfZZdZdfD3S3jufwUSr7CSbHQARgSNcjOJeZI0bNPgwUbYaIvLouh6+c9kaRFyGQnqxAAlAsjZJCNUYt9iNW+yGHcYQJEZRSMGtQxC3rJyXuFl4L1YkUtqyZqn13f721hhECSzKYKneO3Cc8lF0DE8x1rsSWcR5fWHVxebq6grPnz/Hu/fvEIYhzRUMpL6TeEbCKyEmkCL1vVopyDxkARQ28w8AOHFZi9rfARx3KYC1Q+c9HHls+gsQxD0s80uqU4wRjhwiIogYwzDi53/8jJ9++gd+9+2Ay4vLNO8l948MxJZBw8wIMaLvL/D8+gs41yGEXW6T2VhWxhJjb7ydpvnnLIqK5WzdtzEy2MUco+j29gYhjFA+IIK4zbFa1N0RyK6KaixMqK7naZX7aVpkb80ARvZ76753KA7UJHc7bx4opvA1V20xLXmtwmmXSj5uYQQe156crCsBZKByEneoqZSC2lqHteD0mQrlLucyZwhZCzPdVAMOz7nHgw8HwSKy+d+H7rHJ8NnR0wKJhJ5imT4PevpAkRX+GJgM+JlN5vmF8JBprApr+3YgD5EuNuuY9SiWbtexNqGDifEJx9ApdueOzHGFIPYkp4gYDj+zxj97LjBECpCafwJTZWmuUdqkiKZlaPKrd/9YjvBu4y7cleZ4W3Xu/Nvt6WAjpJBah6RTk2J6F3pSVMSw24KYsi2QGtKQvA4HgtPda04BjZ0GsJVTn4h82jFzKEH+FupWKaX7x2G9G18r3XZ22W8Wz6uAmznRyQImUS1tmp00EfKNEG4tcpaoKo4R2kliiHAIKTgh8pwu7lIRoABywViraG7JRYE7AXQQUt+IFU7HDg4enju46CCxSzRxiSGjtZ2c9pO19qRgkgOzB2EjFiSIpSgUAA6mzcU1LCTlm1P8Fx1jREgxRpDBIgWg9LSrwBEUgvQCw1gLMDhI0OoYGSOn083GIcVpEYCJkxWRA0COMnDhfQfnfLJoKX/TPmr4hz2QLVASIJSfsgooGd7QE9FaZmjBpPoBsawRyyyAwMQp6DOlY9DrNxXUjVGei074KFIJJj3hv4bE+EZP7pkCzxwJMch4v7p6hlevX+GHn35AHLYAE2JkeCeAi+qk3jv4ICCkowgm4Rnv5OQy9c4ClzhnUk82ck0Bt2MckjUZY7PZoOs7+L6X4ZSCl+dgyUxgdoja/pFBNOLN25/xj3/8hN1ui6urF8kQMMUdigTiZDGUrJFSyF/47jlev/oGX7z4Ej/++HfkEUqAd1MgqAV9qtY3Y7+gHgpOlNPrFKCv+i8H1SMZco6xG25SXK4ABsERwAhpmBPGFMMLpG5pYSK/SDwnSb8EnLa8ZoJScy3/zM/DBhxJGwqc53WZZxV8qtJIa4yC0NYlVJcZAgEjg2NA9MqxctNKn0uufIfXbAeN8dQ0Ul0/klhJznfCBBaYYbVui4n/IojlhECQuOoSEVx6Tvi9zCY6v5axsL/MMcXSAsXU96HMt47APAAIIOpSzLAITxJTTssogfgLH+u+kEzfnDczkFYb5jx8TKOrDGGWfir9vEwytxx6qtYulLFMRgzArQBFCSjxyPZQ1Khr+2juBLGl39pYtSX2Gk8my7cHW7N1RzTxA6WdrJylxS/rYMmKAGrbYCZ3WlOqU9IJtZ0V4uLy/XqdPFyu07XRZKOW6GA9VqYssqKRuiqsc9+bMwWYnW/3bGqVWbAqwTxRmnD21puA6PKBGjn25wGZyNKTB4paRelufHa4wa3AeydqBN/TH9m3L72V5T44hk+Uzkehx5yk19IRZTq0hbcqqf0gxGI6Kb5K+c2TByeT3V6g4gRk1vTZSs0on8gyNdcLvXmWmTEMA3T33KY8522jO5XsTTlUgM8r1R6gyJavlvL208Kzq14/plv2JFhOt2qvN/PyEdMG52+UL0hg07ZMlBdBUaoiltu5KFNaMEkxgXpZoFa+5vw9s1cL9lXxUZRXnMQlskJhXpTSMeiqXDOV3XIFPJ1o7QIQJTc259D3Hfq+R991ObB0CCEpJkagZhagJApPhhgwBAGKJKD1UADExNs5cLX3
6DoBiYhEEXQJ9JQmKG14aCqaW9uK+5m+b4XwuQRnBtvClWzossBn1kqnjneVBtFeRbM+iavwZ849l1cAMGm7zcUGl5eXePv2vcQbIlFWyTuQI7gocVucc3AxmOOeEpiFKO6MzfyW2dTUS78DjGGQ4M3DOGCz2WCz2UhQ8k74bQ5ALsAMYRgHbLe3GIZBeImdnNBWns6Qg7qQOO/R0SW+ev0bvHr9Gj/88Hfb+GLkOZkjuPqsghhz3cqKhbQxfpSJc2+k/5T/Ypqbh2GL29tbjMOYy1HaQQDHAjByTjfPRlS3ea301e2pp5RpevtdrexP+zvxquG2Y4hSvyDNN/m3mQbnXPnWWhTlPGavabQzIyM01juljDDzcV1TBSBVHZrIGzBjNjNKXa55AKxNR65FDtU8gcxDMblhllMcJ1SBRMuYQLtkm2KvsNBiHNowXNbVjYKev69YlFfJivuFlmqo7q1k2zJNPrS2ne5Ic/IidPPPXFUX1zKSAFj+Vv6Ztu/jeyyeIsPDaSxbCs5cXWuKeAIy04v8Pka+PpQ2NTPgHYu8dxNhqay6Bp1Ijpd0FCjlzN+luQ432pMHis50pjOdaS3NLmlGE6iUL9o/FxdFlZOC37xAzYOnpCyNUnXpIbL6WDSvYymIoyd0LT64P23UCoRaAhQrhjbNqZSRIRSiJDgQvHfZ6qdYAaSdrGQZ4oz0QlSeByXYJ7mC9X2PzWaDvr9A32/gnCzHIUqQXnlOrE/AAKcTrCgBRWMYMIQxH3cejbUNJaWKQCa4MeU059wzDrYo6Sl5lieLOtQK3fciNl8MD8wpg5lnUj9bkIKRjqTbU66p2+NcOQw4TATvOlxdPYP3/0AMMQOKFuxw3sFHn/olqb4J9YocJd6ycpnpi9YSp1ZwOYOI+nlxcZFAwMYiIQFfzjl4cvB+A2bC7e0W263EV1IXoGwhloESGEncoe8usekv4b3PfTDta+WDGiTSOs22rdE0Kc952i41wKHPSZBytbQS4P/m9gOGcQCQADtV+onyqXPHnas0U9xmHvq4sWTsSORZ/t4f9+nueU5GykNvGq2gfRa2OuYyQDfZMCjgtnWDfmpE1diwZK5bK71HJbs5pwO7AHk6d92FD+/PW/Uasq5zl1CHz00KO9OE2B7s8Pg00TNOl/LkytpszkDRmc50ps+P7HrezPmT3e/qHk2uyW82SvFjCWNWMH+Kouv9ab/bRt3WfGy7s7EkZSCb3JrUyr25BNSayoMcg6PGEDLMRUYJZkpgASCuPwRyelS5g/MOzgmQ5JxD13XiQtT18D6doBYZCDGb4Qcq5RdPNrkvrkgFIOJGCFeQaAoGaRseyU8k4F1NqnqfnjfVckOBB9Ezlvu+vTNnpbA/v32Pazs6gCSmlHMez69f4Pn1c7x58wY64Yjrmc9lUFDOliuygkoxp62kMY5a0ChmCxZAAacYI7bbbX7OOScuhSlPm5YjB+86eOex3W6x25U4Q/K+8JcG62cIr0kQdA8E4KK/Qu97WB4Sa0AFZg5DMbUViAJK2vg1IDSxQiAkd0R5iakAg7e3t+JyCTndDRCLI6RxWGIdnZbuq8RO3l5RREbM/QSWYOr3BcGmpVqaI6b1/Vhqsx0jK9/I38rsX9YWmvDfU6QlsEjvPYScUElIe+7Pt999gMq7jq+2f2shkCEo/V3b6gwWfe5EFiziT7vHdXmltGlyF67/ZQBFXKkF++lUpmsn3GH52KcwnOmXRUWB3qOYfeQdxKX4F3ITU9mG2wfstXXLQB1Dobx1utE5BUIYEtuhvmie27eLeohobxdPXBbs9cpawPy/nuZbjqqkGsHzyMa2sJC4sTVp5hUUGeQB6vk2K7FRfhABzhMo7dyafVMkrzBwjHAdQM7DuQ7edwAkkPSm9wIWJXewvu/FnS0VqFiO1Eq17oQLUATI0eAaI0xd6Ir1AEHjDxUQoYAWjTUIldWRMG+BIHr2FD5d2t3Om7erLBqsCcvxtGiZdkyS2eqsvZGsiKCuhA6RAzabCzx79gzPnj3D+/fvEYxrk1okkJMYLN47MMvR2znQsunjDOY4jWFl+jF91wDnyhMhFNenmCzNLi4u0Pc9uq7Lf9mKjDwIDn3fY9jtsN1ujeWEnc2sIppUZ3Yg9rjon+HF8+fw3iVrpghQieukQC5B4kohWSkpcFPzcz1/TXmrtGXFH3m3VefiCOaIm5v3GIO405FnM5alHyYxyHLW+3hzBhTJc68FutYx2Zr4FesVkgSUsbinMuZxsLsq6dyMHwEEp+vT7DsH0z4dHSOHzG4BFV9FmQet1QnX6Z/OMuuOxJMvQtU+gAKIB9Kyk9De50zcq0peegjixSrmy6v7O1mTmt9C+6xoy9przjxdKMynAR08vpx+CFR8XDpqfpjIPElWkh/61KmKtpfsXHo3+bohUvkmzszTh9P+ZQBFqzSMpz/oz3SmxyKOB4TCjwwUHSIGqrVcTctbEjVnCoTsmy0ElT/xwqHlc4BE0m6sQLLCxOX3PfqAYZJcemZP+vcVmG3e2VUkB5quS1osDu6Sp1o96C9Nq8qiCJVGoSw5JjeX1B4xuXoxAiJHxMCIoXRJjIyuI3jfo+83uLy4FNegzsORuPvYE8gEZJLjvsO4P0B81gPSSU7MEWTidBaFmJL7mX3XKjoMpgjKYVc5/aOs2E8a6UD7S/o2IKy6ozXA4gLIe1eeOpXuxrEJAsspiHTQ08oS6ADCZrPB5eUVLi8v0fc94naLApYAOrNYkC7GBBIpfzXCpwWNrLWEuntZwAhASk+uD8OQ01BA0IJFjjzAUpYQIsYhiCCcg0Nr+xewR82MGAIUOdfj1atXePnyJX744QcwJCAwczn1LlfH4DLq+jMBnln7Lh6YyuxIhIBQyNGCEBBxc/MBu90txjDAgxL4mtrfe5Bzcszcyejw3FvHNqqvVy7Q6foiG7NtAQPuabvi1C5mWqpSMobdiV6SDcoTS5a5+WKam05V3n3p2LJkID7FZdz0GxlfkPH0/7P3Nr+yJUme0M/czzkR9773Miurquu71N0SC5BGaBYIIbEDFuzYgITYsBhplkj8BcOCDRLSbJCQRpoFYgGIERJIbFjAehZ8NT0DM+qmh67OysyurKrMfPnevRFx3I2FmfnH+YoTceN+vbz2FO9GnA//NHc3+7mZeSpiWiBR95WNlUdTgm3sGEdQPQEmROcIf84hi0Mq14I0PZy7Fh8pEnMqf1ryF4GdhbRSGwzbgRW0F6DVVWlzWq+OV28MPL/Q8pzwnOjhwNEFmt6DOzMtBseAFPw6JbiuXt8RoOiFXuiFPiRaC1QVHg7FxrRZXQDD2TcLhXMTKGVlT3e3L7aEFIqV7LBb4SmvWg8M0N3vLuq4LlO71nMg3zFKxaahTDzc9eLBvbL/yfQxAECMAbv9LXY7OWnJ4sQISCT84H2DzeYK11fX2G426LoNtlcS3yWEA+T0NclXYs1E9H1A3wdMnTAnz+UTPQRP0x0isucKRTFdJq37dDwissSqC6UgsdTm04LyEIw6rd/uzmf3xqlcADlOgKKrqyu
ND9Rit9+l53KsnXyiHVvQ3CMFHLqd2V8BFCXIeWlZBCD97vseIYTU1wYUee/hoG5zCgRWgXuJ1O2sVKqVPyGBfjk2IPLoOomrBTBC6EHUjCyggIx7a60SwFnxoIFvq5Sy/DVNjYgAIiL32B92OBz2kBOsIphd1muj1MI/oLBfW6JgFWOmUTd8dnTBwDwDmnjVUDtrHq+0pQwkTs3dj6ESrqlTjqlFad3nNH/ZomuLxMSa9GQ3xfL4nbp8qTyya7X9d489zQkmUjo3ryWwbLmBktXtTN7Px8Pjsfl2jRzx9GhCSnrwOWDEY7ZQnlEMgrrECxyuF09L6AUoeqEXeqHvDiU5t3C/MSSAZafQFbvXmYpTAlJw3HvcVUrA0PNaZE+hoVFU2lAsATK7Um/vrEx/6C5Rf09NjCxri1JXC6pEskg7IsQYcHPzHre3t3osfZ8UdbPEaZoWr65f4aOPPkbrRVHvug6+acChASiCI6MPPWIMChT1iEEUZ2cKNxVCczoC1dyB9L9kwWPlz9YgohSNgb7jgu6cMnAOL96zYlEQTfydVv1OozLehRhKkcaXatXdqwHByUlmI/N14QuxHotiCeYYegL7yLWltCoCcl9ZvKEpqyJAXNFCMMsce9flj9Og2hwlPlbbaI2Ko7xtjiMTLu07AUxoXINXr67Qti0YwrPZWiomMGtEOtBGkXSoHOdLvTToWQLE0ok0RldA3x/w/v17MDOaRlw9s+HSUPHMc8qTxQCUaquzEtTNlkUS68pfzLJuoTT3ncG9kUyXrHNq3e/PR/EvqABhxwr5pfup3KzCeuTzrlQA7qfTeW1grEBpcniGvPFCd6ZqxeHLyBF3IsIMP5Ic7LBUOjuoAoCs9nGM/R+hF6DohV7ohb5TxAUwlACitFlau39YENssTIqiMRXz4hLlmopZkYQlzs99OMSD71NWKqgU4FOTrxZRW3D5uFhNRT+T84iqmO72t9jvd4gxVCARcXY1atsW2+0WniRotW8akbW9B9ihhwBMCWiKNXCWLImQ+zsBRManKaaGqvml25n+HhsnnMuzc/xe78yP05fy1a5xE6lcygXlIqnMEJNaq1jQZML19TW22yt8++07jRukbnWDk9bEKqgBK88Q1WO9dG2JMaZ4RfZuGb8oB7bmwV9UwGVZgOAimAmbLscvAqAujAYVWZkTwqIAJQGR0HUbfO/jj9F2YkVk1k2SX6zc7EZjigoQc9is6eGpkTgNJpkixyxB3UPf4+3bb3B7+x7X16/Fug95TFkwawFhDb0/RCMAACAASURBVMCbyO5MOj8WkNan+DtKZWa+F0siBtxDKrXza8/qOKAXpDXtbvNg5AhiBSxJnc45z01nrzGPTrnvh/x0dyqDPt/d4b62lq6vM6BupaO37pDjFOW0ZN5y2eWsyrVYW58lX7zQpejRgSIU8+ugIOv0AVKZRNdz6J/E4sv8/QIUPQOqGGFiki2VnrtntvK57/i8uSzcAmlgrmooxpwwmJ+YmKqG1iYiDR1JhyeKzBPvXWZatGU2/z9xP6HlF6IiBgab1YW6KeRdexMM6sETmUEcJfgrOTjnNeaHKmog2Ok+pvgwOM3BlRUIIAohAEoC11T7p6cFMNDAczmWi7xHRHqktolOpFv+Ksbof8wRUAVukfJLi4+UshsDlYJ1Sq/lELNFuRLQYe1SKM8aMsYsbofgXAJ6iNNQs7JG0l5KcY9yUEKu8qnrX3GqpanpmEVEjAFARNs6xEg4HESxJ+fgibBpO3RtJ+4/bYNNtwFUsWYSVyGzAIlRrEuE1/JosSLaqU5SdFb3NpY2NH8hfSvvhloga1GGnP4WyxCuOnBKeS9gqdwupKe4VeiHK9pvCiCa+l4PdUo8O8gvuYIMaY7jcsyaSrEYADZ3I80hRjAFOAKurjZ49foa37xtcbjZI4d+LaIVOYDYS1BreITYI7CkEYviyvTN4MigGCWOlfNoGvkwewVmAg4HHUmDMc4MPQ1tD2ZxVTsceuXJDahjeC+no1GBTJIxHCnwCYmjFa0fKILQYNP9AJ6u4FwrwBejmFNVCNXJMLHnnCI/6PNyrShfq8crMtADOXEtckCIPXa7W+x2ewAuzZ0crVxSn/IUOgGPuMqjXIpsbrcdW1LXUlFqua63pTeqX52mAWasgeiZhmMqz3ElQDwGj2xty/OTjaOk8KahSbourRkKaVYd3cltpHmkAwKsYshtjpjWXkBBbbsGsdolCA9mj4pSxqHU/kk/4un1rLLCIy0j5TowR10CzPoqB/nP9TK+mmgRqvsDIMBcNV2hew3kcHt6tSK3YjWdEv9zoafXtGkab0qNypPAtPyO3ar7YsDDU7kV8lYJzBGROrcuzOxDuWCGD8qiL9/Uj40hnUtsrjLWIe1UJuvDKRl5bv3LfFPX41J00cSOZrVqCS3kr4Wk6nlrSX9aU7Qj+RXTwHI61sfF9Dccy1O9nZKe0GHmx9e6sT6djgnEhZSWrM0X0khzcVGGanNhOYHnDxQRjk5SHwqlSW0Ya9SEszsSLUx6dTnWBYU8ttPzrC0jBmNufE8HoTtycC0zcGzhs0zKjq9mKLs2CMY6md9Mu99nX2QNdjrfgSCwLsmFU89KBUgnSPHRlZOiQmCAHQAnClCxoc5aTjIAJrnwZAHekbmBROkT7ZY8BHVZsfRUsSAsTOgmPJGI0yKEDkAMIsTiNKGsVAyV4eNzwdrWJmSHEVtW0pp4ypRTCeGFpJgUqVLJF1aOkLjeXJraTBXcpj9iOAYExgGoOBWMWdy9SskhLbLD5OyUJsjupykdRBoo2AHkIprGo+8dDgdRPuA8GudTbKKmadA2LZxrVLFlcZUxsCYdt44EEtXKZy1QpLqoQm+AmzSNWZxoWTlKvFb2qpa53IGlAkqEQkxObcwprbLLyjmIZAwNHsqKatmu5pJl18s6SVrTFnq14oiRsFMUjlQZLU7pKbhr4p1TSfJIx8dzBMWIxjtcXW1wdbXB7f49YtAg0UCaH4gITB7EBMcAeQAx1Aq1Kk2ErNSDAccRMYoVUdu2ACTeUYyMEIJaGkmNxbLHYjZL3Kvb2z1CYIT2AN4yNt2VtAqLHZFq0HDMYIpZCUs6oQcQwbQHs8em+Sk+/uinuN7+JW5u3uGwD1JOL25wMjdSSpchAA2Q5YEc84TT72TNUbJhWhcKkB2AWQMZJzFDwKIQcDgExCBxpFIjMoDIIO/kOiH1Yx4UMf22LQHRGyiDUrpWcazLWa5F5ZrkVF4zyNCGHus6JDlnsDXFeOJcqtwQtcDBuj5ZcNJsxZbX2mr9SGDHcaV+WZFRoMjqWKyTelvrHGvrEDJlJWrbeN2Q0dRSRH6yGuqaecStAnnOtPfSiZRkOpWsAzFGODJ5q5bLMm9KJWwcjC18KcnbBMCZcjnUwSbKuExrFtTMR8v3hvPy1OPzsiLr5G8g0UiO17U2ZbtS90hjHHndSSPb2n9Qu2G7Mdum3zxlWW2hXJwjfIE5AZZweTVM87ituZOnqBn5dRaFFw0lcIl1bR0daU0hnpIZaoqwJsiy2BTwR3S8Pe
W14+15RANLa4h1Des7hoNbmWXPiUBcuGsnVuPEHktgs6R/h36zNhumsdgMKj9PzWUr6fkDRd8h+q4AYs+CShR3dtw/3ET+tGmNEHQfZHlK/rb7HWM+ItKeqK1WVAYvBeBKGCw7/oJ1G+xUr6pW9Xo+bepe6T66k0wINw2Vq/qZ2jBVlDmqdwELsO7cIpJD27RomxYWYNg5UXaccwCLBVqnx5SLEp+PNc+gVQaCHDlwIcnEgSXYGqDDeNXck8ySiEDF0eAGREzw9km01PmFMDQUGifrkBmZmSqXhPHTjz+X5hKY0s3wCgxev3qFb999iz70OoYTfAOQFwXWERwTGu8FvFmxkWJzlsUDapoGbdtqPKISUIS6pbGCRbF2Q4sCYG22r3A4HLDfSyB2UdgZwRQlmJwx0Hz1d9u2+P73f4Bu0yH0AYeDAO0egKMejhpMnZo3yWtEFY8YkFKDRSVQJL1ATucC1uscEEPA/rCX088OO3TdJr0iYJzF8SlODTu+gM9Saek398Tc9coQZ1DfVDbjHzoOlKSkivIMT0E71zXuopQwrGVJ9viMt/Ry6TJu4yJbANsaQCjnwFgAI2rxUqzBR9fjF1pB1n5l7xbz/WPx54zyPLn+aHEJKIDXcV0Ws1tZrBdN7/nT4mb2M6MXoOjZUalMPW5JvtPE+t8IPKDqzxPQbx6VShx9ui1mdofuwNtjy5V83RQ4Vlc7MmUlWZsgWRM9XTKh5LEAuEyXhKVsN71Ke45tZsqyTOssIYdCq4AecpRu0zRouxbeSywW5+X4bec8HDXoug5d26LxDcBOXIlCSOmy/bUdcOfyXGL1MLeZAuga76zmd8qymlVKbQl3nzTHg9m9sy678G12DSHUvFykx/zY7D1BWl61SPSuwXazxfX2Cl3XYX/YgQIUmrBdvBqQcCRWNlNg3ZD3DDQkIhwOBwGnvMdmIyeP7fcogEhLAwoWBTgXdFwFxMh4//49vv32Ld6+e4u+79E1HSKHVMSxmqT9R9YdDm9ef4TNZqsWTlGtiHpEs9oYWBBN1WuJhqb2zAF5XzgiWVTaLj8RQjwghANC7JGtg+wZtRJIk1XRJ1TndRcajdFjqbLmP0ZVT89bCpA3Oy6Q5sWoFIkmN14uS8NkSRFICeaeGb3cUzF+ysDkC12e8powXDeeCKdOkuM8Ywxd/+6RjV/omZDNvVP06OD8hegFKHqW9DI9PTal3c9qZ9J+E7KJ6YvQkUGioVJ5/o7uqSQ7s1oURqWEywP5OaC8b8q4vMypvI+1AJRKTl2GoUvHfdLU7v9dyWKjmCtOijqjoAmPxpq+hzFgVRuz8NlNkvhA+58c0PgmuXXI8eeAdw6OPLz3ErAalBS2pLwBGjNlUM7Brn+pcJbA0shXvUilFEjs1DRxvxmCnnQB9l0CKus+qgGC8t4YSa+sY5JUPszzaRBBTyILjK5pcbW9wna7xW53i+gYfR9AHBAVYSFiOU3RAdGJR7L3Lls4GnBY9pWOATvynlm+d12Htm2x2WwBEPq+18DSBopkQDwE4HA4IASCdxHOfYPPv/gCn332GX758z/CJx9fgSFBnjMoWfROAokYUNfLn/z4J3j96jW6rkMIe1HAKSKEAO/EVa4Mxj1ZN2vFNK4LYhTXhv0uwGqKCaPavrid7RH6g7oyxOJ9LLBOwcur2YsxLPJcyuW+XlWcAtQb7i1dguZHy3D8PRzx7I8xXXKfzcZO3wc0nuHV5zK7fM4X7GGtiR6vbx6ObP3LbUpAdkdKV5bTWPfcJam0Z85x/cqVcA2tPpX8Q2aBJ0fl3L/c8ElGtfm7ek24YX69e770AhQ9RypkKImjcZwhl1DP+sEV095K/v9QzO6maOyzXN99aorNo1KlqK/kiYnHpvhpOoDokDTAM+TkkxjsaOnCfcPwvQnB0JS5FD9i9Wp/Pk2N6WQKT0gWUUBAij8DDJSsC5cJKEAPASBStqvibGWq3KsUTEGqVxGfgu3+NEgEaHsYoBS52PYrlc2Zd6f4ZepajEnKM2uQqCc9tW0LghyRbqc+CZBktSiUZVARh1rdSoq4IlNK6NB9ZOhiUrKKgUTOOQkY61DnoZoppUD7xxRpFPdLAX0OtBpcm1IAyHiUqlfzTjNDnJnGYjhzkc490PTsQelmzp+0vxlt02G72aL1jZ6qZFEyGA4MZoloYGCRIwEWWy92MoeAypUsh3vJyomBROW4MTc0oxhZwCLiBPYwA30veUff4/3NDb788jf49NNf4Ytf/jG+9+YTgMQyqGrnXHn5QyTWcQBev3mNzWaT+jbqkX2EgOAC2lb6LhlvjNw0crsyCZjJmn85/+Ylg9IbSajXyjFE8WcXsdvd4tDvAZL6SjIGGo3ngCwTcUq9zLec70YxiO6yV8d1fvb9Elw9jAM1cj/LhShyvNB4WiHvrbUsS+1s5nGnNHZ6VYBNW96NV33xXBk7zQCMsp0eVNmzuEgFN3xIymZJzBJHz+bUPPPbXDkd90vfLv6OR85FFfW0BJVrkxLp3BA5Ld3FrRGtERtTBKYVz36ovGF0TH9co17S2g0AyvHQiGg2FKLlW3GDzU+8slB3pMfQq1+AomdL9c7jRdZ6vmRiHzoNtbmpyysnqQ8XTwOAdCLVpCdJOqYKOJXvTooRwZxkzhBCYb3AU09X6SdrooyKFNcvSdMJ1qf02EXO6xOH6s37AoqKEiUBW5rmtPwqi5lJIVAWXSYJQJ6AJMl6OW2sGU7W52sVGygIJQGCQwgJPHDOgZoGBA+CnE7lvYfzlPjGAJEKhGSIgmy3B3pRHftgnsp4HEQSthoosLKywagAPc5STY+tDVw8VfKq3S7rYpWm/EYRrEkdt2DhTpmpyPn++HuqdjWf5MDLcqJcg8YTNt0Gr66u8Y1vESKjR18oopqG8pL3EsDYUcQBkOvM6EOUQOcLZZET8wL2+z0A4b+u61QhJhA59H0v7ceAeSExA6GPuA23+Prrr/HZ57/Grz/7FH/4yz/Cduv05L0cuDnlX/ShI0KMgHMeV9fX6NoN9r4Hc0RkIMQIn6yjMoBZKnxD5SaDcDPISxrQlD5mCWJvyyGPATe377Hf78Ac4bwocIZfn8LpiV2pBnkvRozkUlt6Yk0+enbeeazOK871+Ls7zaRRKtAnLJqn1L1KtxSNdcxVUCMXRaL8hZS/p9J+WOXMmP5YGN7nR2MWlLoyT90c88B4zbbfd+TfI8taDqpegAj6V05FpFHxR0nycbCIcZYo/MHRRWNWrUhGNi1s7+OE+XDi0Q8RwPvwZqLvGL0EuH5EMtnVjb+rHljKt/OfD5lMD7Dvo89pDbHOFHx8P4kT5JIiXpn/n0y66j+AddFRGrTpfS5UWYVcdbbIWVSLflx86p6davmVXDRKd4li5GRRcTgc0KsrkHPS1k0jsYlcOlXJ3M0sLztNKo55l5d2Tusa1f1axrgpB9kg+ZGgna+Xrl7rFaHldmO14Mh3pe5yRPXYlTDZirG1UyGFc2nxV4770zmudN+b+qxIoPiTJ3s9+whd0+LNq9e42m7hnYOHBCq3qtj7zgGNd
2i8g/cOrQGLJOdfOdNiK4AkkyizUV2tDgga/6ptW3Rdi6Zp4L2H925sqCZNit1uhy9/8xv86lf/DL/+7FeI8QBQgJxIVcxr6fSook+I4ZzH69dvsNls4b0Gr05tOc9LdYDhqT4cXEuWRQIUZhC1fixyRIwBHCMiFzGKtPyTkxRZYPd5+PG+ZtExWDZNdwEnrB/OWSvPpkeSZaw9LQbRWCbmGTljxBTIQPDj0YcpEpZrRl7veGpAn5X2PVHFNlkiua/cP8y+f4b0FOT6J0QvFkUv9EJnUN6RqieUFAsnXVib0NOmu+6oVWFK0sXz0zu1POUxu0mZmdRVSJQIGu8QPRXKO5zTivO97n7ONNvJycztGFFh9s81uyRgYQmdKstXjM2spJa76KeXeb/foz8cFCgisHNovEfbtNjvD2gal9zS7HjdmARiTCvSJ2xgDeuTLLvUmmgYwHrudLPs5pTTGfZJ7bYyueE71UjaslHl/+I0rZTAwIqBZfc8A2YGtGub2cidASGqNi2/p+SPM61ZrU0qAVYezt8zwAZwjPDeY7u9wqvrV9gf+hS/iBEGaZGCuYB3BGocYnSIgRASyGs5TBfa+i7GiMPhAKIe3jdwjtRaqYX3HofDQU/Rk84z65rDfo/ffPnX+NP/60/w+vo1Gtfg5z/7Jdpukzo55227/bl1iAhd2xVuPazASz2upsb48Z1i0jxrpXI8SEpAkRFCD3JiWSTvB61vecoVEj/lqWG+LPduRWJALdX8eymg/0F3tulxtyxtPESeGsVClMZXniM5MsoFpXRFe0yqIYknKoicREPBj0Eka+U56990+hdsp8qqZOAiniTJdHsd0XFM7JmoA98RujBPPWP6IICi4y4Ez9Puxuo1XfZiF5nWtMFaulBLnSJkLQk0J6QzEuxo/v0FPfME4tH6N4xhwLymPfl4xjS1ypwj0FxOCDrGc9Wom3qUy68TDwz16VleKNsvK5umDdhmkOiLBK8Kjj0+2mPUC3L8+aDdmRXMcKO+vjsdX5i4+L+8OhbDlguW7hTtNH5mqNAD584PU+5mk4AJKBV5HOZWhfmRLlD0Pw1boi5yFvVI45gMFFmMa0gkVgsKF6jQKG4/rm3gXZOOLu+6Dt5LTKwowZPEmiYa2lAc1WzoC+cYWjxslxKkSOUxpcZVQJGddpYAo6T0F+mmjj+Nac2qpOwRQgRXRskMCcCkLVziQMeoshaSXsgxuczNcmwBVFkoxagA3YDXUryapfxnCso56K31fY6ZFdW9ieGdx3azwevXr3F7u8Pu9lYgohGfRoUtSosfKoCWWMMhgzKRTlAxMpj71MchxBSzqGk4WcH1fS+uaCy9xcwIzHj//gZ//dd/jf/zT/4PeNegaVr87Gc/h/NNOUhyKxsgpECGI7OmQrHhrgG3Y4ChYcQW06tIUvlQwPuydQqeTOudAkfVLXMCsbEo1mqRA/r+gEO/R4xbK3lqXzOgl2EX8/xntUj9Wla/5Htepezlzsrpj2WTEpBFBv8JtdtoaTGYyjWe/3NaESBXzdxpmkE5px8p9JLcMrhFlGOjzdHc2m1z4PF4NMdpZEFVFUdBcZdbIFs2DqkEBDJPJNejogcSPy8RrVk1V66ra5ffi1npuFO6YIFKjhzzShbVdbwq4Ccz4rkFmHuvlmuqvqwkgGLTCq4YmAwwJzdhO+VxVWkuaKmyBsg+ChavKs76CW9cpuF8Y2vICcnekc7LppiXJpqwXDbG9y5QsalpsNRna4HweFrnT6sAPgCgaE0IVQJyALvnRiMemFjELzj5uBXeiHEQ+HKW1hRrFY5y5slhw21w2f7VfJcypqODj0AAG1cVcVTSa6yC8ppBvKYtV3I6Hek/AhAudBJbKcNO6Vh68VgbMDOim05jmN1CKkmZk4ejxLghFcIjgzmCQBoAVsZRpRgou5AK7pvNFk3TSvfE8jlKLmxAZpUsVEyWbjUtLe62VgytJspM2BGIGxwTsVyhlM6I8RMQ+3lAkSlCs8INC/ARqJEKxh7R6bMRQHRgp6eJFUIsUwTFXq2PCMEzmCJc9EkpYgqItNfaiILLpjg6HB1/piSaIhrCHuAAIg/nfGq8pmnQNE2yCgkctC4KSDGAIoB6pRyR/uWs2KZYLyZsVVYdQCl0E9lc5PSgM42DUyjz0pZ5TqzxzywuDwXfbOUiAEDmPQfbjc98JGuD9HdKYdSi+S9pGhaLSAKER5gli7mtORC8ToN54qkCTQ9zSW0ciyKMFe/8fHantLncgKHIEu+GSfiA0cPcVx1ket40G3zy8Q+wu93h3bt3CKGX7iexajGg2oEQLIYQxLXLO4fofMprjoaYtcxpUCRAoqR7cvCe4LoWwTfYYYcDS3mNJQMDN7s9Pv31Z7i++kd489E1Xr3u8L3vfQLvWgWipM2tXx1HMHp4DnCxg+MNHEMCdpPTpmXEwCnQu51+5sgJaAPWsadqGGdeNkU8RUQo11LWYPIJ8KgVrwBgHw642b/D7e17vH71CuAIQoADg9ACoQG4gcS9OgivsgNzA4sXxgCYJC8ZbfZ/8c9xcZjAAhXjfGhJRYC0WeJ/UoCHAPLIVq0ythlyTLe2BoLyJZVABwVdAhWkMz/4it2dfmLxu5SQZY10a4AiYo0jl+cMcgQ4AqI4ZsbSfbSgyBr03ea9wr1x/DxX2dZFEU6QKbNYE4kVNDNZLqakUp8yo459lkoH6BoRIyPqOir8K/ekvtbGK1bFVXLuMdl7fu6qnorGw5psMTOu3TInpMGw4mm3sgFK11OX5oZyQ4MUhMklj4Ma5NTW0dQ4DYkHQE7mW5eta+W6vsuAY7N8qnNNyyQDE7tXEyXhdLrb1JNJsqkViXunVfFnVvGvzcwl1boGsYAOHKw5CYRG5tRT0QtDv488cw5ww+Puhi0QvJCtWUEfpVXtWT48WPjT77V8Ulua1x4Jx+nZA0UfMtHoC2pluBQCLjivPE/7q4cmRRQYCs7ENJHUjx1ry9MnseViHQOmLpxfyncuv+XXqh29Y01Fx4KLmhItk2LURd9AIoYc4zwFxJjeXd5LJ1idMBymhuMlWzzt7C9lroBDkneWyECuxf4rBQBT2vO1KfHgaLY0tftUFGj293Raw8fSCXUqqCRXAxNmqHp4MTdRKCL2hz1ub27QH7KgKVmLwtM0bQKKqvgLMYNAHF192l5Saupj0oftJC6tcl8EbC4U8Sx82zUiOfXMeGCY7nAHvwyYzrMnX+br9Qll5tyDdL+OJzQ+rW2UZiGVmRDDCuwzCYDETIVAnuECNiuiNP9mRSOBpZUQO1EONjhAU9cvVSwjMDgGMIKARRzhtY05Cijddg222y02mw0O/U4Bkzztlu1iILb3Lu1hRDBY4w6ti50k5bYA/X3fw5HXWEUNmsZOwdtjvz/gEAMklEvEfr/DN28D/uKf/QV843B1tcHf/Bf/Jl6/eYM+RDjyCPGgY0fqTY7FwgsATJHmAuSABATm2ANE8N5Or3OF5ZTVLSbAPbmggNMzwzaY6sE8A0Xs9u/x1Vdf4ndffYGP3lwp6B+x37+XOExN
U/GXdJ2OyyQ4628gKXX3Q7nkiuuAAETFWeyaI7lWk/GkjIm8ISPJMhcA/3BqHNbpyFIyznn8JAGVBSNQr6OXo3nJ1DYPjrs2rs6qruzclKh0dA1cIwauLfeR59iFqqxkfJ4qsaxg1xtfJ6/spz2dQKLxe+VUPsQEjuIDK/M3C7w0Hjmm70musHWFjrTIsf4jyxPIa5iRK/ro4WlVrhdVZYrELNwA0emz7bGxAF4/rKovg5emNmanErh4F+aN0bzm1Wvi2lRKOpXXXoCiJ0+nIocv9CCUkIXiEg8eeLx5/0nR4nHyJwrjx0Ci9N0Eb1PGS4VjJo1yQXm6JxcsWOVUT620+luRUiICRAXJwpRcFyX6aIudUpxqO2c55cW+qqbP0wekKEAOfWC8e/cOv//qK+z3BwAWXJc1NowGJXZT+3PiCsQabDVbhHFS9CWWTFQlr3izAHQqvYXq9klCrZY33T+hzcu8cvI1gFOdwJfKspSRAkmzCpxZW5XpKdCWjkw34GbM0wbqCAdGRA6IsQex0929crwQ5vpfqhDzU/ZoAosiYuzBFMDq6mSAghi6iCWQIwksvd1ucLtr0AeJmZMsJ7Q+gFVZXCCJHJrWBFsqTmZcQxHMLgW2dk74yfugPAl0XQdmRggaswjiTtmHHt+8/Rp/8Rd/jlfXW7x5fY1f/OIXiDEOrNIY5CKcA+BFt3LJaszWOjl5LRgIBDtFKoKoGY2NYTysZTJQcCKOFgiRe/ThFu/e/x6//e2nuNoCMUQgMm5vb7Hf32DrtygtV+wvK9A04mFbOx6KiiFr4MtxDsg79qy89eBr19CNqB7Ol8mi/DEEcKqv9To/5LlV69SaMmAJtnoCZEUjk4WKsiaZaE355+dMTQzzc//MXDvil5n0bX5MfL20nJ3fFwYmlHO0re+OCE7zXgQKV2dfANIVRdS2PR+o3jfqQx7efqEnSN8ZoGiIoD0fhpxZFT/UieSZ0Lj1j6wk31Vaw54XxTPqRZiIRz0zJZeUu1fAUHEplePHDnKdAw8v06WVnCJSA9f5E+Gox+NZfcwr/BHnqAIHGSMBjWm1cEdEaNsGfd/j66+/RtBTz2IUVxrv5MjzIVCUrVzMAgAJeEgACMZxdezdCiSq3DNyuUrhMltWHa+YKXPzSmUd0D0J6iOLpyXg1YCEFRZFBUm7WAHrmXYqDYkBFRWMY/Xg4KQ4269CBR8Xw/plgDeb69nQHWaotJgVowPBNw02mw2atgXtd+J6aHWwhhy8KyfoeQH5Dn26F6fO654gogyohdAjQizX+r5PJ6F1XatAywEhiCtd5Ijd4RZff8P4sz//p/jkkze43b/FZrORngkKzqg1JrO4ju1v36HxutsZGeZ5ZpgLk4GNWe5KlnEzgKPWJLsQTfTRePIVDY85oO93eH/zDT774v8D0QFe+wTRYb/bg7exSKguW53J06H58TlUNp9SuS+48AxwvYR/2CVGjl+VANgaLLJ5KI3vwcbRGhrOUuY+STamnyLRjAxwkqi6BATVT03HvzqfF0Yjcw7fmlrIT8pWNyXYV9fkqTBIuQAAIABJREFUenGM8QXYetnlRzfgqODyapPjQ6OyLS7Xxi90efpuAEVmKo0sd9q+wpPny8VF6B5Kz1klfMhsnxuN57Rxo7xAR+soBy9c9+wqkGQo4KhwONx1XFW2Y7mpADrX32k8XWDcnLJbuzbYoQjcM8o+8lxpFxgBOZoLsCj7pMIgC+szZTT3nvo+IDGh5htvMuvBRbEQ4VTPpLgfKzYzvBNQ6f3797i5uc0KSgHwkKNRzAUr/7BtE+CWGgUa80SAp9m21Eknu4vVIJGAFdOI3dS4yfEiptuhtt4YWisYcDIx71UgwGo0blBvQRwEmEDutxGfFm5hJaBTgDxpcKZ0h8I3AcgnJlUKIUuMJOkbEeKl3UsOslEi9ZWg5i26tsWNcyAOygeGuqEC/SiBIxpo33vwRCyr2XYDSdmVncRiKGpsjRw/xDmH7dUGzjsc9gfEKBZGxMAh7PG7r36L/+ef/mMc+nf42U9/jMabOxlDgnAHAZ8C8O7dW3C/g6MIhxRwQvLjkNylhifqTYFE45PR5mZSKuZv4ReLHSLtFHDY3+Crr36LTUdoXYO22cBTh/0uKujGCStkNuVwmU+neO6hVvac94q5HJjeBTkjzzkZ0Oas8nS+yfd52eWDqqLOtL8m7wZT4hAsMjykDOifXXCls5NrbwEer+1C0rmk0h0UMF3X2kWei49diKdKgH0gz5cx3Y6VngGA4uJT8gyQ0LoKtLF+tblh8u0JGj6ortY25/Lw2XN5Pq8pwq8TfMiMuKqfF9btKj8e/Dai4loZuwu5batnVtCaZnk0JYVH5TsVv3UTstWI6IzYRy80ou8EUMTlN7JdgEcs0KnE6T8ApE7sdu2yqA2bQHy8QN9pmm6hetb90FtJFKd1gMQS3fX91fmc3SPLJv2mmA4csqr3HxpcXdumY0V6cB+mgAxukN21dI5ltDwekvUMzIKBBnKVw+wJgrOZT0giJMLsWvlI+jwixoDb2/fi3sMGrmksoBTLagrQKqyJAAkyagCGTeNRFPGyHXLVCssireuy6X4JRqyo4Gw6dQLC/9ltLlsa8ej9sVtaBqmmwSmyDHLePABstBBTLmxFZKFc2NS4U0r9UODm6jPijSpQKeUPZfcESUZApEYtisSqyKOPPQxNzVCHgksGWBQlcd7BR48QIyItWxSVrWQGEyY/S9wkBvfiFmfB1rtNC+cIIUb0hx6hl5hbt7tbfPHFZ/joow3evG6x6Rp47+REK0CAIo5AJIT+PYADPCKCKVwWp08t3eq4V6Ko165AS9aZkmMe86bUFXBR6u4IRAkafjgc0Pd77G5v0JNHaCI2rQOiS71eq5YT/X2EMnuXPHHftFbruwANwIX6FqdxmOax2bIstOp4mpwsPg3+jgvE6b6tU9WJj4VVZLTTJy0q+MJyQmTwBBC42ipJgPBFpZaVia1yO2eGuX5K0uYaXtdiFfgxZ5VUPCKNni2O8+ElxgNzi9FUXYZAzbi0GSiaL1h0i7ePlKG8a+v0kcTK5WsprVWk8fgsT5VZKqK1mPCKPCcFvPuizE90JN+jJeKJdhnn9tCi9wdJqwKezxER/YdE9I+I6E+J6L8ioi0R/TER/UMi+jMi+m+IqNNnN/r7z/T+H12iAusLiyQ7ZsHyGVOaR555PZ4p3WUf44UekpJGu0o0GtLTcDd7ZLqnui8LvbbEExaPmVif2+D3+kpFDtjt3+Pdu3dgtkDDUjYBijwc+QnLiKxIze7AF+ZYQ+uqpToQ6fltlG27qDi1y5aGpGYfbevhp84PsGDb9hmnJyDSRPXumzjnbfuH2cpIH6g+ceLaSkqxoJDjWtgtAHAE33h0mw6b7QZd12UgyXKbsKRxRCk9Igukv45HmQgMJ38dAZ4Ap6c2QRTdoG5ofd8DBPi2Qds26LoG7aaBawTMe39zg2+++Ro3N+8Q4x4c94hxj8i79Juph3MRzjG6rkHT+NwsBN2Zn+KPFRZSdWMXf0uXTvtwERg
eQGSE0CP0vbiHpjyHpz9eevW+P3myBDom7lbfswXd5Wncb8Uccccs57AIk9On7lUAEuXvFaBQuIcxq1sql3PCAiUUcuaDOypP90ipHbi8di6/zzWA9g3ZB9kF3U217flMktc00jVO1j47v2/qs762x5+Merrf0ue85q1W6wHZOrW6mM+HFqbfpRYZPcvz/Z/44EU9vgidPdcR0c8B/AcA/iVm/huQ8zX/XQD/CYC/y8z/HIDfA/hb+srfAvB7vf539bkHo9FS/ixBlonyviAWj0NpreT03Yxj0+elXx6JpvYhRZKZFz0nUhm43TyU5dNzoPtvi2RojrtNcEvvrthTVQXp22/f4utvfo/+cMhuXiTiKtFYNM1Kca1zjJ5BrTyXyvQoRlH1MgBmCbSpgvOwNmk/d9JK6WjVZ9tjnMtd+miofJ7LV9NA3ACiKZ6ZA8eWsihBJj1FEeKSxYgSPFuPuu+6DtvNFl23kZPwrHxcWJQBA3DInlEgsAxKPltrBVE0EeccvG/gGw/feDjfgJwHg9DHiH3ocQhBTtVqPHzXYrPZoN10IEeIMeL29ga73Q1i7AEEEHqAA4CQ2oAUjPJtg2bTwTUNyHuQ94Dzk9vdeZyYGK89MQMelfwvf/OzFvw9fbhHjAEhBAHE9gcNyK2tNIh1NWzFJ0EnsX65xt2/EDi0lrTusrhol1wPck9N12m4us/VPJVp9ACnizywXASUddXHO4Pw4npWKZ4zANeToEL4pOLf6UQrPoOskUG++yCbhZc+66go/6Kw/rhyX4719iJ/lsS0gg+e7AB9XnRX17MGwBURHQBcA/gMwL8G4N/T+/8FgP8IwH8O4N/S7wDwDwD8Z0RE/MDal7l75uOdVQjJTxxN4yF5T2P+z2a+JP6cSy8+nTjOBjz4Yf7EcMhL1USv3De7PzCYMaWEjmpdKg13KV/57hGLizy2VZAt3ss7i/NlyWAAJUuFuVObJt4+UpFpyuKr/ebx+F/IdrJFZupIkLmwjPox99xseRe6soDY9HepstfS9uTcq42R/PTvPMNNjUWCHU1fPUmZd5gZIfb4+uvf4927bwvgRa2JvIMv3M7SiV3GXyWfVe5VyNfS1/zcUEmuyzZdvxSzyBiJCLCT1CZd4jQ9tpdSJnX5FvIsMkvpLtFUrJqkcVVuRscAvjUCPhRAKVWHssxVyY6XHay7x3KSGbhQiAhgFlcv5yVOUdu16LpWrYMAjpSauoa11JoIhEjIx6WrS2Md0HowxxFAep66HPWsbcO5bSwGHDOn09TkVDSxgmMCmqaB6wCEHiFE9P1BrXUsDWtuRmQBinzTCNDUNGi6iH0fcLBA3AraAGIdlYF2sUAwN7SpuF7Gm1Xbc2nRlseGWdQxBxw4iEtdCAgxILuwFmht0XQS9TzzUnaeGf4/bPe6XCVPVW6Xk2+U6akFYDH+KEICmwOgqC6ecQRlZKC6MMchVn6gXKdcb5pn/ZWU52LDJanirTTX08KUUVd/mipWkEKnkDs06IlB/JihS2x2QRtOa0X8sok5NunmEJCoLG/qLipXr2WpeVWTT/Ho6anUzxcWqwmrttV4ZXLHRLbaWpbyn5n3hutRXhPSlZkXy0yXy5TX6WGbDvmlBECHicpvt1SmieeP3z/GKeX9udPQVvTL6otHuJflxaeGuUixHlDfuVRWK9I5BY9Ym+k4pfVpnw0UMfOnRPSfAvhLADcA/icA/yuAr5jZju34KwA/1+8/B/Arfbcnoq8B/ADAl2W6RPS3AfxtAPjJj396bvFWEaHwTyeAJ00mB+88rB6OvKfxQPm59Xj8syMigPzx59Y0QeFDLIJDRPZaL/IY6SPHFZLzaUIgHpAICHWhLoXVTgtehWSZHhw3sAWKPZJDTjvlMWxPE9ZLc2Cno0jvUYC4EFlUIU76FbkI7xycB7qugff5Pjl93txvYLt0NBIuzyJiMCJAerQ3TEmnVHYBuU5LNstfAwENjEjHXfJm75ZdkAL8lorJQFhHsVwVSkeZGHEQxcMCGdvekGMgQBRiWHoRFAl9JPQsVh2ePXoQIiIaDVQ7BQkIf5CcSKV5amFB5NU6JEBOetrh3fu32O9uwLGHBwHUwrkOnjZwroNDA+G9rIRkQVXcXwBGDDWAVD+bg61O9bHZDTmItQk7h0jp8HIITkBJP2AFeXiQYBoLVAadzXww5Acy8Lccn1QCmIN+P0Jj4IoHfwHAw6LKMJx8Z+gcq1Y5cNlSZKLBZAgxxOJmedAYLMCAgDUpDQMiyr7MNU9Bm9k4zcOB4RzBNwS4CPIRTmN4BBU2ctwQp8YLMv85p+OcAOcJPjowxNIHlm8CJ6RIFM2uyACjamACYC2nU8AFAAI4Rni1WooEkJeg2OQdnGuSARU7BhwpXqG2a+RBTYtmcwUXABcj4HoEvsFuv0eshpQD4Kt1Rr5GdVtxRXntnfxctiAauA7ZXyJQ9ACinNJ2ANBLbCXvAggHgDtNL4IcgbhBBuyUH5kQiTUuVOZpGyflP+MDK/fwJMhUHWZZLyim7kgpRAaTk7ZUsJAJCLGHIwGgJZ0ZgYR01Cq/lJgpDfmE8lq1jmqFNcdMidlVjAUEbLoO3jcSp83JvBlddvcznksh4RlgInhNMbksaT4pNpC1UwnQDEspUzgiCJFJ+6Gcg6X5yPvURzFGeBd1jQmILPKAIwdPDDZg1ipZznUKkNl8ETGcy6Zb8jjZvLb0ps5lR5NSuSE1mK3FxoDrT/yEulpXZZo94lTyIPKat8nBRQy2YqPgZFqDswzKYiuGVT0f9mAAormPT9VJM4rLAb0lIXe0o22drisyrMxCH08tmceKVYrdM8+sUXdWqMYPT8Sr2+HOpOv2CQ/f+ZHRQxynBcM1JEJMNY+dQmcDRUT0CcRK6I8BfAXgvwXwb56bnhEz/z0Afw8A/oV//m+sapU1JxMB476hqbtLSfGR+/dCl1iELpLVB0OL/LJ6HBYTvglkI/54irMrKquHi9MozalBM9w1Wcl6WfIsLqjIVgntqpin9dYUTko70pXJeYFlkQryjoC2baWtIouyxBlQEsVfFMPTBJgZ0iZh016KdiMShbWOQXFkXiAT1Ll4ugSJCsXiLuM+LaBZkZguTyFsD0Gi8ncp4CewDHV/TRaDK7krVvUGqs6h8i1bfDk9xohJlSeIdcXu9jZZWdjJOt55UZBsZx+uwDELvhx855h/Vy5myHUfdkoZb8OZ5VLl8lY+X1g2xSgWIAn+0bamUmAengZYnAmqSsc4gPQSrWeoJYupBE4VS/Mo1tNI46EV38dkVhjVUzYOGRkILsCXahozXrWyEuC9yy5gjhBCFL5G1JN0irra8kECGDgHUbjAIFdbAhlLJeA7AW8TdUzLEmUlVy0kmCUQdeYtOebe8gl9xJ57tK0X2M5ArEjJyikpWMqLh9hjdzjgcDikAnhftnEm5ogQAO8Bojx2SkXSxkMZiyhb3qWEBFxgRjTXsxDAQcYxlchG0UFOlTuOpgyXa3jJY+VHvhxdO6tNE30HJYfl+zLnC9iU5pLCqnE02kZdPfFMlRdwuu
tRneZw3FHZjuoyyQnsiAnAsXrLJ68B1XpEdsoTkoWjGkelVpub/W2q5IHOWAL1lesi8jVmTicdVqcQmpVUuYIOeDeDRZLmWh3kFDo7xTRvKaNUILm2/D3rMjbflHkOac7StXx+Ku7fmnIz2ymWlr/N7zzZlw9D43hjsxbfozKW7+T0lmlik+xO/f4EFcQHLtLSHGqbbuvVgKXCc/3IXXS1pKZOycDr6C6uZ/8GgL9g5t8AABH9dwD+VQDfI6JGrYp+AeBTff5TAL8E8FdE1AD4GMBv75D/C73QC51I54JDU4spD3bpV809hRJwH3N82k0EJ3DFrA5MMEzK+ERTJHeGQjkO0Y6aBqBCocrAItasBG/Or9NK5cRKQbauiACchfKEYugVGglN55IpEnfFHpMimJRZIClaSdCfLIEqW0VC87lgrFyNv5NaeB36A253O4Qgp1eRE6U5B5pVy5xBHcwKxMoSYwSrCcp0X2bVKPXRpMBYql5Uvz6oAzOL1RQYTGLpYjvkNHpn2D7zNK2ajnXsSRgn6S5jQdZ+5wPo61IZyDFOsAzsrRZ4Nu6B44KWWSIMMmQFB2TOoCqZxJpVcVjBPIfGN9huNthut+j3PQ7hgIxLlgqLQggK9ogVI8kpTRTB8MkFzdzHxDKtBlWOUQXAIPNFcgUEw7uIt9+8xa/+8lfY/cEneH29wZs313j9+krqD1X4o1llyvuCF3m0TQtHhBD6NG85J2BCBQISIUYDFqRf7eTAseUR0jOxcKUsoTvpiwgDwAhyelxmsKnxPe5v6cN5mls3ZtWDiV3vXEe1nAQgJ+b5QnkWi9U8B86VZZ3ifD+UAUpmORkyu0nOKMAGBgGDMbCQy10XlVF60AmMdWxr2YDEFJfN8aHJGLueX0bPfMBk2HCa7UjdFymrvEnOe7S2+LD74IUuSGSyx9oIq5eluwTu/0sA/woRXZPM/P86gH8M4H8B8G/rM/8+gP9ev/8P+ht6/3/mezFpeKEXegzSXYtnwNH3Yk00L0GP6R53cERupdqCpaQzqk0rTx+6Vzq13NWus/YNDW/eR73mdw/X8Zwqj5PFW3h/ELxiDGxOlcs0hNrVQNwKzYIhYr/bYbe7RQzqbkgWH0QhuBHeNgGqVspvBnGmPmVFSks5O2GmuKnXrS6M+ih3FSpY2jQ3acEPg6IOg9YOfy/XEqPTaKj4nn/n2CxycgmN/5bP2D8a52n6kAn8qS+qo5ROGzz1BvygkZKPT3bksUfKuoHF2mbTbbHZbODbFkROTyPLoFd2NVQrSMTUn44sOLWH9x5N0+S/jVwv4/xM0xj9IEBdLrkCX5gZxA6hD/jyy9/h009/jd/99nf49ttv9RQxhksBvcWdzmscbedIXOW8BrImKHBQf2x8yXcZdzFGiSukVlOjGnAOXl3fL6xFYiw2AWT8SjOam3CNLo746M7r4dT76sKWxqR9JqwuCTIPVTx+PDzvFFj5cGJ1rlOGZMsT5rI1ETA9dpMiX2yGxGJDZzH3Ys5cmqfW0eU2TZ4e0eDv06a5dfEcvk41T6+aW+lDjpMletg+GW7kvNDzo8fovrvEKPqHRPQPAPxvAHoA/zvEZex/BPBfE9F/rNf+vr7y9wH8l0T0ZwB+Bzkh7SK0ZsA/nHnhC313qdgpg+oTM5YBp9JDLmon5ZXquLD1Ofme7WLPWyecQlM769OBoLl4Zypjqp5yztXHW5tmWulfNLE9fWE6gX1EeShiBnE2+K/jVKyLRzas1nxRhqGxpYGnTcxPJEr/zRBna5Ml0Eb7PJWQou4wW4fmMgMRIfbY73c4HA6StLpakKOkpCeXxSJvU24tTg5rjBUkUKAGi2KMkHhIjOkAElO77zT+VpQh9bTGWHFEGmrfTKSzZYa8Mg4sXH8vG5fSuE38wZnDpvvAggsPq6blYMtP3OvgxPWK4eAUoHHOVe1AZBYrgHdIQIYDaQQyCwg/2WyJHLlk0VJOSAbWyD0H53yyWGFtW7HaU2sWsEJlUk/nHZqm1RguJAGHlSek6pynUO08AQSlzAYCJX4qFGImRoyMqFZqdr9o1tzu5Cz1XLnSupMIXi3kQozYM+Orr78FOMA3hOvrLd589AreeeQjseV5rx1ORHDepSDcAUinkDnn0DQ+gWK5/4YWRDy4Pg0Sle5nzAwOcjJbACOGiNAHhNgjBAK7AIrZbc/qPp6vufh/TDb1A4ADj2NMp5h3KFm+SHGw9thjxdRDiKCFOIpT2OdSqXN7zia5+F6eUPKCV9gj6tpS7nIPRtvEGpvWg1Jc0ucUoh+V47LyD4+/nbLRNaBLaBYZUsuR4s6nATBkVmepoeWztkkfVnXKDFPOBScT57UPYF1Si42EsaBQv35q3ikUQb3WHAvSP5goRvnP0zqpeSqdbHFec9yRlOrcZrI+1l4yHT4g1MF5XM33yynJLZedl9nq7iS+4ThprkqP0tltf6dTz5j57wD4O4PL/y+Af3ni2VsA/85d8nuhF3oeZAs0UG9Pn09PY/ejJuZ4+qSV6B538Abm+FbEcmmdy1oW0VJQlbImiMWRxphR24kHkKKI6JQzX2FuNEQOFgjV5ERpC63NysD1U727qtaCrMyksExJeKlen0snBxwOISYAZLm0C4CMFpscEPseNzfvcHPzXtxpENKR9N55DeA6OD2Jo7a/LMx1vIwMDiWACCjuc13/hXpbna3ASQVIdS+tn3KbOO0XGiA6JRBk5Rk+U5dlEB6XKAWgzpfGO/1zv8tnHRqwC1pJgtNAtY1vBsIOo/GExhGCk7/sBZCJCiIldz8rPY3L4FEEDy6qXFr0eO/BXoAoEZtDSpUgbmIxsgaJlgQ2bYdX16+w3/fY73v0N/vBHKNtTkDkoP1n/MQKhGUAr7SeiFGCuVOA8pHFVeFKOLSqOs2HQWBHmUdz94E5IASGa+S5dzc7fP7Fb7DZbHB9/Qpd16BnDbbsPRovgbKJHAITqO/hvUfbSD+ZpVDfH5QHm6q/SxpaDcj4yECRAWEV0KTjycUIcESkbKEElrhQ3hXtVQJp6b+CJqaE0lixZD2XWntprFaRblLZY4EypXHL0E2JrNCP133jZQPGuVqDjD/uAs6PYZQCyVFgKIYI8nnOYmSXs3n34DoPG0kOBI5R8dNhfKpjitn6Z41Su5C0N9tGSqlBr6Rq/rsrXVKUqCwha2TApvQ1NX1QjMgU+ol5oCrTSuuxaumq/t6TNs8F708E/F5u7+EccqR8a4JMzfFycVnWm+NpyQGxSXpezvdJURJ69fcd9I5VKNAD6GlDK+c1JJPt2XQnoOiFXuiFhnRcwftQSETHM0CACQXhkmS78UlEV4Hadu9PSAmUnUlgtgKcpCxVeC5X9AtSfbpHIiZk5yBTdOfpLJFKj51PuZ8Npp0CQgo4QAaEjQCOYboZgRrqB8KfQQGfiLfv3uKbt19ht7uFbyKc92Izosqy01NehL8MaKn5LgND0+b0ZlGUgrkmkAYmza1qKQOPUtBr51KwWbHcYUDd5ogIcJSU7VqBr62LpoXzWhgmInj4RWBoGP9r6i8RySlyz
vjTwcGBmbDZdHXaTNi0Lbq2AzrGxm3Q+l7AUmI4FgCjUjwIRaBTybMhh6vtNu02W4k3XYer7Rbb7RbXcYOeGkT9J+MnqvJlc01EDAGH4MCOcYituJ6RR+gZff8NAh8g4avMcsj4RIR2KWtAGfzXOQGqzLKm/Jtdt1BY30CZUOuoXJK7ggq+Lx7ScUPOgTQK9fubHp9/8Vu8efMxrrbXaDwhtoS2adD3Zl0l/Oa9h28atF2X2tasiogA7+f5w+rjnBsBAEMrPK1OvhdZgBvVCxw5bDdbNA2h8S04uJECmmeAI1RMQ+mdQbtmDhi8NJuJjcmUSFG3QdDb4frI9VcC5/5G3Z73ZUnPzIDOWeduo1s10rELRX9O5odpkLm8d1K8GV7Z/zNEM9/PSquslzKYcsI5idW/04l1GYBcV+KHkV9rK8h7yDOto6yj9PjIP3fc5PkhToJFywU8Ma87YBbVq7wirfIl24hdfXTeU6J7AgkfgCZ5cvV4Kcf/6fQCFL3QC73QB0+lp8HqnUezmCCFjBhApGQklgW5pwcIUiTAT5XrFEFxmk5bbtYJZWMFoQCazqJ1dUzhZKnYblGFI3BA4Ijd7gb73S36cAAIaBOI4kDk1XKrUPpTnQgp1lGZ45Lya1xlO5+LgFdd2+Fv5xy6toV3gggYfGrlAyDWUDQFFNUWJ2ZZM8ylAthAcAthDw0EapqmiqszBImYWSyK0IsSTA6O5VSs7XZbVdaRw5vXH+NjvMa+26E7dOh9ny2K2CscWvNgOtZe8/YAvv1or+XKvPfq+hpv3rzGR29e47V/g0AHATQowgLF1yCRAEgRET16BO7hHOHHPz7gzZsv8Cd/+n9jt+9RusUmEU7dcUpeoOIBAuBJ7FicF55jJ6lEtdoJQQIKxwKUzJup8wANEcFZ4HsiwDu4ppFMI+Gbtzf48suv8MMf/gif/PAT3O72+PbdLfYHOWLclFGn1kTcdQCLNVTf9zgcDtnSB5iMq2T1DiEUZZvipAlS4F7iVDm0bYc3r9/ANwRwg37nK6XbAJnUtqT8zmZxV6dtAFS072U7YsxfZbHmZmG7waM7SSObrWwyJCxKMPnkfYJF9qUAt8tSnbMuHgMN5tbu0wCGYr45qXQPROc33yCBtdc/VCpHXzmo5yWYO7m7KX3wrfw0Rd6j9MH3yz3SC1D0Qi/0Qh80jRcIM5c/Qmxv5wC1lhiBQKzxQ57YCiSbPVFPfQOSYpG2wh94lT+CLA1d/Wqh33be72kniMxtbVAmmAVQL4Gso5x4JuCJg/cNvG/QNA0a58VKJcTUvMnqozj5TBTlpeCcebfTAIJZgVUVs9lA0wrIvH71Sqye9CUiFhNyolx3EgubErBhU7yLa2PlvgbjzKJoicyFaw4oStfYgRPEozGKQLi6uqrydOTx5tUbfMRvcNveYtNv0LseADJQRHHE8eY8VgJFr17txPqqYLXt1RavXl/j1etrvKZr9NQjudqkHeoMdiIK6MiO0eOAyAHO6ylsTPgn/+TPAf5W+yKDITFGC+0MUgs1QAEV1gDQpSWW/ifd6MAOcA4IwRVWRjGNp8hlX1WwZQLDm8YlVxwiAjmvmJEH9we8ffseN+93+PHmChEyBohyNCgZF0C32YDUBQ9AAofMjcwAxyUQY3ivBBGnX1CwOcrc3DQdrq6u0bYOMTjsWaydZFwGeKrXgNJKyQzwEq8wp80Bx0ixiepZajw/HZtpj+lbyUKxNi+aSGX8Xrqb+nMho3PILANBcticomdpnVmb34rnlkCg4b1TVjZGsd7YfFwABY/v7n+eVdEQrsv0xASVCbqfds8jMYl0AGgAavQHAAAgAElEQVQUFmIMEtlG4LBI9wW+Phcqo14+P2LbmXnsgjwbevJAEVd+dVO7xOWW5vQEQyK75QlisNCb/lQdGLNYqCP377ZhP87nofJbSxfTNTmv1ku0ZkBPpWECgO4cljuox6nYxp3OsFqRc5q1UJcM0uPazpwpzYo2qO0WBu+nG1MB9c4jJgJnZ6wCiJjMeZmKcc7a7iMXNdNqqejuEaig7KLP2XWOxTHx5THXhWBrrGjKYkyhNVl7UoPRpnRiei8LGVMHewN2dPMxkrIb4EBgsl1vBRCoTobr/6SuECAgl6t0Y9LfxekfVVrD8mCpB/P4EKU+PzwWwuaSMCE9Io0XsqCTYrnBzHLUebkNn0qQ6+cc4JgQnW1ycwJ8TIGJpnxV/FOMUxTCjwJF+/0OASEd3cXk4H2Htt2gaTo4cggsbkgxCr+kuCohwkCiGPPx3eNhMji5p+D/yl1L+1XYIsKRy3wNY2W13HEe11dX8D5HDnLEGgdF3YuI9JQhG4Cl+lq6iQ1dz6h4Rvjekbr/UXnfnhLLJUcO5FDv5pf11r5x7AEaA0XbzbaeYQnYdhts1fWs9R28cwkkJXgcKIzA3FJhAAQA6LqNtG+aCxibtk2ubV3bCF8zI7kV0LCmwofkCEwBEQHwUr+fwOOP/vAPcXu7w1dffy1KNgjBeMUUQ44AeThyEDfKOih/7g9XgUVN1ySFvQ8R+8MBoQ85qLrW2wCcUU86B+/TgAY5wIPgAfQg3Lzf4d27Hbxr8frVBlebr/D27XsZPUSAAzwTurYB9y384ZAsiSQ+kEuWOYZV1vJbWajMh1NrX47lVV6zCjK+9/HHePXqDTw5hAOjZaDdtOgPcoLhpusAtKmfZc6NMi9yLKYHSVTcUAsLwMFayyNGEBDF6fpYBr5OoIpwC/IsKx+5n+eBadFGTkNjawO2VWos4wzds3i4Tlfp2zo5DGhRKts2L0iw+RgCQojiTgmHqHWgwWbMpNzFEGRO+YEXd13G7495I5czu6KR1srqpusFkZ4emAPGU9U3U1QICpKJVa5+Zolo8Lcu9oiqVjSgfzH9YwpB0Y/Dy6V8Ay6sko3TSM0xC/mYBxVKfbJUzpLf87UkB2SBteDXqYZbJq7KRshueMoJgSVWo5M1imzOLZTFqlms5qtAhuPy1VOh2qLZaLCGT67/VSo1rz4Jmlig71NZ5nrCZl3jpmPjHRvHFyoTaf9WJ+aelsGTB4rGlZlaGI7MjTqfDXWXUTfFaaHkdLrgYFnVoQ88OFcz2ZpyrXlmRdBgHiu+S5PequIs8lRWkk34sElBfupCeiFUZi1fcsHkonQMJodSD7wjiYKArHjTRH3XDqc5lGJ4webhVI/pDEo3DotZlAQT0PTCmARAUvcCBSjS8lcozAxYfBJUivHELnk251kkYx+OUbU/ebeWQQdMWfBhWR+eilyX+LOwsFiYEyvrhdkC21sDhe+UeVSVtNSeVQdLea0PCvUHgAr7Xk69YmdARV4Uy37m1Ldpn1Bvacrl2CBRHjkGHOIOkSKodXDewfkG3eYKXbuFowZAPs5Z3H6yRYccC57jyXjOZSqDzs61VgkSGR8bbzuI0lCdymfqp/fYdB02XZeAIqL8rKThpT2QLZyIyna3QNjFO3XpUj9Y+TzM8sNYoACa7DuNx0j6rVZ6Hg1A5oLk4Fjmm03TFWCtlLP1Hl3Tom8CGmoE4GBCBIHYg7wf
N3PRxLmH/nOrB3CP4AbTjAAr24XXJEZLGwlDhZEUQ9lssTPPPMkwhdRLxCIGoBboHocHTFwXsHvThAsDMuFo65KorkJcB5g2BdyZC0ucc17azgxAag3/YMUAC8uBXpc1a/1ZJ0QNvroRaCRNHEc9ofzdEFNN0d0wloUz8j12Vf+lUBiQag66C8+XGKgHHNpLJEahUakF3P8sULVucaz8tJHYnuDkBJZJF0eALUlplb3sny6PCinzrhvKU57u6odB/hjMzuPmKzjmbSjYbK503G4OqlQDy+Muu86K4EiuyCqTQlPl3SNE313yU9j6SBNLcwsF6bLMnGCS9jGU3RSCxN/xtlh6wykMTfC9KXFyqOWcYLUs9lFEO/lMdAindBDHAP5jW67lSsiDoBi1brJZanpwghyD0kDIAcGhCIesDJrT4BDswejiK8c2haDzQBJ/45RDDWMaCPJvZEtMG3Q6lamjsx3c7TR7ENYefSDUDSHhuHhcz/UEsFSiCRQ7bEkTFyQFRAQt2QUCyZgtSxZw38GpGvF1d8iFHuBGCkU05VyeQqc+88SK2R+mfQrBdY8AILOoB3DRq0OAhHWPgjXG2v4NqVw3S9t1r6UFYASS2Xij1GaTlzdiEg0hpMk51l7NwALBsocQA0YOw2iiRwkmexuApObqdpQ7r5iRwigDZQ6u90gxMYoJjirngQ9xkkssButTIwcPNgjdc+9gp88VMvx+++9tNgAJ4jwIQmtlg3KxATHO8OADsPtpkD0MBYblglsYBxcnAQTQqByybRF1NWBiUtEMjlRqxSnpZeFASJ92KyHq1RE/YmiSfslerZvVDrYcGFlF7iUdV5abyq+tmmAxRj52fGv8wrLbZe96fyouo3QjlmYJOmpIojs98x/MUo84zLw+KNaOoiN571aB/q8UVP34+w6LDANbA/wGEAeo8UDF3HWONnBRAIq+UpYs8IPcORB7EHIMDzwSHg6CC1wOW6DI9SxBDhNuTGc7Fk0KyVqZR/7XqmoJF9XigD2bvyx0xl+JzoTssEmy4RUrIHAPsub++UI+cr8MXmu7pxJ0CxatCcO0/vNlIL6flDVwCOTQkEmN6eiQNoxx76IlBLJ62qzd6oli9k3rAfSffTDbvevumuBIoqehEw1SW9gOksO9ALb7c6J+LyLwkCJYilvfJ28MoZaQIneWETFQVts2CdgBII6sGICP0Kq9UJ+n6Jrl9iuTrBerWUQKuAXB+dXM88OcA3YArpVMXBp1PcRduiWSwAeCx6B99HXFkTOHoEMHqSW3bUSasopqJ1qTLJDIR0K1XUDZvqjbjorJIbwQHOuGmB4MkltYpALsX2GJzA5zgzISCGgMBdvk7agkJIeeUHKnxzzgrOSfBbR0nBgwM5hwYNmtCAgoenBj60WPQHuIYrOGwO4b3L42UPa1UwcVkQVoXVKK4m3baDRE0QUa6lz+9YAQgER2HnUhbSjVg+ugx8OER4wW6w9g5rxwi0gmcLdrmEYUZEcskNDZVbk7oO5VNSACCPq6vr+IsPfQ0eu34Tn37pI3DUYdWuAfZw8R4EF+DOeLvnFKCxLXWdmDMwlGYdRJEpt6AwygGtWCBqVrYkQr7qO30tFou5U7KLn7i7wcwdBePU8iyUfE3NB1yzuZnZ9cFlr6+x21lifs4Q55a+KuNvT8hrRajcsKWWg8V1uAYzi1tAejm7pnGVzI6DLVRB1TwU1Zo54If0jG19zHs6BhQagDocPOTxwAe/CC87PcRxs0JDDM8rAY/Tmqmxj8T1FCBIMHTmDl3PWC4dTpoWFB0QGY2TdfFgEeH8AQCPHC0bAJhK/UZDcZGFElPvS7o12oKLKTtc/P41+yrq/QqEfAZUSDdds5jMoTly4AXqqwtUlUuyRBjsSRvSAHdM97grgaJLS6Lbo0tLokt64ZMV8ovQX98yZCiDBcXMNseg2UIvrrVHQaCpkw7bz0kgQ0CMXbIiOkUfTtH3pwIUrZcAIpxeX+ycUbAEdFFD8AY9qAUWi0Ms6BCgDkxrrCiicyEF5eVskhvIyHhpXC1IAECsiIAcl9e2sG4xS1yh1NSQAqI6omQJKNYvElMJmV/y59QdFAkuOjAaEFjazZzc7dTQWtzg8smSBYoAOEqWROTlVI4aOOfQ+BT4G17+sYPzLeKiwfIwyPW/RWuVrFn7Rdy1iErdCygxHONNZIEhGurCo/fnWCS4pJyvGgeGxCnyzJCmECKJ1UrDPsd7aaKE+hVQJCBQC8fOKNyDWplnR53HEzeew2uefCV+4Fffgd973R/htF3joFvI+MYGgTq5ga80O68xtkX1Z85AQPnMA6srLmk1SHCiddfhuZs38cTjT+LxJ57FzeNebkKL0s9tK1c8xwh0ayCsEwiZp6UEoUakUmaF21Hms24VEVYRsTfpazwGztlnlMuwzKrrJrkaSWdA6mLrId5Sya2nZGuqb3CpMd/k+a1rji7ZyfuIHMGlwMzMLIGjicR7PgWRplRX1zi4lnB4QFgsfHongghynX2T4pkkALi4vJphNMBU2zgcHCzSTWbJ1ZMI3rn8r2lbHCwkDQMpflmahxyw7jr06zWeXj+B5UnA1/zxA1hEj+76MW6EG+haxsKtsW4CKDJ8FDDbuwRgpzy9updSQAhLnJ4+k2K3BTAH9KEDXw04PALINWBuIOpB2jejuNFaONvy7lzKwOx5UVb+ZTDUxbfanyZILUnvlv3cyvDnaWEUjcI6LIWp7NgFWL5oNKo1ZKWNYlHH1o3SAOO5e7cHA81uefn9FzPpwntJLzS6+EDRYF3PJsaDVX3nFN0rOHIBgZYZ7ZtKcStL2yZjtxfsMmlPILDf0d93zJkZqfac34zytpxKVZQZaCr1UKmCAYJq5Za03iYbNYCxp7SwQn6uguThvYdz0yayw9rN4vuBpYksX+Oc5gmrtmH6tYqsM6dGm46IRyUpfKCW/QDGFgCqLOc5Ii4PMfZy01lYY90tsVqfou/XKX6GPc2zQj0B7BBjhAdj0RDcgUPjW7SxlTg1UayH4BswAnwktMlHPkCu1PYxKQsKQhHSTWrplB4lTo6Gu2UdlfRcXLMSmmSFZlZFzASMTuCLHmPneGJcbjxipwqMKI6N99DoP/KM4LzLn8soAESivApQ5BBSvCLvPbxPIJFaQHED75zc0B1Ju7gAZlomkkWROVatXX1Ua59wPhtZ5eltHUMlpuQNkASqxnaiHMxa+l2C8zqsvPT9Iji0gRAcIfoOjmJySUvKLSIiAY4diCOGVR+SA+P60mPdrBEc4ev/5MtBocXaAx49mgAEkgDBBoUp/xuUIxpAo8SzSPMnSgyZWnjR9NbETPq2jz3WXY/T0xWeW9/Ec8ub6PoebdMCRHBdsZRZdWssl6fog4BFIUaEPiCEiOVSLPiicjoX5ZhZQIDlaoXT05UEwwYhJgBW03nn0LYeQy6S8aLcTuccfNPAe5/7wPYFM4N1/juX4oUBfR9SnK/S/hgZq8Dou5D7W8qTfiISYEXAqTTXvQAlIAFXD5oWvmmwIMKiadC2LdrWo0nX1C8WCzRNi
6ZpsGgbXOVDLHgBwbkETFGQR11UGOWmO3tzXPlAIB+x6Np86KDzuWlkbhIY8B4htdc7iRVUwCyWm+pCj5vPHuP0mWfAfolwGOHDNTgQYhPROZI1kMTikCDrlqeyxmlgfLGo7NGFFXh1E0jALqd10nmPZnEIMKe4RWn+OUpxeWDGgUcfN8W4HLJ8tX/YbHjivcGY2+95qzfzbLyncQ3AZhx+uJ6lfcAo/3mF003BtKOe97lWE+WPirHVmPhlw3tc5tlUrKa6Drtxi9lOKxm4rTM0YtTusnS8rata7ggqw7a7+4Y5T6YfP5KMKW/uds6qG6kCHcl1lqlY/FU5jr/X+/WGmvKQXXRf34PmNKsPhJ5fPY3OOL4vYJrRDxfFqONCA0UEwMWkrOWTWhEI8rWqhPrvJDFwRpPxTfWZx+VWML4DNDr620B6C0dZtSfymtNPgzS5uVYSmKgSTaDNexiXfdPklb13A1DOc/l8Lm/uZ5EiqHIUq33C1sJuvCNz/imJMldvLChQTEIeJcACAUQBHMX9B6AUO8a4/yhwRHoK69AsWlGsVECDuHkQitwp65EbCV3bepiryPvDBpUGjwQIKnBG9RqZmCHGt5kNMjDV57viYumLZXmp1zWGfZ6eZUE6ANwjhhU49gh9h9XqFOvlKTj0oLyGaJ5equ4UdCH4yHCO4Q4OceAXcJ7B/lROtyPDBcYBNEg0i8cEAM+AB4FdCkjupB+InFgS5GYz2Kk1jijL6pak411UBgutpCcuPbV7k64VRGmc07XrLICSFfKJBPzQz0Si4DE5INULSM/yoFGKqeTgaSF1dx4MdX9TU4p0s1J5Mf9Rdywd/xyTJidhgAcL3pQVEEFAWdWliBERagwEDhXAZi2utpAI6uIUp2HliYFFKCZgax8BdGkuOrEkQwKXADh0MoY7FSagJwJ7DwKjUcdFT5D75FoED4AjnFlz9Ir7rAAlcjlGkAKPRZtm5+DyrXopRbZ6KsC08F8Q18KDFgvf4rBtcd+N61KjdgF1J4wgsXDpU5BSIsQ+JjBW1rrHn3wc6/UaCu6pgiRX3hv3SAWySOaDArqc6uR9AwcBiyTGVbKCS6kiR8RoYoNN9LUNjuqcN10nz7tuja7roN3a9T3Wq5XMT0i9sjKvlkuJhTXou/c+g0ht22CxOEDjfQJWHZqmQdN4NE2DtpHYX977ZIkjmq9zlPmeEcEx5HoS2/kyLRl66rJbZ4wRMSDVyRUlG7LncLKIdJHzQQcR4JnRssPBlUMcu4ibp4Q+hLRddViAAG7ALO63uR+8l7VC9y5SbmwQ4RBiBGMFXgvgzq5PQfqBa7iB9ugKAA9mD9Yf9KY2ubdNeIdj1fZyc2cBWDRqUnorr68uCtgOAqKTmVRARatUb74NtJSVLk1gB0ILQguwR7ar4haERsD2ND9jBELPkFhxCoA6xDTWniVeDUe5XbDsVcmUlKN5Zjl8WNlqs55uSP51CEjY/BnMzoCyEzLCqJzpicjE4t5tkldyVwog5iPp5B+XkBZl1kORiTpVFjesl+BYuWMog7C5bW9z/QetGaXjtA5W21YU61RGA6ZW9lkFQzXum66L+hcA0M+sw550vlvVNSZE/3HvDa9vkVRbaz5k8Y3p5iTcFu3weaQ5fb4vVXWmuj4n2dyhuV1N7kIDRYWpROCuBNCzoLB7ROXmD8rFmwx5MaeqI8vnuf20a28CNnPmrZR3USjtBYkj72KagQ7MzWnnKQqnqWuglAk5Sq0vtuZn9mLiIoRUQkUCb3iwzIrgGRHC9GpvlT1mTleXF9CAkPGGDW8P+mxjO4Ygz3QqCyaUNnApKsuBpg/MfLIg0VQZZ9+styj4pAKz9DlzANCj65dYLY9TfKIVwnqN0HeiXNhT0XT1s5ZACfwhSBwguQLep3FIhwSUdBcQ4Ccsv9RajJCtiLKblwErolOFSt3L6t9zb3EZD7HqKVYCzrl8Uml5UuIfifLi07tWHrGujcpjwnDlxh5KirhPYFFphwNRU9qUAKPhyabla+3d6vc8BuZ7QT9Kd2J4igoT13zzeiFzuihZO+d4VUAZB6MzogRWtjttkQ9oUO/ZZbGJOVP9kd+QLDQUmMz8iHpKO20uD9W+VI53yJY6eT/mQc0BJg/nBeihBnDkEBII432Ta8csQFFsY+JflwKmF6ucxcEXIfShsu4ZzmYFjBRMEtBILGcyrEtkgDAy/wpAwODJ8wrpYut6p/MJBewF0PedxC9LPBii3ChYQC5OweAT2OQ9yHEGsnwChBTAci6lAbIFXuNcivclgenVMtDCwyDlX9k7GE4Cfg/mhmlhxQgCSHBaWxx6ltsd09TO4y1dKYCSBinVX3X+uqbBlaMjRGKslkuEEBD6tN+R7p/JeojMOkdqFZnKSrcuxhToP0TCujuRgskh9AHgiKve49AfpqVZ1+M8aAAogQ1leRHgaNAf1SpatXhDH26n3UvHln13w9NiiVQXZIZiR95nod2AwnhWDp6YvXN7cOsda9/gtbw2D9b1sWpvxpDnjMmgSul1xZ9K7mld1xsmN86zqYbw6IndE8ZdobLKuP+KFQftHq59sMQ55adg19Y0mmJuN2/9fUYmfCuy5znTnOpcYIVvV/X3UfULDhRd0r5pH2aOsywR9kq1wDFNF88y6ZKEGLXrR8WCFvyZy5tjCS6LCmN3rlpVY8Tp/YxghOsEAqQtLV+OUeuQ5lUC2CGrU2cVnM76zi6yx2hJeav7XBX2mfltM8lQhT11EscARocYV+jWJ+j70/RviRAFJCL7aorVo+KoyjYREZ4YbbPAolloRSQdReNChoQCogaeVekbAjoWCKLkVkYCUxUr1ZpZxaLMVeDQ6J8CCLZrOCIC6daxpMwNOn0ICAr/ORB5uKTwqstZKUuBImdAI5eVY4vI0qBNhaunKQvJPBa+x4rFoM8nRRJWhCDX4/aigYjiRBvLOx8qChRNzpvxo7RHJiBo5C5i8zU5VGtgGju1MhOXKlcrMpChIoaxcmGQE+sNAPAENNQiNOKWR2rFZmqrY1lBzVHsMaMq0lT+pAXTXNHL+T0itfqpe0QtIQWQ0ofaL5xV4hgbhBTcXnklxqLYWb6jBIx5p33l8pzRdcn2qauAVrnPSKac8igpDoKyqySrEyQ3RtNptr8GjQXnQOoMTrc4DmHFMd8IUCd7StRuRiTCQduC3FV457A+XaJnkgD5UQBCn9uexiaXUdxjBfQLkEDpyaoWQN8zVisJGOVPHNi3cL5Fu2jAjOSGbfcMyv8KOK1lIPNYTQqsPh+K4gQYBGQ+4iRT7EE83klTNh1pB9rQM2W9nAeeaFvP3phyc5wBayZBluolMc/icf3JZKJA1BDoqiixzfxxKEeQO1/R9disVcM63FlXn/NmNsJuU5kN8+KSLsnQJVD0YiGrXN+J3XCvdKcXM3PryVRV7rbuO0fatbGy+nfX+lChs/YlD/5aIWrbS6SCCrK5/DRZiwvO/2wYz6GYVytuu/iUTDpTznnwVQZxhs9r1W4rbfXbSZVWNA0MIolL1PdLdN0p+n6Fvlui75ZA
DPCOwIFA5GUc0qk0oLeSSX86Bpgc2qYBkctWXmCG3JierJg4uYEYC4nK0oWS0uIou1gVK5pkms61AqNui6CkYKXboHIsFKN4OXN6PxSTPbwo60a4ZzvmqSwQVQqYIw9HcoOZuvcoGCTVTsqZcevKJv6ulJVyTwq69g9N8lnlNpD3CgtsDK0FzsiuyotgZNeNM9PtQUz7p/G6M+ojosk1cg4ozqwtRgaIIpf4PjpGMWXv1dAnKWUSYi2BK05CncPwuJUDxEIHNVjkrR1oAlQTAKQKZNW2nN0YJNL2yAeNlaN2alxzWr7KvMySXEctyoC/zjl4NntNAkPFdWwI7EnFcy/oUpCsNXXu69XyFjygrAgP50bVSvNZ1oZIYpkF58E5GLrsSWC5mSy3XXMeuHo6FtBu4RtgcQgfgWVcoosRUa+9U2Mlu76VmkgJTvuYwdyDwQhRSqZeLBeXKw/yh3B+gWvXFmiaI8PXBhBMfSPxjxJQXe2f08d3Qy/W/Up29ZjtorF1ZQ392brtd+3RPX+O/GLpTqx+Uqfpiz8mN49Nv1S/2wTTrnN6IcRZ6ysvTdhemcyGMtqc0bxYu83ZiTHrAJ3KqnlJlzRFFxwoYrNY8WCun2F7mdCVzpvmItPbBUZ72nG79Un5UX3yvZdAaudKZ2n8Ptsy2GAy700LwVN0UQKRWbpTwbMn+So9GgItZyixyFY2J2OtMF0ZUZRVwZo0NbdrC5IVCCHHIXHEiJTck4xgxyh+8Nkixq5bG9qBSvGmzFdZgWOUvzDpzjp+G/p8Ph/sEh9S37LEgmL06PsVum6Jbn2K9foEq+Up+vUKkTVaRQr+mKxWpJnJDFyeIEKCu7bNAt43YI75RD6C4FysxcAELpVqMXpjTVOseQhRFSiSm8ismM7p49B6SANbDi2SnLFscmSVWyGHgrVJfetr1tQ607o8CUDkJb4JC0DpFBiCusGl9nBhCbEssYpPKSU3jMrYW7VodMI7x6KI6/6fSlMS62/q0rHdBW0yIO6wcOwo89xoAM4NlJ39llKPkQOyq6W6iQHKY1SAFyprkL19TNcna28CJOujlF821EOKl0MDhYPV/c6KYtoXAIhN7Kqx85/lFU1WW6qUPK0FjgaEV4Ve5h9AxPDcjtl1I3+V545SFBu9aQwlZlCJpSQ9qy6xpX42N50vQ7mBU/g0AigigKCx+gp4WsaC7LvgMkfz2shonEf0DToi9KzHWtIOZ9qm2VMap7IGDi3HegHHgkeHUzjfgJbPgcihbQ8kcDqrq6UTYIsLKCW3rPkJKyIafePJX25vzgznYTKoMjw1P3+12JEsS2Dj25/Vg5abObOthMKV5t07SNHw4HZb1HlU3Si7VxqswWzd4c1vGj+ARJIobt6Ttd3y2/5oCMKqbLI5/QSvTIi0ktluzmXiZBm6Rbe56GriJZ0rXXCgSInrOWsl1AtI+4QH9g42ZAGbti5GF4vi7gVveM/1rZJKVVXeReit9vu7pfvOkTbxZ3nOWVEeKT5nL8zsqqI0ZcEdACbqIkq+AygY4X+Qbf4/qeIJAEACMDQYqFzZXgQ9SviObUdxQdq2Rk0o4WoZgqTsW7DItt/UeBsN+/iW+3zjm7YuDI4BIXbowxpdd4quW8otZ8tTdOuVxBiJATl4CWuYzqL0MYDI8sw5QuMJrW9TwFOfy3SQwMPDgdSx0FhOKtYWd0JVttWFCxPXPSOvAVnxSLduUYovki16yFoZlT7K/5Ppt/THsb01Sspyg3WYqIG5U02sihJoVPJyNfiX+IUiZ54hZsTKwmh6vg6tXkocEfPe6K1CVZ4j2VpBCsJoIdhFVV76hQd13R+N4oFNpxpWbMPvm/Penn+dE1BFd8tujAqqasJifWSsyGzfadyPUgMRozLIiTJOgChSXL9RyOd1lGNRrsXNCwkEFleUguFbHktADG3jKwvcSNBdV7lRyLpDxHL9exjGKSuggaXJgLtVQsqgCJu/8kuxNC7P7To4mNcqLiRrS4b0M1Ma07SOTJEdLSIS1570mRxj0bbo2xZ91xXxRIseiMvZHY8F7nNUrKbULc0xwBwQuUeIa6y7E5BzODk+wKJpsTi4AkcehAgJGF0KkBhQDnolfTUKVC/Tjkc9NUg/bYE3hyZBiBR76exEiCQufwn9RJlhZ64Z7Pql+eefzpLTDoB9nyTFlNVHwYTztTw5+9gP998CFKlwaPQdAuDYGEpPWZ/ZUb6YIS1Gc2QKKJoBEqWE9QUrk4DTmap3SS8wukuAokvaJ93Olvf80Ca4fJjmvGmodr+4abbV3Ibvt8J9eVNnBrgWWQYiav6rnB6zkDVdb1XMMlBU0UDQo81X2afIRsgubpMN3VyP7XTn+3x7XUQocxwRYo+QLIm67gTdeol1+hdCLyORgCBiZKCvkuvT/957LFq5stqOmQrKTToJtAE3pS6cg/jma+szUKQgUXHFcA5VlwooqHySOSIrWyDk28jGcYqGN4URhlKoQ5MF21rgt/ygt0pRcW3L7lp2DVJOS3GUVPk0Kcx/WzmnVpwZyNZem8GTSePKIUgEGj6cRTnvKbCI6hadxy42Ceykvo0V2DBU/jZJ2bcw17Or4Yb9T/kIMGBGCjC8q1dyV5J2ae7XDFDlcus2cgIKmAHWyN0KzhIEMMqAk7FdMlZrRIS4sU8sD0+l4Sqlgxc3s6rN030+pWRbcKECIc08pax8SrZc5WWZlM2jGswjaJspJ5fLlsarddVnzCDnCtxABNe2uHLlChjAyWqFmOJJMZc4aAHiSiY3fekaGJEHj5AsqRhqNcaIiKED6BShb3B8/Ay8a3CdIw4Pr6SA+dpUuceMKK2jAxC3btG4p6boXKywbyVPwz48fHArlECXAsbamTa7KrdJt7pSSumRItzwNsyz1mCwBoxLwazx2g6W0fgrDzkvori32vJ4JJPcOTqHQs8AFm0Rpi/pRU6XQNGLlNJyrZLH81uZnTSnfvtpw1hQSQrTxLNLOiPVMvJt5ZN1ZsVttqalEai0jawJO/QEbXAQuLXM/HsKcJ11Ka5+3WbQPjrNrk7NzsB7++pzzUvLJ61nAii4B4c1YlgjhiXW61OsU4yiEHqoEqHBlAX7yVGJchsVbPHeo10s0m1Fxo1G/+VTYhsckxAd5GYfBrzNVxXWZMHjssXWQOjM+i1VVgUEAtxm17McM2jUXSVvsbQw14kPAaMsoZpYH6Sxj4zCPlqvixA8ihnDFlgZvGWE9SFvKt/nW7JM34xLtX1X98HUrNPW7FIKpzn9+V13UwSU9K3Euqlp8/icmSzwZ9aQvDZYkw1BR2s11CBARWeYqIuCsQPMYwqosUbJeUZWYFY6sd+w7uSVbA9DWeZ2sc4pfVRbINV6ULF2crbPtBvYgCjQZwUs0qhmlh/Ldwst1K58WmedT7u6QN0LLSglFpcO/vAQcA7sPE5Xy2wxK0uEZM6UnX3TSJUDDFmPxUWHOSJyAFGPrl+hdQ4xtujWHjef8wLsO+Dg4Cjl5rKBBoHH7uC3SGe1KBql3QdTad7mr3ri3vY+mkEL+2DOa7cTBJw
mPp+1nzjXIvPXudA5re8K1E0KjXWZW7bXu5DOqDvZPebubfQl7ZkugaIXMz1/0Pkt0J2r52hfuMSELijV6I1RyZGk5CSMc4o54xBC2CGIlt8I4ipgIZrqQrFZAExSFyb93eaDPrd+0npeoGZW98EcEFJcor4XC6Ll8hir5Sli3wEIIuZyET403kFdMwY5iU2nH4cAACAASURBVHexWCRrovIT1F0NSONA09eti/IIDLVVAuDVlkgv4jOWQ9kdJOehUJ6cyhcXMbX2KS5oUODJ9I+1qij1GgAp1poBqkSWwNgFpkmqXgViJqs2s2AN3cDYWEyNuEBBoMHpbtZlqFbcKvVmat/gLb+dmViNBvNUHsKsd5KkH/SzjpMo3DlQsOn7oWXK7VlK6LjHEXisLmiZX6kAshWgNQmUZwSjgIqm7tUMtbGvVGPW+WXWAv0XZyjsNMh3R8rNvzLBUQSl/pGozupqWho4BEOte/TQok6sT5H7VwEfjWUU2caNiQZUi7YIUa2ty2UCoCSOdbqBbldXVesHqnm2OFjgqnNgR1h3HbquQ74ZLu19Uiu5jgEsVkPknLjwWvc5SoBR7MGhQ+hXYOdBaHB8kmIQXQcWi0M474Eccy71IRLPEM7I75TH4HbczmLcv5uQBYumdqxbojwtefBgn1T2tdvLv5aGz931bJ+gzKQgPz1+up/adb68c7fSLXbkeYmMl3TX0oUGigTJ7+WLyvvqkF2OgzBnQsyOCbAHIZe2LEhVaSP3lqkanb0+KpgUJSt1Hg02Uo7pny1yhmlpNMEYN1RvMjAah3HCOZas5Hb3wi0Omwj7tXA3qrdI4rdWwESTR7Sn8Ep3mlSQTt82JzRaHqdAMkWAT3wShzEmpgosV9APcZYKxEkCLBELz1EEEOCIsV6vEEKoMSYWMTdyyaNtW8Suh2cGKEBS+KR5u3JAhRQUcTAZWNuNJOAjxeyAKaQ0LL+VQqYWUGB4YJraWapPxRUGaXk0CvaWztz2Y0qSXBUQ0xqit2wRXAbhAkLoEboOsVsjdqcI6xPE9RIIHTSoJBGyFQ/lvuKURzqdJoZ3QLtwaFsnAavVYoJQAjXzFrcVTrFMCMnKp24n1Ull5IhS0F4jWKd+dHrrmLHyKS5HKXysvkf6H5UxoPKYiMDliiMQs3FzK4BDYe70lVMQdR1g/UN6q5JLyzZngTfzx1bdKfFnuTwNxCRBg1H6etRxuY6qHtrmE9jp6GhfpBFnmSsMdRuMJV8LRmnsFH2QQAk7lqwxd6a2mdH3HXuxrmMmGREGVhIJMKgsOzRBTLIKV3JKHUspgRUGtSMAgUwMEJgyEwpN1VqTgutW9QICIly+uSaOxwqlv2DLMHmUGGm1dlXF+UICTsyaq7dJliyE952NoaW3LOoUM/LJgNUHlS4xxGrxreY5yiBasu6r4oalVBMxkWrFv06fp3Reg0uQa6S1vFhyUR53NnHELIiiyVxaOyS+OFVllZSmJpVFYuIiKs+JHZzzcABunp6CQ0QfeiN/lTUtpvTKZ9likXUNBJgDiAh9WIOI0DSMEBnLdQeiAKKAa9fuxWJxAOf6tPcxQDFZKsWKPyMAdmr9luZAxqVK3CUAxSJqI43jCyrFGNMhkPAw23Uyd2DhFVmHgoBmxFIX0lDwut8PxiL128Bbeao2E1Uf5EYTrsRT8nFVfqxUHwATwAbguAXsvjRJnDY/Wdv0lk/zayqzlsuK3Ca/cnq4rT8U4KzEXGNVW4liFgkd1KSq/U59jqr2ACqGykT1qft9cpOlGBDze1Ku7j9z4MfIJv7iljptB9lIhQ5sXhS30C5dylhE5kdIrqpDXhng17dYo0mKM3qU3K6+mqBdLLHtvR1VYkhIg500p6NmlKdZXRS60EARgIneGg/WrhPMO3371JwBzuLhvi16VJKYavKwrE3a5K46MTat37dGc/I4L8unuc3foCidNe/nm/Y/V2rFYkOpJe3W5HPyoSq7IutznSwL8kmAdaKQhxAQYkAO9upEPlM1Vf8SEWIMyIgBVKF3sI1g4+sOmP61AJqpXwaRcxvGjMVVu0Y/Z0VU/hi7Aa67pu6QW6CNWlxRxEKM6NYd1usVum6N1WqJ1eoUoV8XhT8FArK3w1khQK9Zds6jbT2axsN77dOy2DCGyt24fkUtUncUI/zqSbuxyChWOkOrmhKHaIqqoKlFewM4BYolo3RTqf8gE2EvTUJl2HmQzkA3Vb9pBRhFkbT9ZafLmLQ/VGFVRVhqoUqXKstTcXtqJVeVXtWyIYcWuh+RddhJfG4E1+HaY+8YrEAi5C6deGfczxnr2tAR4yXPBGHOuU5/zyXavLVfSEEOe9vYYIu272Wlz5Y3BXVaRc3E0xJ/KYx7YEBmkaj7tcywjWSbmpg2Dy9sm1IcI3v9t6l6lqezqDKss7WeoUpxsGVJNxNATQZQckyyUuX8P+WxqOtjewDk831XZalXQEQshCgzIUEDf5P2ibYxgyPywBPASWnOXE/FYXU6uDdt4Fvt0zRLFnIRAEfGcgX0MV3eALG0kdvhfHrTYbRS5E5NYE8M6Pu1pCYP5z1W6xOAHJxv4L2DgJICuChj1LAnyrO8VOaOyrx7O1YqbPZBOQDitGXrAlFVJfGqjkmRD5gJJRYY5xcJ+whnvEGgrBbP7XO2sGotQG0Kbk0p6WZxr+xd5m69MimMXFXysLLMIOMtciXDHIKg7CNlv0Lee/RcSNuWy2db5tZNbWt97JrgSOYoZb6nPAezLfFEUzfTjnW3qsHE27ejp96iiHfWsbxtOkvWZ63GaD3fI50l3z123zmOxJnoQgNFtQxF2I3YXhLS4lc/AM5vBt3ddH6+1sD8af5iGRsTl0e1pdsgqy6lD+YHVUnLddzOOZATSwamcsIsir0KRAr8qACZLJMAA8RU2nH5YAHaTQLVnHYN3FbmpDtvUmXUCnExBqzXa6xXS3TdCl23kphEfZfcAGIBDjLQIO4W+cZtLgK59x5N2xqQCNnSo9LDkfLcMG+2gTvpQ/6jHGLfcc5lHkovZAW0CnBr/iWRVpT9IXsoQDIh32pcprxEkwruYxqr0soDMd+IZgVvmnyv/lYUNaOrcPltF40VlYIasJpU5QCoOrd4lIf+lfzcYGkY1GML2w9/qqfhrvZYJXewh47SbK5EtjpKoITGidFnkiUlXXFYJw1IbEstq86ov1lXVQMY5XXMtCCDgBualR7QrjWFUA+hUcIzcIPk9kb6vMzYDI6mZ454gyIu/CvuncaqwViWqXWDxArzuW+q+E0oQy9joS6wgz5QRjGn7pTSS1kO2YmLFdrI6Eqpnl27mcFRb1+ywAyZEXVVH5rKmDbuVoy9JxwcEkJMe9Z6jS6KhUpMz5zjfONjrndel7VckbMZYjgeekLvGri+AXmH0HmcHDt4x7h6dIBIar1b9giiasQHfXM+8tZua6Q0ClUazmBaHvutLpN3m5y2q76GDwbviHvpcA1k887Zae5+UsaoBI8vH85Sfr1I6VuO5Z/dEtQCuDKQv9uGewcNluxLuqTZdKGBIuC8FfkXIGXJGH
hBrnaXdJcTWcQFt79tjfk76/g5hoa4CAX0sFcVWwVlshYVwLoBfKgKVqViSrG0j3YL/ttom0BcrGaGlbs9ykBRUrQ4Mvquw3q5wmq9ROyXWK2PsVqeoO9XKD6XDJeE8ewawkVZFPcwhvMklkTOF9CCiyOMZZlhPJFxPfXLpnboz3qKvL2rqtFMQMbwjQwWTYBE+ZliKArisNYiucolfWXnqj2qrCi3PDSdIcK283AF7krl2BRMJs30Pjx6ZnWJghZBFLJhulJuPsROIBltcTWuWf9se1sJEK+V2J7XeObvBokqGlkX2mfKfwLyaAwv5aG8RPCOqZy0nYFKjozmkFkvdpz6Y4N1Qk7DPBjjiapA+CJWCh1XfyiZDTiKZimc4C+UPspPKus2Ls8gN4PllBNu/ZTamOehLVU/xFjP91zpwp8xRuOGNuqk/NEFYepo/D/LOlpamOuS2UX3kXH2tmZ2HJzzYDDapkXfSrgG6jt0oc8AjQJcdoEZh0hSsFYDXjvE0KGnJQiEQA5rME494OgQ7Bgh9qYvUt2dy9auhHJTYF3a7W9QQ+B+lkuSAe7KY95RnbtIjqYEdm5lIJr4bMYo88hUu+3MmTeGnPmKqr+Shfmc9v1ch4mc5hU4BGe1qGn5YHLde0GqT7cXFv3CU972pIU73c4vaRZdeKBoDt2pk/WzlHNeAFcd9wCwi2EWFysEvqrVudTpItCddi+cRZuuNZ5OfJ41yTSnn/bbl0PQJGlBKICNKXh+Vjk/FH3UPkuSvVpcMDnEEBBjSP7kpUhVVFRWFGjJzq+BS82O9g1PLcfphwp5LZEM3aWsEKybvAbunF5nDFi0g86yTpEqSzGiSwGru26JGHt061OslifouhWYVU3kSUXXim8EuXWoaRq0jYdzLrskcTriK9hDidGR3x/0le2voetZ5S5m2sRRhOpNfaHeU4BaEG0IBpAspdSixIYTBpIMQ1IPjTkksUI4AyV6O9/0qJYPQ3YsZvqqBauT3nRdLZazi2bzCEvMJQUBs1smm8mGCLWg4moOWKVfy5U0m9YjYzSyrVK2JZX10lTb7A168naZq5bvbpesixQgbkPFsMNYZBhgaUNOCWBSxGXopKjuPRitr9W3qfWtLK9l3qWyBAQYgANmDnlKPECUrd0K09lw7bSRB6sYUZXVi/xYVnpGVOnnDOtZ3p11zUCxJlT+cMmSiMEprIzLfKmWOsM1qNQ/iEuTiVUHAEwSbLusgDV4YS18Nqs79gYq+e5dg8ODQzBLDKUQA3xUjqjT6r5HKEHZSxDzdPkDGEAAuEcMhACJreQco1sDy6ZHBPKan2MDkYOjBKil9hQesgG0tV+Q/7LZuOT7/Pm2K+2QM2jIU9gmgnAGvnS8zjms821QmnfgzbHimQzXmUYnq6rqLC/luQ8ZdQgW2Wdafg6Wn8vFoOw5niVjGUxlCBuna5T1IIvnS6s4N33GWHReTN4tNCuu8BD4UxXjIuqDdzHd/UDRrPXrBcY0Bi3Hhk8T2+K5VumSNtD2aLKJ5kT0vjN0PgtsDYRIQajn7hyz9LH0MtCJJNNIDCRhQGJ2ODB6hChXADMiYowZx9PbqwCGY4IHoSEH74ogWw7T1QrFtmkMFo0bu6nSps+nIvOO+qAWtqbG63ws/CXIpyPGOvRYrU7QrU8R4gqhP0UfVghxDeaQwtiK4qBgiK0QR06xAZJy5gneO3jvBVeIGmTfqlIT/T0h6MvjGmoZnjrXzRqjDXk4qkxdSucKEGX+icJelHqGKJ0FXNAWMBxx1pmz65cWRgyKKVg4UAn6Qy7bRESpNEbOx75n1DbJ1O2SmM9CqjAWNZeSYioK+Zhni5XPsKby11qUDC2KmLcDWQSS4JlZAR0ED+bS9zqfRjeWyY/Jbaf8XvGZsbYZRRViAYKGz1ILAEprEICQ5oWDZcsSx4VQK7OkdVQADOoipVUr86ZahanORLuH05fS4xZcS/+gzjrqqkaVUu+cxvfSCg7GG7TRnLNSIjk5eJHVAajUF9qH23lgmO/YBU/nGqUepNHPRG6wl0vstW2AvawNQA5wbdpr47OUTkgrhIK+1QswjdYBGvUCCISD9gA5Zg8YIfQmGQuQr4OT+rgSQSSqdVqfo9yAFmMu1hHQO8ZqFeGaBn1YJ6gOIPLJhceCL1UUHDMWJfh+tuxUkEJv7xuAR5torrv21DvDFVLWACTwKwW91jYwEFksysiN374IFMHYct0DAK31GMDVNVWecFlCMn/eYnt3yCQZPByJwgOdhtnsV/OoXo9TsHL2VX2sq+rzTZHPGcSh2xjHO0RnBonyQ8zEBC7pLHT3A0UvShqLGLvSbEt5SedFojDvphlX5AJ3+fBNqbl6Gn5r4FQ5hQKQ4g7pc7nMT/N3WcCLEKsHrkq1QlIExyJalL3I7j7bBmKX0j31fLr9k4qNESK2mtrvdbOU3vIExBgQ+lOEsETkJWJYo+9O0XdLcOzkFB0hKVeKsA00O0VSID85JyAROYJa9qjamz0CDOCibR9yTn1CKcrdMLaQTSefXe7TikPVeosciqsZqWY+2UuVLgcka6G62eUGNZjUnP+vlMOJPDeVuAssqXM1nwygM0xh631WXmLTNmaXFNQa/ClK4Ob6Kx+xgo3JtDyy3O4lOu0Y2KnBnLq/h0AqM4O4vi69/FbSgAANB1wUeh3jAbhgvk93H5cfynSAJ59undM8ypoTU2GjO4amdBwDZus6mPm3ApLyyodijVa3IJvxK4CjsYt1jpPWK4EjjiC3TVqrGdsbaUnQAMJm3lrrAnVTtXYc22ibm6Q+3xT8dyqP4bJVQF230SIttzKtH4quZTU78bJao/CobxSM1ELTH67rYvcBLc/BIbIcerTNAkdHDFoS1syZp9ik1wfifqbtrsdblu+QImUDYm0EOIqAZzhu0HUrxJhfNqClNmHs7jJ2z7Zrcpl3U99vlapaVfnp6Gj/S4K71yqBd4KnbKzd8/95wHTW7tgXbreWs8CZqd+nb3Ycv2cX+eHPPDGpLgidE9BxXl4uF45eJM28k3QJFN1ldOtz4CKuiC8GmjFiG+2Dz5bNxacNjbht1mTYU2vRiSgrwRJM1iEmgdl7J1ZHYmwkihLFfCtp5ICIHsxhoFic5xyyyungBi4FT2YKroR5LDWrVswAxRSYldF3K6zXJwIUhRX67hjr1U2sVyeI3AEJJNJ6lDEX6UeUcnniiOAbQtu28K0HKLkrxJCsiiaE+spefBpGyRY+I6uf8pvm4RAREQe3suRPGAqcWTGeVv1ROeQYSxi1RiAmcwsy5ewtMAbNwyjytozy0ya+pPrvHF4YZmHie2lcqnn52ERpgqX2OBMFucR64g3K+zA2DTIAJPFOdMQG1/1yDR7Kd58GNVagYw7inb6Tw8QVuBYMEnBjDg3HbJyr9K+sT+W5uDNRCrpvSBV4ZsNXyHOkhEfm/H9WfGuEIZcrs0fmWQG71G0zF5tBIecIHBnOD1xfHZk+lSp5cskSiAwvafkxsz6btXW05uU2KR8NOqR6bztwswscUh50Kas6HHMCtBJQI
tfAy+9eg1wPwUcw2BPAyXpV8+EEmrDmm0rhHCobRB7kFL5LbUw3lclwazwyy1msBrUgJjhyWLSLXJe+6zJYVCGLaTiKr3UZK7Xs0f2VOSCEDh0RQAEdIpzz6EOE862pA1U1u6Oy56zpOQARAKjZmoJ0dy9INJeSZc30T0IEjOZdGt9zk0cHAOmooA1774gqHFL4XY0+L+mSLulsdAkU3U3E9fZ7SXcD7dia5g7nC2qH2zPwMoEX2NNMFeJFESAsDhqQB6gRHSfpPMa7KIEjzrp3uTMc9IyknXmNSJLMVIyiyVLuiDAb4Vi0iT70WK1OsV6fgLlHDCt03Sm6bokQe5RbzrLKiXzCTAREjdMjSqBzDk3TwDdNPq3XeBlOM4qqsKXeHwyClmFJLCOmQSJrYaLuPG7kbFIApWzJYt6vLHTM2Bg1LFWVDdgla7djX5TwpEOLq46qhLWtm22c8WwDgbJ1i2WPUu9kEUFqGbWdFzkp9JQKKqxl3X928xtnMEhP5z0An/ghgjlk0MyCRTUVEIWI4JsmpU8ARZAb92plvvTNmPTWLFXqNdBrrWiPMhuQcwRPLoE5Wn99qbw/tE+zY5qtLcjwSUEn4LzUNZJAYaxxcFR5T/8ZDhy0vOQ1mifQnzgnI1dcqHrW4PI66qXd+rLzALGDc9p34pyj/cnJ0iumeSLz0wZ/T31HJU7RZASeFFPOUwNQU83DinQcJt2AdN7afLcBSQS1iskAma6/yZJLAz1HDgnoApD4oYrFA0YvnQdKEZQIwvou938qi808BaBoFWcXVQZnizcG4EHEGawT8kCkDCi5ZE25EKwIK3Loug4xBgF9wBnAY0X3SOPg2U6jYmmWxjfGDl3HQGT4pgX51qy1TtIMAFuLS52F9rHH5fXf/I98YGDnqvbHi+F2Zbv20eCvSULIe8k+xLUpq78hIK6xCTdvWWcUhsny0QtKkL6kS7ojdPcDRbMWr7nq3Txz030tNcxGkdhY2ERcjemEMwocboATZZ8FjNLTqNuoEuYUt68QPnObVsv5G/KakdkcIUcl0jmAkiabkpf3Dhpsz2+88U6nn3Qry7q9Uc4GfDSvPVw0J1udSgipb5xZHLQ4OGhx/d4DtKdrhGRVH9VdCQRPHotFg8PDhQjVzFlZ0lgK83hpoITuTC0xPxQIKLxR51g7zQ3yzgDFMPch03BWSFDlZ1VSTjFIIpg7dOslVstjcIpLtFodY7W8iRjllrPsaMIGSiHULh9MIBJAyTcOvvVw3uWTXL0hDZTiyhhziKyUp79EdfuLQuDkBiSy4JBRBnPVVCkcRpUxKQgAu6TNBZAFmSiF0SUCRw+QhzKyVcRymSxnuPlWLzYxb0j7WU+0UZnma99x+uzJCYCjFgopJgXBJbBG+2KgoU1d/2xMWobTzsaLATgrDOmbqLak0AIQSDLxTYsYCYdHN/D4E8/iqSeeArhH23q86otfhRg6EAIkWC4Qk9UIIfGAJ/iDFuQIJycrLNcRbbtA6DtcOWrh2SF0fbaScKlfFX1hOIA8HHlcvXINfYg4XZ7mvr5ydIDl8jkpH2GgHJc2Eokl4pWrV7FadegjgaPwwNHRAdarFYr9RLk6vepD85d1HDXeTZrjcv064JpDMAif/9zDuHLlCPfeuIbQr+FcGTqCtdhgeFagQpX88ZpA2k++RXNwhM898gWs1x002tD9L3kJ2kUrcW04CqTGgGsWODy8hs989hGs1h1CCPAIeOUrXg7vGMShlAsnwCARiDsAQOAI5xscHl7Dk08+g4cf/jzuvfc6Xv6K+xC6taynUUNRR4DEZe36jZfgySeexbLrcf/L7pO6RsA1DZ599jkwRxwsGnDs0DiBTSn3jelxkj1AnhYLt2xdxgpURpBXN1M7VmlviLq1KBgjyydzRDS3rcl6ItaXnkgAE0WEoX/EsggKOpPOJiQAkc0Y6qCz4W1pF+U48dJy5wEOCurImuqpxeHCw8HDYYm+7xBikHnDyuQQZBIFFKv5Nq1DaS8ILMo8B+H3Bg7kGnjycHASzwgFC8trGIoImqtdft5A9sdN+2gClL1HiMGkLsHALQ7oqNio6XM2ccxyrC0ASOuxNTSsDORm0Qw5myclpUEuUwWP47sV68dtBwTDvKblOIouDZyiRhskhp16jDkA0fqNzFSHIOWGes8F8qbk5NF3Yz3J6SiBqMQbhIKqM4qboaftRUyfqccw7eYpzW4WQ+9KMrLG3ZTNnjRovzvJ3L7aSTT4u6W8sVXyRJ3uJE2uLSr7z6O7HygCKkF3I7kdabhsENtoTyyeypyx4JGZDeZkp06TpIatZfHMBXbeomiTb6RdAI/IHXeW5rRtziDvCyjS8naVaa2EN6a9/RWouDjtFluKkLotPU8HHyy6M4q0umuOTnznOoKFChrFTD/VLQlNi0WLpvW4594jHB76LCtL/A8nFh/R4+jwAIuDBqAURDZLmmkaxiLLT7f8FsaCVXApVyaDxIoix9+x+aYENhi2Ko8VWAMY6Vb6IwvwOVaBlEjZggeQqCgsJ8j9KVanp+i7FWI4xnp9gm51ihDWYF4XkCirUUb41nomZYMA+MajaRv4xpeLsTQORhLO5bWaJwpYRGlsckfkk28ignMe5KIZO+VtMnnRxmGy8Y5UsQT1AFppg1M3mwgHDwYhuC7BNWxGigC4rBgCHm17gKZZQJQ7Ac5AKfALFL+z86sW4x0RyHkgSryjbr1G353AN4SgVgWkipIDmXHfNMesoqvWBUo24LOq0Rovh1VRSab9wQU4Iiy7NRaHN/CBD/8hfvbnfwWf+ewjABiHiwbf+A1vwV/5y2/H9UMPr/3kGkR2slRwhG8OsOwjfuEX34mH/vhTODnp0bQHuHplgf/kbW/G173xq8R2gpSrGcTqQph6njxiBN71a7+Jhz75aRwfC1DkW483vfEr8M3f/Fb0/U25pj2KkgvqQOhBYIQQEfoGV66+FL/7oU/gg7/3YZyuuzS3CF/2Za/Ft3zz12OxADwx5PI8V0wUSw8CFNNskh6Myb0MgeGcB3yDoyvX8fGHPoVf/KX34OOf+BRedv9L8De+57/A6173JZCAun0CFij1vYxppJjHiKFAY5AbBHXM2QFocHj1Hvzm+z6EX3jne/DEk8+CGPCe8IavegD/+Tu+FS+7/wYcKAFwBGqO8LPvfC9+9dffj+OTFTgCB63DG77y9fir3/EO3HPjAJFXYFKX0wTExAbOt2j9AqADvP/BP8Bv/tYH8Ed/9Gnc95Ib+Fvf/5143Ze+CjGcCA/FhITRGgeHV/GuX/s9vOdX348rRwd4zWtejde//vV44xvfhF9996/j4w99AswRb3nzG/Hn3/q1CSgSUJsSEBhBhZeRHEIplDlFAWACRQGDI6Xb+HyyXgMhRgY5L+2KjNB3oBjhvBN3Zo0lRSigbiqWPLJlhIJDaTJJv1bymgJG+j0aN5mkhA8BSFL3N/Eb07hJzhNYCi9pPeBaB8cOnV9j3a3Q9wVktTgQ5fnvMpgj4Yd0Z1FQuodjLxaCHIXnuewBBL0ZL0Nypv3ahoHl
4mCttyBYrugGkjXfVXslwCkwMKUb61I6Mnsjx9yHlSURI4exqcQXsqvoPG1gePtlmrUjvWXTDZXD3HaXO6deQx7UmtVfC+Zc5CsejBMxFwvgjSQWchVIpFtiXauqvHE2up7tKG4UQH9LzXROZtCrZF+HYb9ANAcowjzVeE/FDfhlC+1Lid4sztRp9jR8lF2sMVqrAOEjDjxbp90KLO794H9UAM4yEHc/UDQH3ifg4k32i1afS7qks9NUANiKkpBc1sQkcFrA85yngmavbkMSPNmhXTTptFDjfhA8tfDc4urVq8nSpdibsCocdeP2U0ezMZQg1rUgNQKy2XadKseb6mUF9Los3U2L8Ks3vERwCFitlwIMdR1i6LBanWC5PEbXrcHJlaE2788FARmwK8+c81i0LZq2EeF+IJYNAfGxuTplvqp0p6RAO3JigUF+tBnLhUyufmfYU+YkVqyGidcnNwAAIABJREFUkvtIylcDG8cssxIiEYITdyh1oVNFh5MbXOMXuHL1ZfiTP/ksPv/5x5KlmgdzL20lgJLrFtnTVnVRMcK6uExGvPS+l+Arvvx1iLEHc29siWB4dT8TzGWlVfKUW/CA6AghWYVQ8Dk2TYgOP/7vfxp/8tlH8YpXvRqLRYNnn30av/ze/4jXvvZL8Y1v/nPw8EnJT8ouAYEIvj3CL//iu/AzP/de3LjnXly9cQOn6zX+6I//FJ/648/i9f/jV+Il9xxVoDZRhCOH0PcQ66+Ipx5/Cj/1M7+AZ46XePnLX4nIEY8++ij+4A8/gcMrR/jqr35AAAImGWMXQLSGI7H+atprePixZ/Gj/+dP4WTZ4SX33wtHLZ5+6hk8+KE/xP0vewne+KbXCvDgHEJ0IlSlMVelkKGArgMzISShkiDAtEODX/qlX8OvvOvX8eQzz+GLv/hL8JnPfBY/+ZO/iB/4O/81rl87BNghslW7FZZs5Wa3mHjIEWIIEnkrWdTF6OH9AR5/coWf/tl34dEnnsFrXvM6cARWyxN84AP/H172slfiHd/2F6AxeCIIj3zuSfzMz70bTdPii175RSA4PPLIF/DB3/l9fNWXfwW+8Zu+NoMHEQpzE4Jv4BbXcPMk4P/+8Z/Cgx/5GE6XK7z61V+Mxx59FP/q3/4i/v7f+X7cd08LeI/AyU3RHeGTf/YU/s2P/wdEtLhyNeIjH/9tXLv2EVy99i489tgTaA8OcXp6gk/+2aM4unY/3vLmN4HC08lqSuL/OBIwrijWDEJX5oI5YIhMALVoF1fhm0MEENqmFTAPMs8cEcABy5PjlN1axjwywDEvWSWuksIDevumlAlH6bxOQNyhHpPh5Qqozk0on/OHIv9m68mBRYRcky7fXVT/6jX6vkvuayVjCYpNiAHIBw96OVY+RFJUydSaU+ywARB0p+Tc4d5YAH7TaUR5nJjKv0tZXGncD9n9EcC0XHHWMd6soJ4lBuOcUsbKdn14ppZ7BHHxnQIkMwvfIcBlfxLlTJo5dDOMpS7pBUx3P1B056fWJV3SJVW0ZbeZ2PzvdKBI3ev1JhoGwzkBDUJILjvGksp7B9+4UlcywBYx5ga0PVMdc/kTFd+I+xhA5RaqxOk0laHKgkjO2bkqBnT9Guv1En2/AmJA151iubyJ9fpUFF2K5SQOA1FM62hkSUeExktsIpdcNhyQlAzTXAOMWICofFSliHPfKahnbzvTq7prog2fc9ZjcgnEcWSuBHep9zy8v4brV6+AEVOwZQGWQE7cUFyDdrHA7370T/Cv/69/h8cfe0oUah2/DPopGDRdr4KfMbxj3HP9Kt7+l74Fb/9L3wRKQXM1YPgQnD0bDSG/AlKpFZojICLAUYvoHJgdWrcQoIIc2sMjPPXk07jnnuv4kX/5L/C1b/5a/OiP/gj+6f/0P+PJZ49xdO0+NKEDMdAzcLxcgYnhvMPpusN73vsbWMeIf/zf/yC++298Nz716U/hb/7178NTjz+OZ0+O8ZKXXgX1DZgjuhDQHl5B2zY4bFqEwCDn8EcPfhzPPXuK17/+9fiJn/h/4NsFvvd7vxcf+eiDePjzT+Mb3vISEALYMYAejCOAOwARC7Rw7gr++FMfwTM3T/CN3/Qt+N9/+IfRtof4Z//0n+H//ZmfwOlxj8P2HkReoaEGfR+x6p7B4uAA3rdo2waLRYsQe115ElAkfBIDsGgO8YUvPImf/4VfwaqP+Nt/+7/Bf/sP/yH+ux/8QfzSz/4sfuT/+Lf4wX/893Bw2GC1XkKtZASgCXAc4Alomya70C6XK8QQxQXTOTTNAr45wMnJczg5PsXrvvR1+I3ffB84RPzHX3sv/ub3fg+eevxptDhCjD3ISfDx0+OnENYd3vaWt+Ff/9iP4fqNe/D3/sE/wr//yX+Hhx/+ArgnwHmZF3pFOpx4azqH3/yt9+G33/dBfMlrvhR/9wf+Lr7rr30XvuPbvwMf+dDv4z+8893463/1HWgXDnCMyA5Xr97Ahz/0Wzg9XeE/+7b/FP/gH/19/Pb7fgs//dM/g4/+/sfwwAMP4H/4oR/CT/z4j+Pdv/LLeOe734M3vOkrcc3JekUanBtS/xw3TC1DOVmbJoAXrsGVq/eiWbR45AuP4eOfeAi/8+BH8Mxzx1C/pBgCjg4P8S3f/Da84au+AvfeuI6wfgbMPbwncOgkIH6F6qeymKuLynXNEkuXkACciYDB1bxPrVGQGgzk2F6upEn7kkNyi1NwChGc4sFRFIuahhxOI6PngJjMlSmhQqzrbEJS7IGFxPHT2GI58tL0CkI4l71SaXjz3PDWvDqxLJ6yzcxzyXnx0JbeUBdwc/wgHZn4djbQxhAX38Qz2cL2nKgCMnUaqCzHZY90KS4iMTgKL9cxIlHmwja6Q0DSWWhfvXupYQNnB0RfWHShgSJV7uqHw8GySPBm2hezjzHqW6X5b+7sA7Ubvs0S96rA73l1uWi3UEyfWNxiPjPzysralrRz++lO9+e+yquuwt6WLqdXmZoy+CCqQhCzeagCHhPAkG734RKwlpOFRzHVVq3+nLdQRhHaJxpXumJ6ExuLw1TeJ7UaUrcMaZOK/+CI0PfouzU4Wausu6XEJVqfgPsOav2i8M7G3kh9RXBwBAlgXfWlxPTQK55V0csn8akP7G1m2ink1AJHQSUJTU1EIOcyUGTZLx/SE6HAYtuJ4Uy8jRQThT3atgXcER76xMP47Qf/EECx87B9TpC6fOrPPotHHn0Sb33r2/DlD3yF/E5s6kIgJHcOcNkDi49Vbvezx8/hPb/yy3joE5/GX/iLb8VRU0ZDXcKIeJb5+ThJcv9L/RpV8B6wWmQgRkYfPZ54+hmcrk7ke8+IscFq3aNBi+uH1/Gye1+Oq81VgB3+9LOfw0Of+jO40AGR0bQt7n/5K8DE8G2LDzz4YTz+9DP4si97AN/3/X8L9730Ply9dh2ve+ABfOELX8CDH/kYXvul3wbGCiAPv1iA2yv4wIc/igc/8gdYrgNc0+Jzn38YyxiARYvXfeVXgohw7b57EeDxex/5fTz25JPS1xylt9nMbRbny4cffgS
BO1y/5wre8OY3oe973Peqe9D7gHe++7342EMPCUhAjDe/+Wvw57/+ARB5fOxjf4BPfvJT6EMv60meuA4RhBiBGAjeLfC5zz+KZ49PceOee/FXvuPb8erXvBo/9E/+CX73g7+Lhz75afyLf/ljeM2XvEqGIFlGaFwriid5jDgy3vDVfw5Xr1zFcrkCCHDkwc6DmgU+//CjOOl7rGKPnsRCCgcAWuDhJ57AJ/70czhYtCAvsYY+/flHsWRCbDwOr1/D1XuuAwctVgR85tHH8NknHkcXjuEbABSgdjT33ftKXL/nCq5dvwbnCNeuX8Nf++7vwite8XL8b//r/4L/6vu+H+97/4fxNW96A972tq9F5B6Aw9PPHuPBD30Uh0cLfPu3vwPf+q3fgre89evwwfd/AB/9yO/jh//5P8fbv+3b8Jff/nZ853d+J37nfe/HT//Uz+F7/su3o/Ee4CDwUAZzizuurOPq58qAa3Djxsvx2c8/jnf+/+ydd5xdRfn/3zPn3Ls9PdlUSJAQwNAJIiV0SCgioALSkSJFBb5fFVTg+8MCKoIlKu1LR8oXERClQ+gtSK9JgJBO2m6y7d5zzszvj5k55e7d3bthE4jmeb2Svffcc+ZMfeZ5PvOUhx7l1VffYunyZopRFANuTroUCN58611GjxrB5F2+zOSdtiHn+SgVpbL5ubWOQ1QM31IqswaFdKCFjqda8rPIrK9SS01h3T2Rxt1QWUsIA7pL4y7n2LzVcuMIUlJa1yvjjhpFEQQBgY01RPpd2vKlMvuctuCT0/fjbcVlg7PWkH25RaZd1Hq+1x4eZNzQUnXpdPgiMs+u65QGy7pqT+eDqfL+MqLE5dHsR2YTiDEi0X2/CSFsPCJKSpKZPWaNUwY4SvZFgbZAt+4UnSNZC90XXems6ck1K5F5enqf7vmlPVfbFdYzVVDWurpyKuZToows/mne292aSd9X5traps81UNSJyvZrJQ6BZU5t+qAan37gKqhT6aZWbnJ12vjKlbOuLmMqa99ap7Xfnxkz9L4oZx2jUuCj7LTQSfvc6Wt8CqudoGOFbB2RaL9grECMoCulwEtZoxg9WcZKR7fASB9SLESlka8e3pykCDftljppX1xYPJd06rsV8DVEUUgQFIjCIpqQMDQZzsKwgFYhQqq4TlZHLqlDyknP7sRSSHzfI+flkFaBUalXC51SuHTybGwp5FS21JA51SZRqIzSZOKzuBuzG3xWdxD2HdluLiUhPLQwbipCSDzho5WJcbV8eSu33nEfsxcuicsQQsTKZiKRCpwk+s1vHsWpp5xqrARsQOx4fmlFGAWxRVQCMMSVRgrJ7FkfsuNDD9HS0mribgiXOc66PIm+maMu/o0L050OzqWFh/RruO/eR3j8yRco6CS+lNaC1kJEracJlEJ4gra2VtDwwov/4vXX37AZ9cDzJKNGjeKMb38b6Uv+ef/DFIpFjjn2KAYPGYhG0tAwgBNOOI4ZLz7H8y/MYN+9dqe+OoeQHhpJ06oOrrv5ThYtWUFNXT9WtbYDkpzn0dCvgUgF5KuqUSqiuqaaefMXMH/+AkAjtEcCCaQ1Sg1CUVtXZ8w0PNA6or3YRqAUsz76mNlz5uJ7PkJoXn3jPb4w7lxenvESd9/7D9rbO1DaWS3aOEIqCUjsYu9qLZDCZ0VTC1dddR2bb74lm0zYjP/6wQ8555yzeP7lV5jxymupcRZJ/dzUsn3/wOOvmHnjwBIwrmkIQqVoaS+ANLGytPBoaWsjQvDeBx/xhyuvjsvXQBhFhGg6ohDtC5QEnZOQ83nr/Q/48PfXGj7qlqwGTwhy+Rxf2nEHgjAiEPDWu+/ym8unccklF7PT5N3YY9+9ue3W27n3wWf4aPEyPKnxpIdSggVLVjBmgw055OtfQwif559+iaeeeAZfePz9b39n98l7MmRwI98/5wecfMLxPPfsS2y7xRfZfpstQIWooAOEIgiL9jAgQkqB0p5RcoWJlyJlnhf+9QY33PxXFi9ZxqiRoznw4L044sgjyFdXU1dXj5SSKApoblrBby+/nFf+9TK333kPs2fN5pijDqM6l0NphecCUjvLHq2tq6rhF86S1QGwiSWksGCOU0oNM0qv21hZz1yUgANCrAWlFqlQnEY5NwcBnrH8s3NCWH+afD5PqBVCJfGEMsKtdrxdxGw5Lt7tMS5INxqT7UxZwEiSYbRd8pXecKjy3LlclrLYdVuaFaC1AwHS+/2/L/XUt0ncuW5kQZ3i+ZacRR6Wq8QJBLrp0jhpjzDgUIyGpE3VsNfWkFxdtnpaxwG1RYqPlt5fGpMpe4f52zlvarn3URG403MxFYBE7n0VTvU+WRFrbvjWMJWGRyhPolIUrw9Il/z9rDnWugUUlZ2FlQBFfQcSpSldm14PpBZUFLZdVNK+zyWSsp4qpU4bZrl7+upVle4w6wB115aMCYmT3a2g7i6m/7dCkQGStAn2qYxriJDEwIZ7Zm2CRLHakDr66M04JrFDdOoXB5xpHA+N03ErRRgUCYOASAUGJCp2EAYdKBV0djezZtxJrpu4BbiTZQd65HI5PM8CQjGAp1NPZnl1kqksDfgkSlk6ZX0s5MXXbCDnkuOZePycUlda5bI9KUFLq2BqBL5xPROahZ8sZ+HipWw5cXNOPOEE8lVVSTBgYSxuAB597DHuvvteNJDzBFV5nxVNy3h5xov4Od+44wmPsKjY4UtfJp/P8+abb9Lc1GyVPEWxGDBmzGg2mbAJnsAE2HV9aGVvAxKZREbpcepqfnTV8GwKY22i0MRAkEnB7Xk52gsRz734Gq2FkP0P+AoDBgxAI1CRgScirajv1x+NYJPNJnLKyacY9zQ0SE0URTwxfTqvv/Musz6cQ1gs8uFHHzNu3DgOOewgBAHNzW1UVdVywIFT+d9rtublF2fw1LMvMGXvXekoBuTzVTQ3N7FsaRObbzKBs875b4LQAMGeCBk9ehR534Mw5Ec/+D5Llyw1cyhun0BbS4sE2HOxpjSeL6lvaIAoxJc5jvnmMWyz5TZUV+fRWpPL5fjNpb/hvffe58M583nx5ddpaQs48pvHMXnyZCIV4qz2HHRj1oVEKQP8uXdGUcDiTxYR6YgNxm7A5b/7bZKi3sZycuCDKVDGdfhozkdceumvaRw6hN12350gCMw4aUBppOehlKK+oR5f+EgtGTFsOPvvO4XaulqTNcqdtgsBwiMMQ8ZuMAbPArCbbLwRhxx8MHnPQ9qscHFQcw0L5i/g6eef5957HzauHVZpveH6m/n6145g660ncuihh3Lrbf/HO+9/wLszZ4EwMa7q6/tRDASHH34UDfWDaWlp589X/pnW9lbyuRy33XEbk3acxLHHHs/+B+7PKaedwuWXXcbd/3iAEaNHUJv38T3whLHg8nMSdIhSRRBVgGdc1ISHl6vm/+78OwsWL+OM73yHY44+lk033YyqqpzN3CUs0GN4595778Nf7/w/Lrn4Yp576RX69e/HEV/bH6LIwnBRAjNa1yyzHlUMsmiHpllwW4okoHPMk9xCThZidnUaM6W4jDS/ix
kcpGcZTotz90jpIX0fP/KIIi9xx0m/Kw2oa5GsfZG49yan7Dq2vnXzeG3skhkL465cznBJDHoAiXR6jP7dqRLNuOR+7XIHphABdypQIbn9vLwCvPY7XkDJIZcDPpOaZX5Pn1KV1L7HY5lKukpTkQXw55E6yVrrChmhv+f7ekqItQbpswaM1jGg6PNH6yGa9bSeeqLPieTlTknTl5xcLZxSr1IBHBMhPCsgrOur3gJeWsVpumMYTGgiFREEBXMirwKisEgQthOEBbQOjWVE3I/mRFunABel0wCPA4k843Lm+dbKJooVC+WAIidEOmUkvpaAP/F9wgWXTgKOC5EVVoQD4kumX0bM6+qUqNMll3Jdgw7jE2ov74HwiLRH/4EDOfHkk8nnc/EppJACrYwC1rRqJXffc29Kl1O8/fbbHHHEkVRVV1GVzyMQeDLHs888x4ABA7nkF7/g6aeewvd9QLNy1Sq+fdqp/PRnP0PrKM5c5KzIhB1DOxJ0dUjiQIZkDLsnbd2alF0KDkdARQShmTP9Bwzi4ovOZ/iIkSYJkpAEKqK1rY3BgwdTLBaZOnUK+0+dQhiFRGjyVXny+Txnn3UWM9+fxcuvvcbs2bMRXp5TTjmNjb6wKe2FkAvOv4izzj6LxuHDOP2M73HGG6fxz/sfYaOxYxg5cjSeL1i5qg00jN1gDIcefCD1DfWAILICnrJWD3vusxcu410yNZSZU9olzrYWOcKMubNCFNL49335y7uy45d3RngRSpuMX9fffAPR++/h+RBEBaQPk3ffmWOPPdo+r6xbqwXvUu4fIqVwg7bBVSVTpky18dQSRVcpBzRYrNO5/ACzZs3kimm/ZbNNN+Z3l/3SuuAAeLG7iVua+eocQVRgp113Zuttt8bL+abcFADlgCLQVNdUE+iQ0888hWJHAU8aoCgMQ2u9JvCEZNasWRx0wKFMnrwLQ4cNNoCNzJHPVTFn9kxGjxzKvI8+5HtnnorWxiqvtaWZBx94kMWLlrLhmDF847BDyQmfJ559iaeeeRrpeYQqQhXa+d0f/sDOkyez0dhxTN5jT6b9cRpz5i3gD3/6X3KexJMCTwpGjmzkG4cdgCdMqno3pp6QCGksuBYtXooQHvtNncrELbZACEExLDLnozm0tLbSr6GB0aNHU1VVRT6f56ijjmbgoMEcd9yxPP3si0zadgs2HtcIKrLBr03nSpEA253XZAKMC0QZxbBUAc2uQ6emJzHYLHiY+NIk88pydgdCuczNGpMhzfc9oshHKWXd2Nw70hnAzEGWUCZzpXu3c8V2FlJJFsr4wU5t6Y762tK5xyxqbr+ARC7oHe7xn0WxJWm6hyoL/RFbZMdAKbiTnc/Kwj2xUrYUt6sMWOT+16n4XdnC1tO/O33GYv9n+frPOVCkOzGRzh21rq5QK4ytp/X0706fA1wlgXgSAVqmrEuMebTMHCkKC4KY/O0u5sI6yG9SB9QZpUOAc1TS2ij7KowIiwHFIDBZcaICxaCDoFhMZZNKAWnxSXX6YMbdYeNhCIHv5/B9z56gG4ukRF53SpVR3oVIxSgicZUwirHRjoU0W1ccuDq21EgDWD1NvC5+63Q5idOgtTJuHDqygq9xt1CY1PRaerS0Fbj9jtvxfJ9Vq1Zx2KGHMmrUaDQyUdQEKCEJlaZpZQusbLE9IRk4aADah8hTLG9ZwcLli4ldPYCCCohEhJZGcVPChBGO3UfQVqiPjBVUuX3GWaraPu+ZXHBeYqVQ6xAlJCjTD9KTtHYUWbxkOcKqmsITRDo0bl/5PO2t7axqWUXsmmIF7mJbB570eO2NN1mydBnbT5rEN486GinyvPrKK9x8801svMnGnHHGmUyd8hW2n3Qjzzw1nQcfms7pp51GIQjIVdegPY93Zs3iZ7+8hKp8Hq0UkfQZOGgQ3/3ed8hX5fn973/PokWLUgPtauuG286+kmO8jcdvzAknnkikQv7vr3fw2uuv40kT60ULwazZH4EnqaqqdovLKBYIVq1qY+myJSl3VuuMpBVBYIBH309iapl3y0QxETbylbaxZYBczrdr18a2EjB/7gITHyrSBIGxqjFAZYCLyWaC+UvqZa0BegoRbYUCIggwb7VgFaC1SQVfW1MD0jNjGWnCMCLSLm6bjDlAhKKttYgi5MRvncBee+9NIusYl16B4NunnY7wNCgPrSTLVizhzTffYPEnSzjs619l403GokTIw488wkbjvgAuSLUApMeN19/IuHHj+Od99/GFL2yCspYOOc9Ha828uR/z/syPGTf2C+y4/USICgjPZCeTgLCAkWfH1pOClc3LePzx6Tz06MM8/tjjrGhqYtCAgeyyy64cfvjh7LbbbuRyOfbaex8O+/oR3HTt1Tz48ONs/p0TaGvpsO5tFpoVyX4hHMCMRggvdsVJbDxj9CbmWqX7jNujnHup47qJq0xi8ep4p4F7sos7cVsx4H0un7fuN2YeRpEyWLizFLHB+7WygdHNk4Bpqwdxe3DtrZSl9DGlraLKBrYWqY3Q9oPQGmnBi8+q3p8V9d71L36SyvbXzu9z08pYRJqy1lqMorKVIl4P2X/2mpUtMo/o8hOlklb8J82v9dT39FmpUp9zoAgyS+vTIM9rELUuLTmla3b/XE+RzVLlmQe6uD+N0nf9tvIKQ+mL+qqb1lEcrFM8kE9VVh9SbPrSdcFuenz6Exo7EXT5GZMIoukKJd/KMrKyVfoULC/e3DOidvdF6pSwolPtiwMF20mrQUqfJMaEO0mKPdlJwJC+pJIF6FwSU7FGuh5ZN2b2m3B+13ZrERpnOm5uSOa5UQgVURShwpAoCtAqQquIKAoIigXCMMAFWBZIC+ZYIC11qChsNp4Y6hECT/r40sOTzprI1MkYRjhlx5QlHXilU0NpT36TIL7p02IR/2ben3pIpwJml/aWq0cZhQxE5ok0/IQ9xRdK2L6z17VGKIWnBUuXr+BPv/8j8+cvIAiKfGm77RkzcpSJFRUPogFKamvrmDBhs7i/hJD071ePL3OoUDOycSSbbDzBWJjYfhg+dARCS+Jzf2HmtcIEXTf1sgBgSdtLZ2xs3VLB1pDuC4sVoUVEQ0MdA/v3Y+achRx6+NfxpGfjzWCCIiP485//xE477sh1193GtddfCTilwQC2iz9ZBFqxZMkShjY2cuGFP2bQ4AaWLv2EX/z8p7S0ruT6G67l4IMPYuSoMfxh2m85+qgjeeXV17npL7dyyCGHMWTYcMZPmMA7777H76b9iSgKTaWVYLPNN+fs752F0II7bruDF198MdUiBwgL0lkARawLmUDBU/ebwgnHHAc64OknHuWaa2601j0WqPFg5IhhbLTRGOJgv5YXP/nkdL7//e/jSetqZK0xRo0ZzQnHn8DAgQM499xzKRQKxjLEaq0G5jNlKa0YN24jzv3huTz/wnPcdNONxgLSWsoIKWhvb6elvZU3336Lk089iWKxgNIa32ABRFFEsVhg6NChXHXVNTQMHMgjDzzMBRf+D/UNDdZaUCWwgwWCtp80iYsuuoiamhouvvQSHn34UQMoaeveJ
E1A8Pq6GtrbOmha2czMjz6g4V//MuCXjgywaC2l0B5SRAg80D6rWppZtGQZVbU1HPTVryDzHoUg4LhvHc+p3/4WOV8SWCBLI2jvKFAsFJm805cRCCJp3WeFxJM+P7vop/zllptZtnyFab/0iMEmLDQYGuBMqYgF8+dxw3XX8OBDj7By1Sr69+tHVS5HW1srt9xyM9OnT+fqq69m1113JZ/PM3XKFO687RY+WbKCjkKA9CSoEBdLyHqemvUSW1cqO6zxKnJwXGaRldu/05ZEpkjnAGT4eupOtHulBdbi8uKHzQeJQEtJPpcDZdaj1qHJ1KZsMgelDJ+WEs9ZEWkQSqOFMkCidnGX0lZxlj+WlXHdLprYc4j0LyngKUlz3z05YMj0Ten9Nm6dve4Cfgtr5ae1QgjDp1wQ7PIvSSrrJIDyQvOaVuV6ku/M+3uSA51o0Rmnsc+nxqmn+vQsCyV7jE7t16ae6R7reSOqWFnurij3/sw9KaDIWkji5nPP6lXfDntf6mEV3VYqJ6yZOdyjDtqbOugKdZ2+VMQcXyr7Ux8qvd3o9H3DbdKgec/0+QeKSmP0rI4Dp7ZS0hoknflcSR0rQGVEpw9dDK6ubNArCdXUl4vqswsf9emoEqZfFkFJn0a4+yrl1D3dFyUv7erWlLDUPfXc6QIXlLLMktOJKJF9yP1QonyrCgS9Hu8glV0m/ZxO/bWASlo4TpcvBFFkozbYk/o41bmWCBQmrqXVAAAgAElEQVRSByCqTLwS4SFsbJrkr+uAlNTYLWVE4O7vFC6WQqotsZtKAlG5ctObrgF7gFjJVWaU7YajSDLvxG3QKlZZogiCYkgUFFCRyXSmoiLFQisqCqyCFSFsoFKpBRIPY7HiEYnISnzazg9zUuhJied5eJ6XcvexQaHNeXSqUokbhknv7lqrYjcbd80YXlhrL5GyvIj7zvH8bD+JkpmbBChPUWbuCJT0SKawh0sZrYVEKw+JZ4PZKtARnmfq0tS8AilFHEfIpO5ORk9qwXbbbMtzzz6HGyyNcX/sV9+A1prf/f53FAqF+PRXCEFdXR3S8xBaIrUBELQq4gJZozyUNBGaVAkT9sqd3uos30qan+oHG3NEWgFD4COUidGELnLcUYfx56tu58PZH+J5Hvl8lXlMCBCSoFgEAatalzFvzhyENGtRoSkWiyhl5qLvS7513DHssetueOS46867ePDBh9AK3nz1TW687kZ+eO65bDJ+Aueddz5HH3sMDz/2BPX9+nPA/lP59snH88ijjxKFIdU1dSxe/AnPP/+C5Wemf6traujfv392HpRux+nlbT/X1OZAFxBSkauqRpFj+x0mMmbMMCQmftU222xDvgYbKN/HquOExQ4+mDUTtCSy81MAQwYPYu8990BpzYJ581jRtCL7bmFcHN2wbbnlFuyy607MnTeHuXPn0tbWHscHc+2ryudoXtHEYw8/alwgnVWLBQA6Ch2M33g8IWYMVras4s2330BrzbDGYRx99FEMGjSIp59+mvvvfwCtQeYkQRRQ59cyf8ECnn/xBTxfkM/XxGs9jBRCGPBFCrjk57+gqqrKutt1nnYC46KlMDHhFi5cyH//93+z3Tbb4VGN0gHjNhiD9L143RvLKx/P84giZdzetCYdi0trzahRo81LpIfw8miKJsSEdhnoBGGxSENDLU3tK7j4kkv48IMPGD5sKHvvtjPbbrEFAwcMIFdVxZ+u/l9mvPYGd/71r+y6065oIrbbdiL1dfUUCiHNq9ppqDOxyzxtAGAtjMWh4+FmCdmMjpZP6niMU9qvzrYDXMZBY+3qykJH8X4UG/NIl7nPcUAjF2qlbey5lEWSBjCxq4SGKt/FU7KZ1EKNVlFcllQaLSWxW6KZ8XYcpQVTk8WS7GJp/pMFhlxGOjvNk6XngCLHdLuJ05lYD5ksgomLcuJCKmy8qMTVU5vMjDokUAUiESEReOTcajUgEnEjUqCebYXdcIXsPLFX/wCpkucqFdBXt6xKn0vkGtXFM6LkS3wgJOw+7wLppQEZ2YPSkGHY3d3XnX5lZTvhgbLyg92n4jmayvthnZbjZ8lkEhUVVac3JErjJlYKiJRSpbhFWh34NO0oo/6UpUrqVAn+o/SaVuuzFJXRKUpJV9oFq3dX6RVRUWeW00sreCxFPQJFQohrgQOBT7TWE+21QcDtwFjgI+AbWusVwnDo3wH7A23A8Vrrf9lnjgN+Yov9mdb6ht5VdV2jCldMbzlMxny2l7Q64MfaoDUDXn8GJLLjWf64Jkta9+3eX2lR3dSr9BSvnPtnfKlcMSWbnLnUzfsqrV9aiE4Lba6USvqa8v0t4ya7IKNG8E2sVUR2E9BlmG9Z6hkQFikBNoExXN4hAxI596zEIsgJ11mwyH2IxXmRXE//Lkj6WClFFBlLoigKQSsbxLpo/kWBOXXtxHey302GLtdm8933fXzPKHZOQRLxPSkrNBfTw/1k+ZypoozvSRSBEgEto+lnELFsNStaQ+VusmOhTaYUZaebiznTqWvSbEBQ5nzDjFAYhjQ1rchMJ8+TDOjfH601be3thEGQWYNCCKqqqzNYpbMsSBlakWg6ycuV1vHJuisr0VjTdSutbhkXAVvn9vZ2NtxwAwYNHMDcJZ9w6qmn8KUddgQEYRSxctVKxo4dB8DUqVMZOazR1MpmdPvDtGm88847IARf//rXOPPMM6mqrub551/gl7/6FdLz2GmXnXj11VeZ9sc/stXWW3PQQQdxwIEHcuGF53PB+RfywAMP0rRiOYcd+lUO/spBdHR0UFtXx7/+9SovvPAikIDoV1xxBYX2DrPuBLG1RqwMdrFc62prMdYwClAIQrbaagLbbz/eBruPAK/sOkkyVpo4PtpatxSLRYIwxPc8wijEk54dJ+caCNITMY9YtmwZQRBQKBRiFzTfk4TKhFKWKHbeeUf22msXOtrbGDx4OFrn6OgoorRi1sxZ3HnnnSilWLlyJR0dHSa4dV09q1pWMXr0aL773e8yfPhwBg4cyCOPPEqxGFBVVU1TUxMdHe0Ui+1ICV/84gQOPPAQPE8ycOAAWlraaO9oorq6miuvuJZFixZneFtptwjt4t2Y9m6++eaceOIJVFVXg9Y89thj3HPPPSCzsZxOOOEEJk2axMyZM/nTn/5MUCzGArMUAhVFvPTSS7b7nEWoRBEafqgVUgh8oTnqiMO46rpbmPfhbDbZcDTfPPLrjBozjGrfQAZe3mP33Xfi5dfeYGVzE4oieekR6QiFoqMY0NraSv+GepvBzwVET5ZUvG5i5mYDUbu54f5P7S+Z9U5qSgo7S122sfiEO6UVluF7QkhMXDeBOUJQsdWQEBLpQV7kASgGRYPTuIMnDUpYFzQ3YJgxkZ3meuqd6S+6/DXHttPePM5utZyHT0/sO3ZFLrVYiYEsd80AaFFkgMPUqUr8Iq16PnjrrMh9Glr7AnqWpZcqoV0+FX+KZYuS/cn9loxDqQZdWrqbCN2+uJdUviAjv+qY7ydIa3atde6NzgBOXOE+1mEyc7V03NeEnlZuOD5tGeWokvH9
rPTQXlA50bL0t66oN83rLJWtBsfpWQXpkSqxKLoemAbcmLp2LvCo1voSIcS59vsPganAePvvS8CfgS9ZYOlCYHtb5ZeFEPdqrVf09PKMcLoOTKA1SiIlMH3GVVlPWeoON+mZKs1s17fUtak2uOCtnXRIKDEVTmhNmav2PaVw+TRQZ+T87LUYlEhr4GnqyzangSL3+gQsIv2bhswxLJp4HgnQWqZgpfIGv+7kVkURysYj0tpYEgXFNgrFNsLIpZi2iogJYGG/qRjAygadNsCO5/kWKPJIohlZs/8YBkuec+595jtIke6HxPUscVXLbqNZGSQZN2N9tNqoEW5cdMkQKK0QsbJlp41OVDZn4eOUq3Jg0SuvvsKRR3zT/mbGb8CAATzxxBPkcjnOPudsXnjhhZRQCyeddBLf//73U8VYi5E08NYFv3B165tZa+okpSQKQ8IwQAjBPvvsw377TUVFJm5RpDS+n0MIzaRJk9h+2+3iwLnCk/z1rruYOXMmJ33rW1xwwfkMGTyYlSubufiXv2De/Lnkq6rYcqsteOvtt1i6dCm//s2v2XrbrRk5cjhnnnk6AsG0P0zjqaeeoaOjg68ctD+DBg0iDAzYmVhmGEV5o402wvO8bEeU4WmdrBKVGW8IMUCRAh0gKGJid4VoJDDIlmcKdUBhQ0M/rrr6Gmpqagw4ohR1dXU01NdTVVXF7bfdThAERFGE7/v88NxzOf2M09l0wgRyuRxBMSAIAzwvxz77TOGvf/0bKoqQIqKmdiCzZn3EKaeezLz5H9PQz2ODDUfT2gpXXXEj8+YvBKBYKNLe3s6cOXM4+OCD6devH3M+nkNrayueJ2NrHZNlz7hlATzzzDMcffTReJ5kzpwP8XzJu+/NZN68PyOEYMjggey+2+5M3GITGhrqyVflEVLygx98n3322YdCoRADE67jtdLkc3lefvll/t9F/8N3vvsdxo0bZ/iLFrzxxhtcfc01CWiG4QO77bYbkyZNorm5iXvuuZu5c+fFmZg8YfgSwEbjNmTTTcfbtPHSxPLSCs/ySkHAdltuwg++dwpLlixlo7Eb0jh8KB2qDU8opJQMHTqSeXPnIYVk4MCB5PM+mpAPPviQQqFA//paBg0agO8Ru7I5YMIxiq5lA6OsqtQ8U9Y6Ko2qdV7JCVBTUlz8ErNvW+4shEnQIJzdlePZoHUU817fz6M1REoRRYk1prZWUBaut1ZqWY7qDGu64itdqjdWmHDHImWBGcd8Y5CqPBnlPypzjppBzAAXjyjh1Ml964oMs6ZIdPG5a9LddFsSB6l0v/5seroUQOx0AJLa6GNnz8zpi/klEdlTC+4/feqsp17QWgQz+uBVPQJFWusnhRBjSy4fDOxuP98ATMcARQcDN2rD7Z8XQgwQQoyw9z6stV4OIIR4GJgC3PqpW7CuUyVWJ50olcliPVVM5VxQ+qZcU+K6RGmBrKxwlhb80rgJWQGxq+eShz+PFGv1Vk7UsdAoNCUniUawNl+d8N/3DXMAkegU08EdBziYBSukZ5WJdDYdU47VTrvZJYRVBKOwaNJH65Ao6iAM2ikW24iiIkoFGLdHRZLyWJeUmx10BxZ5nsl0JqW0MTAUKfgEp6wkz5WAlkKm+iULFkG6+VmlykJNODusbIL47NhVtoem2+xKT4TfrNWEjhW0zsWkTwnNv45CBx/Pm5NSoKGlpYVisYjv+yxevJg5c+bEz0khaG5uNjNBpKA2lZ0O3Zkkp9eyEeTN2CRDIVKFdTXXs3MhPW6e7yOF5PU3X+fDjz5KQXgxwmaCAu+1F3UN9fi+z8knn8zPf/Yz+vfvR3NzEz//+c95+KGHkVIQFItcf/31tLW1ATBjxkv86LzzuPQ3v6axcRhnn30W226zHReefz4vvPASCxcu4LRvn8KgwQNxmKLLkKeUYvr06axsbjZ8TaRa2cNkaBw2jJ133NHebQP7WvcJKXzjoiV949Za8qxAkMtXsdfee1NbV2sAWqVMwG3b13vssUdchUJHB/X1dUyaNIktt9gCNHGga6UUo0aOZPSoUSZ2GCGQpypfjRQQFAtoHeB5mqeeeobXXn+L4Y2NTJgwgSAMYv5WKBQoFouMHDGSESNGsGrVKkaNGoXv+wghyOfzbLzxeOrq6sjlcsYaRSnGjB7DyBEjCMIOcn4Vq1Y28+EHH/Fw8WEmTtzYWMK1tSGlZPPNN2eXXXZJLF9ShxPSxoUJgoA99tiDI488Mp5HcVBvYOzYsUydOpUHHniAuXPnxsqn53koFZHP5/jG149g0IB+COuiLQSMGjmckY1DkFExdt/TNr6acbsNUWE7G44eytjRQxEawqAV6RvXlNq6frz8yhtMf/I5crlqdtppV4TIAfDMM8/R3tbGqMYhFghXFuDScQD/siSUAYa0RllQoxzeg07kO935124pzZ0zULog5QaukUiEZz7HYLafI680SkEUKpQCtDL8RYGWLj265YNlANZMc926TwGEifSVdhh3HELE23Ksqqea3FMA5jQfTT+TxGnSZRZ7yed0PMj/GOoM5FT+ey9loswQrB0hMXMoKuz3jLxRUo+U4OvijsV1LuXvFc6VymM+rafPHZURy7vVg3qkSubA54cJrW6Mokat9UL7eRHQaD+PAuam7ptnr3V1/T+YnITgzv3txlsJaCSSbbXM8ckap0r9ZddmNoOKfXhjJdMJDl3f02M5sS/xmm/n2h/mtOjWFZVxucn+bEpKB1Hu/tYea9RtGW7zNxJAj6UKCxKVr0hXz5aW2/17EgvArjz5y76crDRlxWorBCfCtcrer10coiSFsflgBZ6USYwR8DVhGBAGHTYuURFtgaIwbDexiUhZE9l3dK6zcLGyY4VWWmsiz/NMrByhUjtrYhmUOXEscecTMeCSjj+EcfsqeX8CnGdUoxRY1DvKpC3HzSms14hI9J6unk8Lz2UGXltwUkpJVVVV5n35fN5adJh4LPl8Po7xpLXOxGyRwsTXikJsv7rT8u6UKaeGJQBlp+/ddppO7ul03fSVlJKbbrqJK6+6iiAMwLq7AOQ8n0GDBvHSjBnU1dXxrRNPZI8996RfQz+01syYMYOmFU0cfdQ3caCu53smCC3CWLkIwQvPPc/+B0zF93PsueeeVFdVsc8+ezNv3gJaWloYOKg/QqgYlHFA4/nnn8+Ml2bELNz1u+ymzwCmTpnCznfe6VqIwENqH48aRKRN4OlIIrXsxEeUho6OArfcfAuRCmlpMaDM4d84HN/3aW9v5ze/+Q0NDQ0MHDgQgDlzPub222/nH//4Byubm/n2qd9m3LhxuLToTU1N/OlPf2Rl03Ia+g1h9gcfISU01NXhy2qiwKNlVTtCwEknn8yZZ5xBGIYxCOOAMzcfIhWRr8oxYMAAlFIceughTJmyXwz8uiDALrC0kCFS5Jn1/nsc8tVDzZSw8cK05QOLFi3mjTfetM+mJ5Wx8pFC0NrWxrk/PI+a6hqamprI5fPU1zWABYNWrVrFO++8Q1tbW2yJBthYWCaz3XvvvkdDXQ0IE4hbSKjK+4xqHBKvYa2F2Yu
0A/w16AiNyyinkSJPGElyNQ28/uaH3HjLX1mytIk999mPqQfsD9Jn8aJPuOdv9xJFEXvsvjO+JwjDAImJU2TKdsHREx6X5e7OmsiuVV26YtNZDMmuRZEAO+5HgYgzKpaKXY6XpIOkm4DCJmuds/LU2mSCy/s5dATKN3HnosgGulYalLbZz0QcQkiXeWeqoZk2ZZiGTl3VGu34vU4Me91tGmLLq85gkblDCDPuZk7boOnxLTqxkLZ7oZQSz/dxPCbu1zSwVK5JKV6tOgW+7hwXzvRwVmYwNS6VIyq1LO9BrhFeBWfPri/du821vpDZSy3VXcIE28Xx28vB6ZWK0r2N2ZNtl8jWzfKlzJxJ4UfxvljulboXwE9KbXPPliNdZv51au+6hTOs01QWSiwR73rV1RpMfM4ebqqAH5QDxsuV9Gl11E8dzFprrUXnY/DVJiHEKcApAI2Nw/uq2M8x6ZTs1PNgljI8YrBoPVeoiErli09TkIAurQf6nCp9SyWNSnG5sj8nwnzy9l50ViwM6Bgk6m2MotWmLgrr/HaRMvvX1r1BIbqKOfMpK9Wr/ktJEjqtXIlEYEhiGqW/kx1aYa2MUgCR+24E/4AoLBCFBbQqokILEhXbUGHBBDMVDm4pBRNSbbMKuJE5JZ708HM5cn7OYlQ2A1dsqZLwqzQ/i8txTckoBGa9uXTPgnTcEgcW2T7D4YSrHyk/sRJyb0/ighiQzSmC2UC92kqUzhy/dNTTLmqg2XrrrZk+fXoKUIRcLk+/fv3wPI9p06bR3NwcK/cCwbBhQ/F93wKjEoU5/dfaswqvAWe7W1npGeGSm7vv8VoVJHNHdB75pBztOiULPGmNUpHNDGbWl9aanGcsVqryeaIoIufnqK2uoVAs8s9/3sfOO+/EDpdOsuxVZCxI3GelFJ7vcc+997LBBmMZ/4VNuPmWW1AqYvSokfTv399YTOgEFAmKAVIaF6KxY8f2GihqbBxuQAYhTf9q455ljO1M6HBhA7xLXCD6ZJ63trbxw3PPQ+mATcaP54o/XxHv+cUgYEXTCp56+ileeuklBIK29jamTZtGLpfju2d+h2HDhvHEE0+w+x57sGz5cmpqath0080484wzaFq5Eq0Ufk6yzTbbIaklCqvRyidSZnzr6+vJ5XIsX74cjcmApknWYBqMBMjn83EgddcGISAKNUJ4DBk8CCF8lg9eFk8NrQVCeAwfPoJPPlnKRRf9lIsvvqS8YqcFnif56UUXceCBB6AizdVXX8PBB3+F8eMnUFtTQ2PjMDQwa9Yscrkcw4cPp7a2Np4HNTU1NA5vZPnypSxfptAqxPMk8+bN5Z233qb6hKOYsNEGeNhkCNq5Xhn1PVIR2sZWUwI8Icnl63hs+vPccutdrGptZ/e99uTy3/2amhqfjkIrV151Ne+89RaNQwYydsMRSCIECilMEoF4ZWgvw8s0GpQNjCtAC+valbJgLYFS4vFxSReSi53vTgJNp/Zry0+l8Kz7mQGNXKZKLZwSb4AjlEZ4EvICIoUOzR5i3Gw1OrKAisQEwDONy9TZcR4HhmgSgKAcT4qd4bRxkTPdo2PgC0EcsD0uxYFv2lnS2n0It39nM76V44RSCmMNJpzbGzG/y4xBF8JAOeumUgdod02UfC/zlk7PlXljFy0pd19PZVngplfAkCu3s3zYXRgD8zupmOT23bI37/50VFq/NGit03NTZNvQW0CqO8rEKktvwD1Ql+6YPXVf2sh8Pa0WlXZzRqrSFansZcoUPcqm5h3hp8V3+oxWFyhaLIQYobVeaF3LPrHX5wNjUveNttfmk7iquevTyxWstb4KuApg0wmb/0dMc4tnp7715mGR5nTrqVL6tIDRWrSW6h31Gt/uROZpawEQF5UGLHRFIsvnxsS2bDWS+D2QUuB7XXhlT/QsumUF2k6upU7o7XSskSWj2+tEyXcDJSxYY1AKoihCRUWisIiOgtjlTFvQCB0Yl6QSsb8zJcK5lB5CSnK+AYlMWmwNKWE+i6qk9B1XWkm8gHS/xU1xbXTXtQt6nXpWy4x1VXnSnQSBGO1wQnT81VkmmWDG5iReGvComynQmU242CDm1Lu+vpYttpiI5/m2Lvb82Y7R2LFjjSKjkr7T2gTBNp9BK0GkTF1iBcm1pRISqXpq+59O9UWF5Cz5jPWJ4sCDDmLCphOsEiYw2esMIDNo0CCbEUtyzTXX8Nbbb9Ha2spzzz3L5Mm7IqRARcYaqKqqCt/PobWmo9BhszeBCkJmvPQy5/7wR0yZMpWbb7mJ+oZ69t9/CgMG9McTwriCWfDD980p3k9/+lOiMMoARVIYC5c0Jcqo6c3+Df3BZj+M22w0XBKdx1pupECimNcIQUdHB1+cuCkXXHABW265Jffd93d2mLQDo0aN4vyfnE9rayu33norl11+GcWgSC6X47zzzuO0b5/GW2+/zV133cXuu+/Ogw8+AMAhhxzKLy6+hAv/5wIWLpxPbb6BbbfdnpxfSxi4LIqYmDMWYDvmmGNYsGBBytIlmcBbbb0ll112GYMHD+Zvf7ubiy++2M5XkekLT0ruvvsOxo79QqJk2xkshGTMmDG8+eZbtLS0lIDbJTNGSOoaGvBzOV6e8TK3334HBx54AFor9t9/fyZuMdEkI8LETyoUCkyYMAGA8ePH89vf/tZYLgrP9r0il/c57NDDWLp0CQsXLmbTL2yAmdMqNeam3QZ4VkRaooWH71fz2BPPcfMtf0UjOfDgr/DrS3/JiJHD0ITccP2N/Pbyy6irrmLKfrszsF81KurAI0JIG7dKygyrS7dbKTNfjCWRAYmUSvEyrC1SmnHoNB9M/Z++JXW4U2pDKYVAS20SNlkzoEhGhpcJZbImagOES2l4jSckuVyOXJAz6eJdHbUJgi0d4OTeU44HamxAak230kAJIGTa46yKrLVUFw+mgfeu/imVfC4PspXW7dPLUOsCrTZIVKJzpPeaNHiWtcoVZjJ8TuTmnlwY09QtJpMCI3sl72b22/X0eaSuxr1UVEqLSb0bzu7my+drYqwuUHQvcBxwif17T+r6mUKI2zDBrJstmPQg8AshxEB7377Aeatf7X8/SuTM1YQp11PvaXWZ/OeaKmlHz/cYbMGxyoyKXvL854uhQSmIYP7G3pqxQJO9P9OKboHXNdPeTgdMpSBJ+mTUCujxFR0/kirPWLWYgLoWJLK/RFFIFISoqIAKi2gVEBY7CDraUMp8j4NSILvgR2VOExHkrMuZlFn7DKVNQNRSOFykGu3AHvNXxe0tVYo6k05OxN2cjcGSHgRCnTrfFQ7ESQA5lxnLIQrJ/0b4VdYqrSv+UTqVYkXbM3FMPvzwQ6677joaGvpTna+KXUHinnLbgf0aBAFbbrUVe+y5B0orlNK0tXWgtSDn50AX4zoLbbNrVUSJxKO1U+x6hjcFicKhrXasVISQgt13353Ju01O3alj0Ct26ROCjkKByy+7nOaVzdTX1/PNo44in8ubmChac+aZZzJ1//157bVXOf8n5xs3MinRKmL2B7OZM+djrrjiCvr378eRR3ydLS
ZuThSGVNfW4Xs5XKBvKU2coi233BKHbhjdxWmb3feQcKhShl8IGhr609HahtAK8PGEDxa0c8GgC4UCINh222249LJfsuMOX+LJJ55k2rRp3HzTzTQ1NXHtdddx+mmncdZZZ1FVVcWF//M/fOc73+Gcc86hvb2d88//CYMHDbYALPzyV79k2+2249hjj2FY41DO+a9z+Pijj7jyymvYf78pbL3NNhag0ni+jC0nZs6ayUcffUQ+l0dIgSc9lFK0FzoYPGRwbL3W3NzEu+++i+97SOnjXCFNW6C1rS3rTiYsX1Iqzjo2depUNt9887L9WVVVTXV1DYsXL+bdd9/lkl9ewrJly/CkCaY9eswYRo8ZjXEJcutYxq5UNTU1TJ482YDUbmiUIghCcrm8BVEVCm0C4+sILaybWWrNKwtyeX6eN958n7/cehdawIknfYvzzvsRw4YOoVDo4H+vuZqf/Ph80HDoIQfypUmbI0WHAXa0bb61iFFlAOQEMDNAcZxVLPUbkLhgdT0TEy1FxFcwab+xrUvzKxA23lvGisMeBJhYdRj3QG04r/Q8PDTVNdWmr0ITDN7Aaklg/VJK3GYSNzJtryfZAJM548B4x29i6ElYbMoCTWVxKJ3ErgObvVOFcSlm/SmkS7meAjOTPky+G7fJFPhV8q711BVY1HvZeW2GpUhTqVtXZwVfl1xNraSu8MPVbUoMov476R//PrTmR2Td4Sk9AkVCiFsx1kBDhBDzMNnLLgHuEEJ8C5gDfMPe/k9gf2AW0AacAKC1Xi6E+Cnwkr3vIm0DW39eSMfwYM+0xpicLmXAQIkLRJd1qqj81a7ZmqU+qlfafLti+jc6PEqmSbkeSBTPnuIFxSQoEQYS5b3XdevpKU23AGnPKmvJ/bFAoGOFMF2FJHx1+g2GhFUoEwU4/VRpPSqvWbc94MzuU+OUmZsu7EnZQpzonoJfdNoFwikURnA2GapCVBSilSIKA4JCuw1eXQQVWbeZRMjvqYlCCDwb8/LAOTMAACAASURBVMGXXlaREOCRxNAg0/MlBceBRNP9WublnSwpux6H7uqfKFKizFX3zWZNsnNUYFPMC0mEQivV5dgaRcgCMCnwadmy5bw/cyYvvPACv/71peTzeZwLiNYqAYdSfwWCKIo49rhjGTduLO+/PxOBZNny5dz79/vZc6/tGT1iIL7ngy8tgAXZ7Hid2wflXC0N6BaPUhf9V/qYJol9c/8//8kjjz5qV00Sowigf7/+/Nc559C/f3+EgObmZurq6/B9j8cfezwWz7XWTD1gf4pBkSVLl/LAQw8moIE24M/YsWMZPryR3Xbbhc02m0AUFMj7OfK5KgodBfN+IVBK4wGXXnop8+fNS+pv+zbdTaXushrNxM0ncupJp0IKkFRK4Mk8YViw4FASe0BIY6EB0NTUzOTdduPqq65gg7Gj+eiDD/nRj38Ud2AURUz7wx9Ytmwpxx17HAAXXHA+J59yCkoppk2bxnPPPceBBxwIGDexOXPm8KMf/Yirr7qavffZh5tuuoVfXXIJjz/2KH+57VYGDxtiwYAEmEMnca8OO/QwBg8ezODBg1m6dClXXHkFVVV5Mxet9VEu5zNp0iR22mlnokgxYuQIfvfby1mwcJEFIVVmbpuGm3f4vs9Xv/pVjj32WDuHSieR+f7oo49yxhln8vLLMxgyeHBsnfTkE09w3333Wb5ox0HDsccey7bbbsv8+fO54ooraGlpMdaPSuNJiRCS9o4O43khbMwakbhQOYu9JIi7R3VVNYHyuP+fj9HeFrDbnrvz4x//mEGDBlEsRlx95bVc8JMLkVJywAH78aVJW1FTHYFSeAKEMvGWNNKARHgx6Jx2nzSN6Hm/KOVmMQgV/56AluUe7CTWZFigMHUt0ZJjViNNLCcPD1FVZa4FgiAMiZRLSED8t5P7lU6yYbqSnbtdcviUraHbWS3EE/9Nl5mqagKIubrHfZvqM5HKoonhw13NQfdZuK1TdwYV1jx1/a70L30i64ss+FVqBdTNYyTZwNx70mOZrp2OAcpe1Lx7Wn1syjwmzNzPFFlSVtIHTk6pUL5bnbliu65imdy9C93zGFMhANXTLb2pWgWv61lPq1SeLl8xUTIHey6n85tFuR+7uqSzc6iS7lr9aD1r+zlDlWQ9O7KLn/Yqc68GzuiinGuBa3tVuz6j9KbVHfXhptDj60oVHVeFkhPgSqtUCXhVyeFyJWE9KvF9rbTLKz3w7olioSer/vf8nIC16Ctdvg4VdLqupNN7LCQuy+zv3feVymj7ZagSZicwbjM93tZ5DHq3P4l48xQR9uTYZuvCnHKjPcKiRgcSFfgQFa0iFVmM1kSyENJD+tiCrEisNcLlAMZk6+lN8MmechSak90oaY/1H0oHURaYk2rh3HgALTTmzFzbUtzJrTZtEzauj8CAEEoRRiFhVICoAxUVCMN2oqhAFBURROgYENRIZVqrrfsHKExcaomL02MO+iWkADZTBQ3CBsvViVLpRHcyvaKBEKF0zMvMmBpAptQWycxfEQtZsTjqFIjU2GinyXbGR1CYk2SnSEvhWeXHPKK1UW6S+EkSIUPzDpvByLYM5WJdkCgyro0u/PHoUSNMrJnpj/Piiy9Q6Ohg4he/aIJSY9sjBBnoyVksWeH+rbfe4pyzzyYoFJg4cVOWNy3n5VffprW1lZNPOhLtg6dCBFG8ruL+s+9x2YrMnJJEpOqrhQ3IbN9pheUk/ooEm2TcKGvmXhO2xI6d0Dz2+KP8+U9/tGsrWZ8IwbChQ/mvc86Jx0FI2Hevvdh6661oaVlFv379ueqqq5g7bx45z0sCzyvN0MGDOPKIw0FALufT2DiCUWPG8Oyzz/Ff//2jeA+QUtLe3k4xUggknpToSHHvPfcw46UZ2anQ3fLUJp7JlClTOPnUb1lXP0VEkdvuvJu7/3G/iTODJufn+OrBB7OqUMTP5xk0cDBawdKlS9h55x3YYMPRzHx/JqecfDL/evlfbL/99rGFjNaaP/7xj/zllr+w77778utLf01tbS233norl156KR2FAsJa+zg3mn/+4x+cfc73uPzy37H9dtvzox//hIcfeZgly5fR0taCVm69Gn7gLICk5zHuCxux8cYb40vPuPRpo9hJ6aG1oKOjiFJQXV3DhAmbApDL5aivrUdq0KEEJRFK2iDRGt+rYtDAIVTncvgCPv7wA16Z8ZJZjSV9LCz6/d477/D0k08iPRcw2/CVl//1MtOmTSMqkYl2+vKX2XabbVm0cBHXXXstixYtRuDjmL8zUtlww9Fs9IVhIJps6nqzqqQFLKS1NFMI/Jzmg9kfM3v2HIaPaOT/XXgBgwYMJCxGXHrpZfz2skvxhOKIww7iy5O2wRPtyAikyJtypSaySrHJ3OXWTAISJVahKVi/FETq3E22XWXkQu0+GEpniiun3pp4d56NwydsXD4LkgoN0vJ+W3fHy6ur8rbPihR1SGyTpY11oFYReOZwQGtNpDRSKrsHJ9zefLAVt7zF1ECV3GXjDCUcBo1CS1J9iOGLwmVJU3EWObOYJ
UlAcbuvCxeXJ91D7j5S75OGB1t2pVP7cimVAk/x+Gc7PjNOXZMu+VwO1OpZVpVlsi52epMmEyalvCtWZ/mwvMToAkAne3M8ktoAtyY8mJ3rWpZ9n9nTZUk8qz4G6rRASM9Y7goIrTu0OcoSbuqTWI1aKacUQeiqqyrVe9LU0/2lQJYRXHrummRqV16VrvTIvtIdHemuC9WAkF7ni2UQGsNGUnXueQqXp1h/LK1j96RS80GDMcAvrUJpOVqbw9gKqtW5mp0Kq+CppBNW13LtUwezXneoL2HTCl7VE3DzeTZl7SvcZC12+eqSE9bWSNmVnNKsZRPcjFuLE4yyOnbypQJLkorGuMJ7RMml3rymU3kp5piO36AsZqHdyaYFGtwtSpGcRIrydcnWqPsGJoYvlbUgvks7F6yEtRvQwt3nhE8Hpti6uN81MXwUt8OBRGERpUKEComigDAoEIYBKgpxWaJiQRwdAxixJYV9tXH5MNY2wpPW5SwBBJR27mt2LEpAonSb06d46c0suzw6b3NZG69uBIZuSZBqaSLw2iolLnxOoSD+m9mCU3w/vkdnT72PP/5YTj/9dCKbbcoBglqDZ8NKO6+V9Ik4JWCrEICK0Agef+xxvnvmGcxf9AkdHQX61dejwyB2zTD3p8/f4lzhBjhSVjiXzsLCtsAFQxdWu3KAdloodkqf7ScVu3vIODsZVumLwS47F6QNIowwbS2GRfL5PI2Nw8nljGWLRljgwswp0x+C4SNGGHDN8qk333iDv/3tLlYsX86QoS7Yt6JfLke/hv4Maxxm5qvn0dDQwMiRIzP92dMSFUIwaPCgeJ7W1dcxYtQIM3di3qFYvnw5N93yF4JiQG1tLTU1NYBg4cKFPPTQQ2y88cZcc801PPvsswghTByYXA6lFIVCgUJHAT/nM3bsWK699joGDBzABRdcYK1mbEDveAJAGIbcfsfteF6Oc889j0t/cylhFJLP57njjjtpbm7G9307Pi62lnGFu/HGG2loaCAMAlpbW+O+TXeElJIZM17m44/nxnNx3ty5CGEs1pL4M4JFixZz+eW/paa2lkULF1EMilx55ZXcfMtfiDNKpfvUvqu9vd24RGniRRcGgblDCjYdvylTp07l/vvv5/3338cNlSclVfkqqqryHHf8cVTl80ip0YSgBYMHNzCssQqlWpBCo5SwgFLMEC3vURSLRWbNnk0Yhmy44QZstfXWeNLjsSen85vLfo2OQo742sFM2n5L8nmBjsAF0U/3l3YIs1tf2Qa7mWZWQ8oqspx1R+pCJ3kxuSV9b8KbRelvGYUmJQFouxdqjQFT3DjpuC6e9E2bMbGCgiiM+X/GdV+neaZ5h8rcA0lmS9cPtk5CxyKHq2hiSWbvVfFPnfrNuZmlSi1perasTtTV2u+BJ6T/pkvP7FQVy7mun5K6Zi2FK0ci1proHc/p7J7uuIKRBZRdE92/NZ4fiRD0KQXB1GNlxsqVF29lomRedvXScnVQPfzeZcV6ca+jROjos/L7xEumElk/c3MXdSn90MWtuvS+rr73kgRpObvrd5b9DHTyAipbn546q3Ojy92dYtVdU4bldp9YqCv6DwKK1tPnjrJ7y3paa6Qz8oZw/6V51zo7LmmXsqwqnwAc9qswaX5JWbGgQUXa4ht9sHn2ot6Z9PCCFCiTGpS0NC2yj8fFpN0FtYqvRWFAFBSJogCtQoiKBEE7QbGDKArsvSpWoIypfiftLq6nscSReNK4mbgA1poUOGStO+I5lqqvcOBKj6ToalMtFdJL1KZO07hUxpJuIdhNNHKYiL0enzLHVloyLrfUhNoFCnef3em1c0EBGDlqNEMbG2PQI4qixDzfZmQSns3CpFTSPqdEp4AXbJatxuHDqarOo6IQz/cIgwBPAA60EzaWR9xD0mJ+piVCKyRJBqHMqb1IT79E2I+Vr9hV0I2H+U8D++67L57nEwShAROliIPg1tbWUiwWY4U2ihT3/eN+/vGP+1NjpeMxcNYNUkqaV63k/AsuBIwlWdqNtLqqmv+74zbGjBlj3CqjEK09qqqrAVBRxPXXXU8UZa0EulvqDtDL5/PGnU8Ivvu973HKqafGa0AIwSeffMLpp53Gq6+8CoDv+wwePBiAtrY2FixYwEknnYRSivHjxzNr1iyWL1/O9OnT44xjRx11FMcdfzw7fulLPP/CC5x08kl88skn8ZgsXrSIxx59lLffegulFNVV1QweOpC/3HIzb77xBq+++hrSM9nE5s6bZ1OAe7Ta1PLS8/F9A04tXrzYlK20yQDl+9TW1sbZ5dra2ky2uCBgzpw5Zr1YACRflbdWVNC0shnheWg0c+aad4Y6QgGrWltoKUlrn5otSCGJVGR5QQKM+r4J7q6UYtWqVbz77rs0Nzfb1Od2DkoZg2zvvveOeUabFOlaKbbeajOGDRpIPg86ihDCT3RP7QJKK7TwCaOQtvaASGkahw+nuroaLeDNN9+kpaWFvffclR122AZfhAjCZG3ZeSfSbESkHa9Su5FWcSBtKZLsXOYREc+19PfeUBzbqASszva4NuHnHD9zbA2r3NjsfdKuYccyfN8HYSzplDIJ4IW1KHLBos2rJYIoAcjtmEoSvplupxRu/ZYE9Y736hT0IuxOUgZcc/3r7gPjWihirc/Vr6ueWX3K1iOdHzR9tcdSSFsLJOJYAsWu6xSD/RUjCm7v059a+c9WpA/LWk9riVIybR9TGpdMg0TlatCVi1kKh0FXNMFKJdHVpEp0FC3oZJHaS1oPFK2n9fSfRl0h8J85SFSpAFFJOcnnxLIiHXNFxtYcZN7q0rxXCmL0XV3TYmKy8aRP2FKxHtLIUOopEYuXmODRFlhSUUgU2ixnOkSrkLDYTqGjlWKxA3SIiJ2obJnO9aib1OhCSjzPI+f7eJ5VUJRKnUob8CR2R8MVlThFdUflp2Tn+nQFEvU0pRWA1nipuZAOkqq161OFS/2d7v1MJTHPSAtomO9ZJfDBBx+iuXkVnudxyiknU1tby1133cXM92Yipcfo0aM55thj+OSTT7jpppvo6OgAMABLStnWWpHL5UHArPffp7WtneoqHyGxcXGcGX2p06MZExW3zUNIAw5qIQhChSe8VKr4tKVV517MrBuRkum1ZsrUqey7735EoUr6Q0pUFOFi4Li2bLLJJowaNQqX5UijY1Bpww03xMv5DBw8iD333oswCOJ7XEYd3/eZMWMGhWIHjY2N1NXVoqOQurp6cvlaCsUiq1atAiGorq7u7PZQ0VrXtLS0AFBdXU0+nwcSi6disUgun4+BJT/n079//9gFLooiPM+z9asn5+eYOWsmZ599NpMnT+aGG25gp513pqa6mubmZlasWJFJWa+15plnnuHIbx7JuHHjKBQK5PN5Jn7xi/Rr6M+HH36A5xnw8WtfP5RjjjkGTxoRb+jQoXHfXnnllTQ3N8flSgsS5PI5GocPx/NMcOvDDz+c7bffPnMaH2eOAkaMHIXSmo022ogbbryBYrGI53n079+fbx55NMVikV/96lcGtItR0+zc0Uozd+5cfvzj
[… base64-encoded PNG of the rendered inference image, omitted …]\n", + "text/plain": [ + "<Figure size 1440x1440 with 1 Axes>
" + ] + }, + "metadata": { + "needs_background": "light", + "tags": [] + }, + "output_type": "display_data" + } + ], + "source": [ + "import matplotlib.pyplot as plt\n", + "image = plt.imread('./tutorial/demo/out.jpg')\n", + "plt.figure(figsize=(20, 20))\n", + "plt.imshow(image) \n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "d19RYN7zSEiT" + }, + "source": [ + "If you would like to use ONNX model for inference, you need to modify the `pt_path` in yaml file to the path to ONNX model, and add `--onnx` arguments when you execute `inference.py`. " + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "Kk9EXEqcSKeN" + }, + "source": [ + "# Evaluation \n", + "In this section, we will evaluate our trained model on COCO128 dataset. Execute commands in the folder `yolov5` and the outputs are as following:" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "r1nE3Ia9SDwV", + "outputId": "28ca5b5e-88e5-484d-f272-55a8d4181680" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Namespace(augment=False, batch_size=32, conf_thres=0.001, data='data/coco128.yaml', device='', exist_ok=False, img_size=640, iou_thres=0.65, name='exp', project='runs/test', save_conf=False, save_json=False, save_txt=False, single_cls=False, task='val', verbose=True, weights=['runs/train/exp/weights/best.pt'])\n", + "Using torch 1.9.0+cu102 CUDA:0 (Tesla T4, 15109MB)\n", + "\n", + "Fusing layers... \n", + "Model Summary: 164 layers, 6772285 parameters, 0 gradients\n", + "/usr/local/lib/python3.7/dist-packages/torch/nn/functional.py:718: UserWarning: Named tensors and all their associated APIs are an experimental feature and subject to change. Please do not use them for anything important until they are released as stable. 
(Triggered internally at /pytorch/c10/core/TensorImpl.h:1156.)\n", + " return torch.max_pool2d(input, kernel_size, stride, padding, dilation, ceil_mode)\n", + "***cache_path ../coco128/labels/train2017.cache\n", + "Scanning labels ../coco128/labels/train2017.cache (126 found, 0 missing, 2 empty, 0 duplicate, for 128 images): 128it [00:00, 12775.64it/s]\n", + " Class Images Targets P R mAP@.5 mAP@.5:.95: 100% 4/4 [00:04<00:00, 1.11s/it]\n", + " all 128 929 0.313 0.549 0.487 0.314\n", + " person 128 254 0.387 0.768 0.725 0.448\n", + " bicycle 128 6 0.279 0.333 0.298 0.214\n", + " car 128 46 0.297 0.348 0.266 0.14\n", + " motorcycle 128 5 0.515 1 0.962 0.7\n", + " airplane 128 6 0.492 0.833 0.822 0.536\n", + " bus 128 7 0.41 0.714 0.723 0.605\n", + " train 128 3 0.175 0.667 0.472 0.256\n", + " truck 128 12 0.36 0.333 0.388 0.179\n", + " boat 128 6 0.104 0.333 0.229 0.0438\n", + " traffic light 128 14 0.0575 0.0714 0.102 0.0628\n", + " stop sign 128 2 0.965 1 0.995 0.698\n", + " bench 128 9 0.116 0.111 0.171 0.108\n", + " bird 128 16 0.53 0.625 0.637 0.277\n", + " cat 128 4 0.372 1 0.787 0.63\n", + " dog 128 9 0.644 0.556 0.701 0.437\n", + " horse 128 2 0.332 1 0.995 0.473\n", + " elephant 128 17 0.681 0.824 0.829 0.626\n", + " bear 128 1 0.519 1 0.995 0.896\n", + " zebra 128 4 0.788 1 0.995 0.921\n", + " giraffe 128 9 0.539 1 0.931 0.55\n", + " backpack 128 6 0.32 0.333 0.368 0.187\n", + " umbrella 128 18 0.392 0.611 0.471 0.215\n", + " handbag 128 19 0.126 0.105 0.097 0.0353\n", + " tie 128 7 0.314 0.429 0.432 0.317\n", + " suitcase 128 4 0.417 1 0.788 0.461\n", + " frisbee 128 5 0.36 0.8 0.678 0.424\n", + " skis 128 1 0 0 0.111 0.0111\n", + " snowboard 128 7 0.498 0.857 0.774 0.443\n", + " sports ball 128 6 0.162 0.5 0.183 0.107\n", + " kite 128 10 0.255 0.2 0.144 0.0468\n", + " baseball bat 128 4 0 0 0.0395 0.0109\n", + " baseball glove 128 7 0.166 0.286 0.29 0.146\n", + " skateboard 128 5 0.422 0.6 0.5 0.4\n", + " tennis racket 128 7 0.118 0.429 0.327 0.162\n", + " bottle 128 18 0.206 0.667 0.381 0.218\n", + " wine glass 128 16 0.209 0.438 0.394 0.242\n", + " cup 128 36 0.279 0.389 0.333 0.2\n", + " fork 128 6 0.105 0.167 0.179 0.135\n", + " knife 128 16 0.415 0.562 0.414 0.177\n", + " spoon 128 22 0.329 0.318 0.338 0.115\n", + " bowl 128 28 0.379 0.679 0.586 0.403\n", + " banana 128 1 0.132 1 0.249 0.0249\n", + " sandwich 128 2 0.211 0.5 0.144 0.141\n", + " orange 128 4 0.108 0.25 0.149 0.0885\n", + " broccoli 128 11 0.145 0.0909 0.113 0.107\n", + " carrot 128 24 0.203 0.583 0.387 0.218\n", + " hot dog 128 2 0.42 1 0.745 0.671\n", + " pizza 128 5 0.765 0.6 0.732 0.463\n", + " donut 128 14 0.289 1 0.86 0.662\n", + " cake 128 4 0.363 1 0.726 0.491\n", + " chair 128 35 0.185 0.572 0.297 0.132\n", + " couch 128 6 0.831 0.821 0.855 0.401\n", + " potted plant 128 14 0.355 0.571 0.501 0.325\n", + " bed 128 3 0.483 0.667 0.695 0.318\n", + " dining table 128 13 0.234 0.462 0.384 0.248\n", + " toilet 128 2 0.114 0.5 0.497 0.447\n", + " tv 128 2 0.255 1 0.995 0.746\n", + " laptop 128 3 0 0 0.0467 0.0304\n", + " mouse 128 2 0 0 0 0\n", + " remote 128 8 0.376 0.5 0.504 0.34\n", + " cell phone 128 8 0.155 0.125 0.0481 0.0298\n", + " microwave 128 3 0.299 1 0.775 0.469\n", + " oven 128 5 0.156 0.4 0.403 0.243\n", + " sink 128 6 0.112 0.167 0.0889 0.0779\n", + " refrigerator 128 5 0.631 0.6 0.577 0.376\n", + " book 128 29 0.145 0.103 0.119 0.0598\n", + " clock 128 9 0.482 0.778 0.853 0.687\n", + " vase 128 2 0.115 1 0.995 0.796\n", + " scissors 128 1 0 0 0.0664 0.00664\n", + " teddy bear 128 21 0.436 
0.429 0.496 0.218\n", + " toothbrush 128 5 0.204 0.4 0.417 0.212\n", + "Speed: 3.2/4.0/7.2 ms inference/NMS/total per 640x640 image at batch-size 32\n", + "Results saved to runs/test/exp\n" + ] + } + ], + "source": [ + "!python test.py --weights runs/train/exp/weights/best.pt --verbose" + ] + } + ], + "metadata": { + "accelerator": "GPU", + "colab": { + "collapsed_sections": [], + "name": "tutorial.ipynb", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.5" + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/utils/__init__.py b/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/utils/activations.py b/utils/activations.py new file mode 100644 index 0000000..ba6b854 --- /dev/null +++ b/utils/activations.py @@ -0,0 +1,72 @@ +# Activation functions + +import torch +import torch.nn as nn +import torch.nn.functional as F + + +# Swish https://arxiv.org/pdf/1905.02244.pdf --------------------------------------------------------------------------- +class Swish(nn.Module): # + @staticmethod + def forward(x): + return x * torch.sigmoid(x) + + +class Hardswish(nn.Module): # export-friendly version of nn.Hardswish() + @staticmethod + def forward(x): + # return x * F.hardsigmoid(x) # for torchscript and CoreML + return x * F.hardtanh(x + 3, 0., 6.) / 6. # for torchscript, CoreML and ONNX + + +class MemoryEfficientSwish(nn.Module): + class F(torch.autograd.Function): + @staticmethod + def forward(ctx, x): + ctx.save_for_backward(x) + return x * torch.sigmoid(x) + + @staticmethod + def backward(ctx, grad_output): + x = ctx.saved_tensors[0] + sx = torch.sigmoid(x) + return grad_output * (sx * (1 + x * (1 - sx))) + + def forward(self, x): + return self.F.apply(x) + + +# Mish https://github.com/digantamisra98/Mish -------------------------------------------------------------------------- +class Mish(nn.Module): + @staticmethod + def forward(x): + return x * F.softplus(x).tanh() + + +class MemoryEfficientMish(nn.Module): + class F(torch.autograd.Function): + @staticmethod + def forward(ctx, x): + ctx.save_for_backward(x) + return x.mul(torch.tanh(F.softplus(x))) # x * tanh(ln(1 + exp(x))) + + @staticmethod + def backward(ctx, grad_output): + x = ctx.saved_tensors[0] + sx = torch.sigmoid(x) + fx = F.softplus(x).tanh() + return grad_output * (fx + x * sx * (1 - fx * fx)) + + def forward(self, x): + return self.F.apply(x) + + +# FReLU https://arxiv.org/abs/2007.11824 ------------------------------------------------------------------------------- +class FReLU(nn.Module): + def __init__(self, c1, k=3): # ch_in, kernel + super().__init__() + self.conv = nn.Conv2d(c1, c1, k, 1, 1, groups=c1) + self.bn = nn.BatchNorm2d(c1) + + def forward(self, x): + return torch.max(x, self.bn(self.conv(x))) diff --git a/utils/autoanchor.py b/utils/autoanchor.py new file mode 100644 index 0000000..1e82492 --- /dev/null +++ b/utils/autoanchor.py @@ -0,0 +1,152 @@ +# Auto-anchor utils + +import numpy as np +import torch +import yaml +from scipy.cluster.vq import kmeans +from tqdm import tqdm + + +def check_anchor_order(m): + # Check anchor order against stride order for YOLOv5 Detect() module m, and correct if necessary + a = m.anchor_grid.prod(-1).view(-1) # anchor area + da = a[-1] - 
a[0] # delta a + ds = m.stride[-1] - m.stride[0] # delta s + if da.sign() != ds.sign(): # same order + print('Reversing anchor order') + m.anchors[:] = m.anchors.flip(0) + m.anchor_grid[:] = m.anchor_grid.flip(0) + + +def check_anchors(dataset, model, thr=4.0, imgsz=640): + # Check anchor fit to data, recompute if necessary + print('\nAnalyzing anchors... ', end='') + m = model.module.model[-1] if hasattr(model, 'module') else model.model[-1] # Detect() + shapes = imgsz * dataset.shapes / dataset.shapes.max(1, keepdims=True) + scale = np.random.uniform(0.9, 1.1, size=(shapes.shape[0], 1)) # augment scale + wh = torch.tensor(np.concatenate([l[:, 3:5] * s for s, l in zip(shapes * scale, dataset.labels)])).float() # wh + + def metric(k): # compute metric + r = wh[:, None] / k[None] + x = torch.min(r, 1. / r).min(2)[0] # ratio metric + best = x.max(1)[0] # best_x + aat = (x > 1. / thr).float().sum(1).mean() # anchors above threshold + bpr = (best > 1. / thr).float().mean() # best possible recall + return bpr, aat + + bpr, aat = metric(m.anchor_grid.clone().cpu().view(-1, 2)) + print('anchors/target = %.2f, Best Possible Recall (BPR) = %.4f' % (aat, bpr), end='') + if bpr < 0.98: # threshold to recompute + print('. Attempting to improve anchors, please wait...') + na = m.anchor_grid.numel() // 2 # number of anchors + new_anchors = kmean_anchors(dataset, n=na, img_size=imgsz, thr=thr, gen=1000, verbose=False) + new_bpr = metric(new_anchors.reshape(-1, 2))[0] + if new_bpr > bpr: # replace anchors + new_anchors = torch.tensor(new_anchors, device=m.anchors.device).type_as(m.anchors) + m.anchor_grid[:] = new_anchors.clone().view_as(m.anchor_grid) # for inference + m.anchors[:] = new_anchors.clone().view_as(m.anchors) / m.stride.to(m.anchors.device).view(-1, 1, 1) # loss + check_anchor_order(m) + print('New anchors saved to model. Update model *.yaml to use these anchors in the future.') + else: + print('Original anchors better than new anchors. Proceeding with original anchors.') + print('') # newline + + +def kmean_anchors(path='./data/coco128.yaml', n=9, img_size=640, thr=4.0, gen=1000, verbose=True): + """ Creates kmeans-evolved anchors from training dataset + + Arguments: + path: path to dataset *.yaml, or a loaded dataset + n: number of anchors + img_size: image size used for training + thr: anchor-label wh ratio threshold hyperparameter hyp['anchor_t'] used for training, default=4.0 + gen: generations to evolve anchors using genetic algorithm + verbose: print all results + + Return: + k: kmeans evolved anchors + + Usage: + from utils.general import *; _ = kmean_anchors() + """ + thr = 1. / thr + + def metric(k, wh): # compute metrics + r = wh[:, None] / k[None] + x = torch.min(r, 1. 
/ r).min(2)[0] # ratio metric + # x = wh_iou(wh, torch.tensor(k)) # iou metric + return x, x.max(1)[0] # x, best_x + + def anchor_fitness(k): # mutation fitness + _, best = metric(torch.tensor(k, dtype=torch.float32), wh) + return (best * (best > thr).float()).mean() # fitness + + def print_results(k): + k = k[np.argsort(k.prod(1))] # sort small to large + x, best = metric(k, wh0) + bpr, aat = (best > thr).float().mean(), (x > thr).float().mean() * n # best possible recall, anch > thr + print('thr=%.2f: %.4f best possible recall, %.2f anchors past thr' % (thr, bpr, aat)) + print('n=%g, img_size=%s, metric_all=%.3f/%.3f-mean/best, past_thr=%.3f-mean: ' % + (n, img_size, x.mean(), best.mean(), x[x > thr].mean()), end='') + for i, x in enumerate(k): + print('%i,%i' % (round(x[0]), round(x[1])), end=', ' if i < len(k) - 1 else '\n') # use in *.cfg + return k + + if isinstance(path, str): # *.yaml file + with open(path) as f: + data_dict = yaml.load(f, Loader=yaml.FullLoader) # model dict + from utils.datasets import LoadImagesAndLabels + dataset = LoadImagesAndLabels(data_dict['train'], augment=True, rect=True) + else: + dataset = path # dataset + + # Get label wh + shapes = img_size * dataset.shapes / dataset.shapes.max(1, keepdims=True) + wh0 = np.concatenate([l[:, 3:5] * s for s, l in zip(shapes, dataset.labels)]) # wh + + # Filter + i = (wh0 < 3.0).any(1).sum() + if i: + print('WARNING: Extremely small objects found. ' + '%g of %g labels are < 3 pixels in width or height.' % (i, len(wh0))) + wh = wh0[(wh0 >= 2.0).any(1)] # filter > 2 pixels + + # Kmeans calculation + print('Running kmeans for %g anchors on %g points...' % (n, len(wh))) + s = wh.std(0) # sigmas for whitening + k, dist = kmeans(wh / s, n, iter=30) # points, mean distance + k *= s + wh = torch.tensor(wh, dtype=torch.float32) # filtered + wh0 = torch.tensor(wh0, dtype=torch.float32) # unfiltered + k = print_results(k) + + # Plot + # k, d = [None] * 20, [None] * 20 + # for i in tqdm(range(1, 21)): + # k[i-1], d[i-1] = kmeans(wh / s, i) # points, mean distance + # fig, ax = plt.subplots(1, 2, figsize=(14, 7)) + # ax = ax.ravel() + # ax[0].plot(np.arange(1, 21), np.array(d) ** 2, marker='.') + # fig, ax = plt.subplots(1, 2, figsize=(14, 7)) # plot wh + # ax[0].hist(wh[wh[:, 0]<100, 0],400) + # ax[1].hist(wh[wh[:, 1]<100, 1],400) + # fig.tight_layout() + # fig.savefig('wh.png', dpi=200) + + # Evolve + npr = np.random + f, sh, mp, s = anchor_fitness(k), k.shape, 0.9, 0.1 # fitness, generations, mutation prob, sigma + pbar = tqdm(range(gen), desc='Evolving anchors with Genetic Algorithm') # progress bar + for _ in pbar: + v = np.ones(sh) + while (v == 1).all(): # mutate until a change occurs (prevent duplicates) + v = ((npr.random(sh) < mp) * npr.random() * npr.randn(*sh) * s + 1).clip(0.3, 3.0) + kg = (k.copy() * v).clip(min=2.0) + fg = anchor_fitness(kg) + if fg > f: + f, k = fg, kg.copy() + pbar.desc = 'Evolving anchors with Genetic Algorithm: fitness = %.4f' % f + if verbose: + print_results(k) + + return print_results(k) diff --git a/utils/datasets.py b/utils/datasets.py new file mode 100644 index 0000000..0ffd13d --- /dev/null +++ b/utils/datasets.py @@ -0,0 +1,979 @@ +# Dataset utils and dataloaders + +import glob +import math +import os +import random +import shutil +import time +from itertools import repeat +from multiprocessing.pool import ThreadPool +from pathlib import Path +from threading import Thread + +import cv2 +import numpy as np +import torch +from PIL import Image, ExifTags +from torch.utils.data import Dataset 
+from tqdm import tqdm + +from utils.general import xyxy2xywh, xywh2xyxy +from utils.torch_utils import torch_distributed_zero_first + +# Parameters +help_url = 'https://github.com/ultralytics/yolov5/wiki/Train-Custom-Data' +img_formats = ['bmp', 'jpg', 'jpeg', 'png', 'tif', 'tiff', 'dng'] # acceptable image suffixes +vid_formats = ['mov', 'avi', 'mp4', 'mpg', 'mpeg', 'm4v', 'wmv', 'mkv'] # acceptable video suffixes + +# Get orientation exif tag +for orientation in ExifTags.TAGS.keys(): + if ExifTags.TAGS[orientation] == 'Orientation': + break + + +def get_hash(files): + # Returns a single hash value of a list of files + return sum(os.path.getsize(f) for f in files if os.path.isfile(f)) + + +def exif_size(img): + # Returns exif-corrected PIL size + s = img.size # (width, height) + try: + rotation = dict(img._getexif().items())[orientation] + if rotation == 6: # rotation 270 + s = (s[1], s[0]) + elif rotation == 8: # rotation 90 + s = (s[1], s[0]) + except: + pass + + return s + + +def create_dataloader(path, imgsz, batch_size, stride, opt, hyp=None, augment=False, cache=False, pad=0.0, rect=False, + rank=-1, world_size=1, workers=8): + # Make sure only the first process in DDP process the dataset first, and the following others can use the cache + with torch_distributed_zero_first(rank): + dataset = LoadImagesAndLabels(path, imgsz, batch_size, + augment=augment, # augment images + hyp=hyp, # augmentation hyperparameters + rect=rect, # rectangular training + cache_images=cache, + single_cls=opt.single_cls, + stride=int(stride), + pad=pad, + rank=rank) + + batch_size = min(batch_size, len(dataset)) + nw = min([os.cpu_count() // world_size, batch_size if batch_size > 1 else 0, workers]) # number of workers + sampler = torch.utils.data.distributed.DistributedSampler(dataset) if rank != -1 else None + dataloader = InfiniteDataLoader(dataset, + batch_size=batch_size, + num_workers=nw, + sampler=sampler, + pin_memory=True, + collate_fn=LoadImagesAndLabels.collate_fn) # torch.utils.data.DataLoader() + return dataloader, dataset + + +class InfiniteDataLoader(torch.utils.data.dataloader.DataLoader): + """ Dataloader that reuses workers + + Uses same syntax as vanilla DataLoader + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + object.__setattr__(self, 'batch_sampler', _RepeatSampler(self.batch_sampler)) + self.iterator = super().__iter__() + + def __len__(self): + return len(self.batch_sampler.sampler) + + def __iter__(self): + for i in range(len(self)): + yield next(self.iterator) + + +class _RepeatSampler(object): + """ Sampler that repeats forever + + Args: + sampler (Sampler) + """ + + def __init__(self, sampler): + self.sampler = sampler + + def __iter__(self): + while True: + yield from iter(self.sampler) + + +class LoadImages: # for inference + def __init__(self, path, img_size=640): + p = str(Path(path)) # os-agnostic + p = os.path.abspath(p) # absolute path + if '*' in p: + files = sorted(glob.glob(p, recursive=True)) # glob + elif os.path.isdir(p): + files = sorted(glob.glob(os.path.join(p, '*.*'))) # dir + elif os.path.isfile(p): + files = [p] # files + else: + raise Exception('ERROR: %s does not exist' % p) + + images = [x for x in files if x.split('.')[-1].lower() in img_formats] + videos = [x for x in files if x.split('.')[-1].lower() in vid_formats] + ni, nv = len(images), len(videos) + + self.img_size = img_size + self.files = images + videos + self.nf = ni + nv # number of files + self.video_flag = [False] * ni + [True] * nv + self.mode = 'images' 
+ if any(videos): + self.new_video(videos[0]) # new video + else: + self.cap = None + assert self.nf > 0, 'No images or videos found in %s. Supported formats are:\nimages: %s\nvideos: %s' % \ + (p, img_formats, vid_formats) + + def __iter__(self): + self.count = 0 + return self + + def __next__(self): + if self.count == self.nf: + raise StopIteration + path = self.files[self.count] + + if self.video_flag[self.count]: + # Read video + self.mode = 'video' + ret_val, img0 = self.cap.read() + if not ret_val: + self.count += 1 + self.cap.release() + if self.count == self.nf: # last video + raise StopIteration + else: + path = self.files[self.count] + self.new_video(path) + ret_val, img0 = self.cap.read() + + self.frame += 1 + print('video %g/%g (%g/%g) %s: ' % (self.count + 1, self.nf, self.frame, self.nframes, path), end='') + + else: + # Read image + self.count += 1 + img0 = cv2.imread(path) # BGR + assert img0 is not None, 'Image Not Found ' + path + print('image %g/%g %s: ' % (self.count, self.nf, path), end='') + + # Padded resize + img = letterbox(img0, new_shape=self.img_size)[0] + + # Convert + img = img[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416 + img = np.ascontiguousarray(img) + + return path, img, img0, self.cap + + def new_video(self, path): + self.frame = 0 + self.cap = cv2.VideoCapture(path) + self.nframes = int(self.cap.get(cv2.CAP_PROP_FRAME_COUNT)) + + def __len__(self): + return self.nf # number of files + + +class LoadWebcam: # for inference + def __init__(self, pipe='0', img_size=640): + self.img_size = img_size + + if pipe.isnumeric(): + pipe = eval(pipe) # local camera + # pipe = 'rtsp://192.168.1.64/1' # IP camera + # pipe = 'rtsp://username:password@192.168.1.64/1' # IP camera with login + # pipe = 'http://wmccpinetop.axiscam.net/mjpg/video.mjpg' # IP golf camera + + self.pipe = pipe + self.cap = cv2.VideoCapture(pipe) # video capture object + self.cap.set(cv2.CAP_PROP_BUFFERSIZE, 3) # set buffer size + + def __iter__(self): + self.count = -1 + return self + + def __next__(self): + self.count += 1 + if cv2.waitKey(1) == ord('q'): # q to quit + self.cap.release() + cv2.destroyAllWindows() + raise StopIteration + + # Read frame + if self.pipe == 0: # local camera + ret_val, img0 = self.cap.read() + img0 = cv2.flip(img0, 1) # flip left-right + else: # IP camera + n = 0 + while True: + n += 1 + self.cap.grab() + if n % 30 == 0: # skip frames + ret_val, img0 = self.cap.retrieve() + if ret_val: + break + + # Print + assert ret_val, 'Camera Error %s' % self.pipe + img_path = 'webcam.jpg' + print('webcam %g: ' % self.count, end='') + + # Padded resize + img = letterbox(img0, new_shape=self.img_size)[0] + + # Convert + img = img[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416 + img = np.ascontiguousarray(img) + + return img_path, img, img0, None + + def __len__(self): + return 0 + + +class LoadStreams: # multiple IP or RTSP cameras + def __init__(self, sources='streams.txt', img_size=640): + self.mode = 'images' + self.img_size = img_size + + if os.path.isfile(sources): + with open(sources, 'r') as f: + sources = [x.strip() for x in f.read().splitlines() if len(x.strip())] + else: + sources = [sources] + + n = len(sources) + self.imgs = [None] * n + self.sources = sources + for i, s in enumerate(sources): + # Start the thread to read frames from the video stream + print('%g/%g: %s... 
' % (i + 1, n, s), end='')
+            cap = cv2.VideoCapture(eval(s) if s.isnumeric() else s)
+            assert cap.isOpened(), 'Failed to open %s' % s
+            w = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
+            h = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
+            fps = cap.get(cv2.CAP_PROP_FPS) % 100
+            _, self.imgs[i] = cap.read()  # guarantee first frame
+            thread = Thread(target=self.update, args=([i, cap]), daemon=True)
+            print(' success (%gx%g at %.2f FPS).' % (w, h, fps))
+            thread.start()
+        print('')  # newline
+
+        # check for common shapes
+        s = np.stack([letterbox(x, new_shape=self.img_size)[0].shape for x in self.imgs], 0)  # inference shapes
+        self.rect = np.unique(s, axis=0).shape[0] == 1  # rect inference if all shapes equal
+        if not self.rect:
+            print('WARNING: Different stream shapes detected. For optimal performance supply similarly-shaped streams.')
+
+    def update(self, index, cap):
+        # Read next stream frame in a daemon thread
+        n = 0
+        while cap.isOpened():
+            n += 1
+            # _, self.imgs[index] = cap.read()
+            cap.grab()
+            if n == 4:  # read every 4th frame
+                _, self.imgs[index] = cap.retrieve()
+                n = 0
+            time.sleep(0.01)  # wait time
+
+    def __iter__(self):
+        self.count = -1
+        return self
+
+    def __next__(self):
+        self.count += 1
+        img0 = self.imgs.copy()
+        if cv2.waitKey(1) == ord('q'):  # q to quit
+            cv2.destroyAllWindows()
+            raise StopIteration
+
+        # Letterbox
+        img = [letterbox(x, new_shape=self.img_size, auto=self.rect)[0] for x in img0]
+
+        # Stack
+        img = np.stack(img, 0)
+
+        # Convert
+        img = img[:, :, :, ::-1].transpose(0, 3, 1, 2)  # BGR to RGB, to bsx3x416x416
+        img = np.ascontiguousarray(img)
+
+        return self.sources, img, img0, None
+
+    def __len__(self):
+        return 0  # 1E12 frames = 32 streams at 30 FPS for 30 years
+
+
+class LoadImagesAndLabels(Dataset):  # for training/testing
+    def __init__(self, path, img_size=640, batch_size=16, augment=False, hyp=None, rect=False, image_weights=False,
+                 cache_images=False, single_cls=False, stride=32, pad=0.0, rank=-1):
+        self.img_size = img_size
+        self.augment = augment
+        self.hyp = hyp
+        self.image_weights = image_weights
+        self.rect = False if image_weights else rect
+        self.mosaic = self.augment and not self.rect  # load 4 images at a time into a mosaic (only during training)
+        self.mosaic_border = [-img_size // 2, -img_size // 2]
+        self.stride = stride
+
+        def img2label_paths(img_paths):
+            # Define label paths as a function of image paths
+            sa, sb = os.sep + 'images' + os.sep, os.sep + 'labels' + os.sep  # /images/, /labels/ substrings
+            return [x.replace(sa, sb, 1).replace(x.split('.')[-1], 'txt') for x in img_paths]
+
+        try:
+            f = []  # image files
+            self.label_files, self.img_files = [], []
+
+            for p in path if isinstance(path, list) else [path]:
+                p = Path(p)  # ensure `p` is a Path object
+
+                if p.is_dir():  # directory
+                    print(f"📂 Processing directory: {p}")
+
+                    # Make sure `images/` and `labels/` exist
+                    img_dir = p / 'images'
+                    lbl_dir = p / 'labels'
+
+                    if not img_dir.exists() or not lbl_dir.exists():
+                        raise ValueError(f"🚨 ERROR: `images/` or `labels/` not found in {p}")
+
+                    print(f"✅ Found `images/` at: {img_dir}")
+                    print(f"✅ Found `labels/` at: {lbl_dir}")
+
+                    # Check every file in `labels/`
+                    for file_i in os.listdir(lbl_dir):
+                        if file_i.startswith('.'):
+                            continue  # skip hidden files
+
+                        label_path_i = lbl_dir / file_i
+                        image_path_i = img_dir / (file_i[:-4] + '.jpg')
+
+                        if not label_path_i.is_file():
+                            raise ValueError(f"🚨 Label file does not exist: {label_path_i}")
+
+                        if not image_path_i.is_file():
+                            image_path_i = img_dir / (file_i[:-4] + '.png')
+                            if not image_path_i.is_file():
+                                raise ValueError(f"🚨 No matching image found: {image_path_i}")
+
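+                        # (note) pairing convention assumed by this loader: every labels/<stem>.txt
+                        # must have a sibling images/<stem>.jpg or images/<stem>.png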
+                        self.label_files.append(str(label_path_i))
+                        self.img_files.append(str(image_path_i))
+
+                elif p.is_file():  # single file
+                    with open(p, 'r') as t:
+                        t = t.read().splitlines()
+                        parent = str(p.parent) + os.sep
+                        f += [x.replace('./', parent) if x.startswith('./') else x for x in t]  # convert relative paths to absolute
+
+                else:
+                    raise Exception(f"🚨 ERROR: path {p} does not exist")
+
+            assert self.img_files, '🚨 ERROR: no images found'
+
+        except Exception as e:
+            raise Exception(f"⚠️ Error loading data from {path}\nReason: {e}")
+
+        # Check cache
+        # self.label_files = img2label_paths(self.img_files)  # labels
+        cache_path = str(Path(self.label_files[0]).parent) + '.cache'  # cached labels
+        print('***cache_path', cache_path)
+        if os.path.isfile(cache_path):
+            cache = torch.load(cache_path)  # load
+            if cache['hash'] != get_hash(self.label_files + self.img_files):  # dataset changed
+                cache = self.cache_labels(cache_path)  # re-cache
+        else:
+            cache = self.cache_labels(cache_path)  # cache
+
+        # Read cache
+        cache.pop('hash')  # remove hash
+        labels, shapes = zip(*cache.values())
+        self.labels = list(labels)
+        self.shapes = np.array(shapes, dtype=np.float64)
+        self.img_files = list(cache.keys())  # update
+        self.label_files = img2label_paths(cache.keys())  # update
+
+        n = len(shapes)  # number of images
+        bi = np.floor(np.arange(n) / batch_size).astype(int)  # batch index
+        nb = bi[-1] + 1  # number of batches
+        self.batch = bi  # batch index of image
+        self.n = n
+
+        # Rectangular Training
+        if self.rect:
+            # Sort by aspect ratio
+            s = self.shapes  # wh
+            ar = s[:, 1] / s[:, 0]  # aspect ratio
+            irect = ar.argsort()
+            self.img_files = [self.img_files[i] for i in irect]
+            self.label_files = [self.label_files[i] for i in irect]
+            self.labels = [self.labels[i] for i in irect]
+            self.shapes = s[irect]  # wh
+            ar = ar[irect]
+
+            # Set training image shapes
+            shapes = [[1, 1]] * nb
+            for i in range(nb):
+                ari = ar[bi == i]
+                mini, maxi = ari.min(), ari.max()
+                if maxi < 1:
+                    shapes[i] = [maxi, 1]
+                elif mini > 1:
+                    shapes[i] = [1, 1 / mini]
+
+            self.batch_shapes = np.ceil(np.array(shapes) * img_size / stride + pad).astype(int) * stride
+
+        # Check labels
+        create_datasubset, extract_bounding_boxes, labels_loaded = False, False, False
+        nm, nf, ne, ns, nd = 0, 0, 0, 0, 0  # number missing, found, empty, datasubset, duplicate
+        pbar = enumerate(self.label_files)
+        if rank in [-1, 0]:
+            pbar = tqdm(pbar)
+        for i, file in pbar:
+            l = self.labels[i]  # label
+            if l is not None and l.shape[0]:
+                assert l.shape[1] == 5, 'labels require 5 columns each: %s' % file
+                # print(l, l >= 0, (l >= 0).all())
+                assert (l >= 0).all(), 'negative labels: %s' % file
+                assert (l[:, 1:] <= 1).all(), 'non-normalized or out of bounds coordinate labels: %s' % file
+                if np.unique(l, axis=0).shape[0] < l.shape[0]:  # duplicate rows
+                    nd += 1  # print('WARNING: duplicate rows in %s' % self.label_files[i])  # duplicate rows
+                if single_cls:
+                    l[:, 0] = 0  # force dataset into single-class mode
+                self.labels[i] = l
+                nf += 1  # file found
+
+                # Create subdataset (a smaller dataset)
+                if create_datasubset and ns < 1E4:
+                    if ns == 0:
+                        create_folder(path='./datasubset')
+                        os.makedirs('./datasubset/images')
+                    exclude_classes = 43
+                    if exclude_classes not in l[:, 0]:
+                        ns += 1
+                        # shutil.copy(src=self.img_files[i], dst='./datasubset/images/')  # copy image
+                        with open('./datasubset/images.txt', 'a') as f:
+                            f.write(self.img_files[i] + '\n')
+
+                # Extract object detection boxes for a second stage classifier
+                if extract_bounding_boxes:
+                    p = Path(self.img_files[i])
+                    img = cv2.imread(str(p))
+                    h, w = img.shape[:2]
+                    for j, x in enumerate(l):
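+                        # (illustrative) for a row x = [cls, 0.5, 0.5, 0.2, 0.1] on a 640x640 image:
+                        # b = [320, 320, 128, 64] -> square side 128 -> padded 128 * 1.3 + 30 = 196.4,
+                        # so the saved crop is approximately img[221:418, 221:418]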
+                        f = '%s%sclassifier%s%g_%g_%s' % (p.parent.parent, os.sep, os.sep, x[0], j, p.name)
+                        if not os.path.exists(Path(f).parent):
+                            os.makedirs(Path(f).parent)  # make new output folder
+
+                        b = x[1:] * [w, h, w, h]  # box
+                        b[2:] = b[2:].max()  # rectangle to square
+                        b[2:] = b[2:] * 1.3 + 30  # pad
+                        b = xywh2xyxy(b.reshape(-1, 4)).ravel().astype(int)
+
+                        b[[0, 2]] = np.clip(b[[0, 2]], 0, w)  # clip boxes outside of image
+                        b[[1, 3]] = np.clip(b[[1, 3]], 0, h)
+                        assert cv2.imwrite(f, img[b[1]:b[3], b[0]:b[2]]), 'Failure extracting classifier boxes'
+            else:
+                ne += 1  # print('empty labels for image %s' % self.img_files[i])  # file empty
+                # os.system("rm '%s' '%s'" % (self.img_files[i], self.label_files[i]))  # remove
+
+            if rank in [-1, 0]:
+                pbar.desc = 'Scanning labels %s (%g found, %g missing, %g empty, %g duplicate, for %g images)' % (
+                    cache_path, nf, nm, ne, nd, n)
+        if nf == 0:
+            s = 'WARNING: No labels found in %s. See %s' % (os.path.dirname(file) + os.sep, help_url)
+            print(s)
+            assert not augment, '%s. Can not train without labels.' % s
+
+        # Cache images into memory for faster training (WARNING: large datasets may exceed system RAM)
+        self.imgs = [None] * n
+        if cache_images:
+            gb = 0  # Gigabytes of cached images
+            self.img_hw0, self.img_hw = [None] * n, [None] * n
+            results = ThreadPool(8).imap(lambda x: load_image(*x), zip(repeat(self), range(n)))  # 8 threads
+            pbar = tqdm(enumerate(results), total=n)
+            for i, x in pbar:
+                self.imgs[i], self.img_hw0[i], self.img_hw[i] = x  # img, hw_original, hw_resized = load_image(self, i)
+                gb += self.imgs[i].nbytes
+                pbar.desc = 'Caching images (%.1fGB)' % (gb / 1E9)
+
+    def cache_labels(self, path='labels.cache'):
+        # Cache dataset labels, check images and read shapes
+        x = {}  # dict
+        pbar = tqdm(zip(self.img_files, self.label_files), desc='Scanning images', total=len(self.img_files))
+        for (img, label) in pbar:
+            try:
+                l = []
+                im = Image.open(img)
+                im.verify()  # PIL verify
+                shape = exif_size(im)  # image size
+                assert (shape[0] > 9) & (shape[1] > 9), 'image size <10 pixels'
+                if os.path.isfile(label):
+                    with open(label, 'r') as f:
+                        l = np.array([x.split() for x in f.read().splitlines()], dtype=np.float32)  # labels
+                if len(l) == 0:
+                    l = np.zeros((0, 5), dtype=np.float32)
+                x[img] = [l, shape]
+            except Exception as e:
+                print('WARNING: Ignoring corrupted image and/or label %s: %s' % (img, e))
+
+        x['hash'] = get_hash(self.label_files + self.img_files)
+        torch.save(x, path)  # save for next time
+        return x
+
+    def __len__(self):
+        return len(self.img_files)
+
+    # def __iter__(self):
+    #     self.count = -1
+    #     print('ran dataset iter')
+    #     # self.shuffled_vector = np.random.permutation(self.nF) if self.augment else np.arange(self.nF)
+    #     return self
+
+    def __getitem__(self, index):
+        if self.image_weights:
+            index = self.indices[index]
+        # dataset_mosaic_tag = dataset_mosaic(self, index)
+        hyp = self.hyp
+        mosaic = self.mosaic and random.random() < hyp['mosaic']
+        # print(index, 'self.mosaic', self.mosaic, hyp['mosaic'])
+        if mosaic:  # and dataset_mosaic_tag:
+            # Load mosaic
+            img, labels = load_mosaic(self, index)
+            shapes = None
+
+            # MixUp https://arxiv.org/pdf/1710.09412.pdf
+            if random.random() < hyp['mixup']:
+                img2, labels2 = load_mosaic(self, random.randint(0, len(self.labels) - 1))
+                r = np.random.beta(8.0, 8.0)  # mixup ratio, alpha=beta=8.0
+                img = (img * r + img2 * (1 - r)).astype(np.uint8)
+                labels = np.concatenate((labels, labels2), 0)
+
+        else:
+            # print('*no mosaic', index)
+            # Load image
+            img, (h0, w0), (h, w) = load_image(self, index)
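+            # (note) h0, w0 is the original size and h, w the size after the aspect-preserving
+            # resize in load_image(); both are stored in `shapes` below so detections can be
+            # mapped back to the original image with scale_coords()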
+
+            # Letterbox
+            shape = self.batch_shapes[self.batch[index]] if self.rect else self.img_size  # final letterboxed shape
+            img, ratio, pad = letterbox(img, shape, auto=False, scaleup=self.augment)
+            shapes = (h0, w0), ((h / h0, w / w0), pad)  # for COCO mAP rescaling
+
+            # Load labels
+            labels = []
+            x = self.labels[index]
+            if x.size > 0:
+                # Normalized xywh to pixel xyxy format
+                labels = x.copy()
+                labels[:, 1] = ratio[0] * w * (x[:, 1] - x[:, 3] / 2) + pad[0]  # pad width
+                labels[:, 2] = ratio[1] * h * (x[:, 2] - x[:, 4] / 2) + pad[1]  # pad height
+                labels[:, 3] = ratio[0] * w * (x[:, 1] + x[:, 3] / 2) + pad[0]
+                labels[:, 4] = ratio[1] * h * (x[:, 2] + x[:, 4] / 2) + pad[1]
+
+        if self.augment:
+            # Augment imagespace
+            if not mosaic:
+                img, labels = random_perspective(img, labels,
+                                                 degrees=hyp['degrees'],
+                                                 translate=hyp['translate'],
+                                                 scale=hyp['scale'],
+                                                 shear=hyp['shear'],
+                                                 perspective=hyp['perspective'])
+
+            # Augment colorspace
+            augment_hsv(img, hgain=hyp['hsv_h'], sgain=hyp['hsv_s'], vgain=hyp['hsv_v'])
+
+            # Apply cutouts
+            # if random.random() < 0.9:
+            #     labels = cutout(img, labels)
+
+        nL = len(labels)  # number of labels
+        if nL:
+            labels[:, 1:5] = xyxy2xywh(labels[:, 1:5])  # convert xyxy to xywh
+            labels[:, [2, 4]] /= img.shape[0]  # normalized height 0-1
+            labels[:, [1, 3]] /= img.shape[1]  # normalized width 0-1
+
+        if self.augment:
+            # flip up-down
+            if random.random() < hyp['flipud']:
+                img = np.flipud(img)
+                if nL:
+                    labels[:, 2] = 1 - labels[:, 2]
+
+            # flip left-right
+            if random.random() < hyp['fliplr']:
+                img = np.fliplr(img)
+                if nL:
+                    labels[:, 1] = 1 - labels[:, 1]
+
+            # Grayscale conversion (disabled: random.random() is never < -0.3; raise
+            # the threshold above 0 to re-enable)
+            # if random.random() < -0.3:
+            #     yvu = cv2.cvtColor(img, cv2.COLOR_BGR2YCrCb)
+            #     y, v, u = cv2.split(yvu)
+            #     img = np.stack((y,) * 3, axis=-1)
+
+        labels_out = torch.zeros((nL, 6))
+        if nL:
+            labels_out[:, 1:] = torch.from_numpy(labels)
+
+        # hh, ww = img.shape[:2]
+        # for label_i in labels:
+        #     class_i, xc, yc, w, h = label_i[:]
+        #     xx1, yy1, xx2, yy2 = int((xc-0.5*w)*ww), int((yc-0.5*h)*hh), int((xc+0.5*w)*ww), int((yc+0.5*h)*hh)
+        #     cv2.rectangle(img, (xx1,yy1), (xx2,yy2), (225, 0, 0), lineType=cv2.LINE_AA)
+
+        # path = os.path.join('tmp480', str(index) + '.jpg')
+        # cv2.imwrite(path, img)
+        # Convert
+        img = img[:, :, ::-1].transpose(2, 0, 1)  # BGR to RGB, to 3x416x416
+        img = np.ascontiguousarray(img)
+
+        return torch.from_numpy(img), labels_out, self.img_files[index], shapes
+
+    @staticmethod
+    def collate_fn(batch):
+        img, label, path, shapes = zip(*batch)  # transposed
+        for i, l in enumerate(label):
+            l[:, 0] = i  # add target image index for build_targets()
+        return torch.stack(img, 0), torch.cat(label, 0), path, shapes
+
+
+# Ancillary functions --------------------------------------------------------------------------------------------------
+def load_image(self, index):
+    # loads 1 image from dataset, returns img, original hw, resized hw
+    img = self.imgs[index]
+    if img is None:  # not cached
+        path = self.img_files[index]
+        img = cv2.imread(path)  # BGR
+        assert img is not None, 'Image Not Found ' + path
+        h0, w0 = img.shape[:2]  # orig hw
+        r = self.img_size / max(h0, w0)  # resize image to img_size
+        if r != 1:  # always resize down, only resize up if training with augmentation
+            interp = cv2.INTER_AREA if r < 1 and not self.augment else cv2.INTER_LINEAR
+            img = cv2.resize(img, (int(w0 * r), int(h0 * r)), interpolation=interp)
+        return img, (h0, w0), img.shape[:2]  # img, hw_original, hw_resized
+    else:
+        return self.imgs[index], self.img_hw0[index], self.img_hw[index]  # img, hw_original, hw_resized
+
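+# Usage sketch (illustrative only; `hyp` and the data path are placeholders, not
+# values defined in this file):
+#   dataset = LoadImagesAndLabels('data/train', img_size=640, batch_size=16,
+#                                 augment=True, hyp=hyp, rect=False)
+#   img, labels, path, shapes = dataset[0]  # img: uint8 CHW RGB tensor, labels: (n, 6)
+#   # label columns are [batch image index (filled in by collate_fn), class, x, y, w, h],
+#   # with xywh normalized to 0-1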
+ +def augment_hsv(img, hgain=0.5, sgain=0.5, vgain=0.5): + r = np.random.uniform(-1, 1, 3) * [hgain, sgain, vgain] + 1 # random gains + hue, sat, val = cv2.split(cv2.cvtColor(img, cv2.COLOR_BGR2HSV)) + dtype = img.dtype # uint8 + + x = np.arange(0, 256, dtype=np.int16) + lut_hue = ((x * r[0]) % 180).astype(dtype) + lut_sat = np.clip(x * r[1], 0, 255).astype(dtype) + lut_val = np.clip(x * r[2], 0, 255).astype(dtype) + + img_hsv = cv2.merge((cv2.LUT(hue, lut_hue), cv2.LUT(sat, lut_sat), cv2.LUT(val, lut_val))).astype(dtype) + cv2.cvtColor(img_hsv, cv2.COLOR_HSV2BGR, dst=img) # no return needed + + # Histogram equalization + # if random.random() < 0.2: + # for i in range(3): + # img[:, :, i] = cv2.equalizeHist(img[:, :, i]) + +def dataset_mosaic(self, index): + tag = True + path = self.img_files[index].lower() + dataset_NoMosaic = ['bdd100k','crowdhuman','nuscenes'] #'cityscapes' + for d_i in dataset_NoMosaic: + if d_i in path: + tag = False + # print('self.img_files[index]',self.img_files[index],tag) + return tag + +def load_mosaic(self, index): + # loads images in a mosaic + + labels4 = [] + s = self.img_size + yc, xc = [int(random.uniform(-x, 2 * s + x)) for x in self.mosaic_border] # mosaic center x, y + + indices = [index] + [random.randint(0, len(self.labels) - 1) for _ in range(3)] # 3 additional image indices + # indices = [index] + # while len(indices)<4: + # random_index = random.randint(0, len(self.labels) - 1) + # tag = True + # path = self.img_files[index].lower() + # dataset_NoMosaic = ['bdd100k','crowdhuman','nuscenes'] #'cityscapes' + # for d_i in dataset_NoMosaic: + # if d_i in path: + # tag = False + # if tag: + # indices.append(random_index) + + + for i, index in enumerate(indices): + # Load image + img, _, (h, w) = load_image(self, index) + + # place img in img4 + if i == 0: # top left + img4 = np.full((s * 2, s * 2, img.shape[2]), 114, dtype=np.uint8) # base image with 4 tiles + x1a, y1a, x2a, y2a = max(xc - w, 0), max(yc - h, 0), xc, yc # xmin, ymin, xmax, ymax (large image) + x1b, y1b, x2b, y2b = w - (x2a - x1a), h - (y2a - y1a), w, h # xmin, ymin, xmax, ymax (small image) + elif i == 1: # top right + x1a, y1a, x2a, y2a = xc, max(yc - h, 0), min(xc + w, s * 2), yc + x1b, y1b, x2b, y2b = 0, h - (y2a - y1a), min(w, x2a - x1a), h + elif i == 2: # bottom left + x1a, y1a, x2a, y2a = max(xc - w, 0), yc, xc, min(s * 2, yc + h) + x1b, y1b, x2b, y2b = w - (x2a - x1a), 0, w, min(y2a - y1a, h) + elif i == 3: # bottom right + x1a, y1a, x2a, y2a = xc, yc, min(xc + w, s * 2), min(s * 2, yc + h) + x1b, y1b, x2b, y2b = 0, 0, min(w, x2a - x1a), min(y2a - y1a, h) + + img4[y1a:y2a, x1a:x2a] = img[y1b:y2b, x1b:x2b] # img4[ymin:ymax, xmin:xmax] + padw = x1a - x1b + padh = y1a - y1b + + # Labels + x = self.labels[index] + labels = x.copy() + if x.size > 0: # Normalized xywh to pixel xyxy format + labels[:, 1] = w * (x[:, 1] - x[:, 3] / 2) + padw + labels[:, 2] = h * (x[:, 2] - x[:, 4] / 2) + padh + labels[:, 3] = w * (x[:, 1] + x[:, 3] / 2) + padw + labels[:, 4] = h * (x[:, 2] + x[:, 4] / 2) + padh + labels4.append(labels) + + # Concat/clip labels + if len(labels4): + labels4 = np.concatenate(labels4, 0) + np.clip(labels4[:, 1:], 0, 2 * s, out=labels4[:, 1:]) # use with random_perspective + # img4, labels4 = replicate(img4, labels4) # replicate + + # Augment + img4, labels4 = random_perspective(img4, labels4, + degrees=self.hyp['degrees'], + translate=self.hyp['translate'], + scale=self.hyp['scale'], + shear=self.hyp['shear'], + perspective=self.hyp['perspective'], + 
border=self.mosaic_border) # border to remove + + return img4, labels4 + + +def replicate(img, labels): + # Replicate labels + h, w = img.shape[:2] + boxes = labels[:, 1:].astype(int) + x1, y1, x2, y2 = boxes.T + s = ((x2 - x1) + (y2 - y1)) / 2 # side length (pixels) + for i in s.argsort()[:round(s.size * 0.5)]: # smallest indices + x1b, y1b, x2b, y2b = boxes[i] + bh, bw = y2b - y1b, x2b - x1b + yc, xc = int(random.uniform(0, h - bh)), int(random.uniform(0, w - bw)) # offset x, y + x1a, y1a, x2a, y2a = [xc, yc, xc + bw, yc + bh] + img[y1a:y2a, x1a:x2a] = img[y1b:y2b, x1b:x2b] # img4[ymin:ymax, xmin:xmax] + labels = np.append(labels, [[labels[i, 0], x1a, y1a, x2a, y2a]], axis=0) + + return img, labels + + +def letterbox(img, new_shape=(640, 640), color=(114, 114, 114), auto=True, scaleFill=False, scaleup=True): + # Resize image to a 32-pixel-multiple rectangle https://github.com/ultralytics/yolov3/issues/232 + shape = img.shape[:2] # current shape [height, width] + if isinstance(new_shape, int): + new_shape = (new_shape, new_shape) + + # Scale ratio (new / old) + r = min(new_shape[0] / shape[0], new_shape[1] / shape[1]) + if not scaleup: # only scale down, do not scale up (for better test mAP) + r = min(r, 1.0) + + # Compute padding + ratio = r, r # width, height ratios + new_unpad = int(round(shape[1] * r)), int(round(shape[0] * r)) + dw, dh = new_shape[1] - new_unpad[0], new_shape[0] - new_unpad[1] # wh padding + if auto: # minimum rectangle + dw, dh = np.mod(dw, 32), np.mod(dh, 32) # wh padding + elif scaleFill: # stretch + dw, dh = 0.0, 0.0 + new_unpad = (new_shape[1], new_shape[0]) + ratio = new_shape[1] / shape[1], new_shape[0] / shape[0] # width, height ratios + + dw /= 2 # divide padding into 2 sides + dh /= 2 + + if shape[::-1] != new_unpad: # resize + img = cv2.resize(img, new_unpad, interpolation=cv2.INTER_LINEAR) + top, bottom = int(round(dh - 0.1)), int(round(dh + 0.1)) + left, right = int(round(dw - 0.1)), int(round(dw + 0.1)) + img = cv2.copyMakeBorder(img, top, bottom, left, right, cv2.BORDER_CONSTANT, value=color) # add border + return img, ratio, (dw, dh) + + +def random_perspective(img, targets=(), degrees=10, translate=.1, scale=.1, shear=10, perspective=0.0, border=(0, 0)): + # torchvision.transforms.RandomAffine(degrees=(-10, 10), translate=(.1, .1), scale=(.9, 1.1), shear=(-10, 10)) + # targets = [cls, xyxy] + + height = img.shape[0] + border[0] * 2 # shape(h,w,c) + width = img.shape[1] + border[1] * 2 + + # Center + C = np.eye(3) + C[0, 2] = -img.shape[1] / 2 # x translation (pixels) + C[1, 2] = -img.shape[0] / 2 # y translation (pixels) + + # Perspective + P = np.eye(3) + P[2, 0] = random.uniform(-perspective, perspective) # x perspective (about y) + P[2, 1] = random.uniform(-perspective, perspective) # y perspective (about x) + + # Rotation and Scale + R = np.eye(3) + a = random.uniform(-degrees, degrees) + # a += random.choice([-180, -90, 0, 90]) # add 90deg rotations to small rotations + s = random.uniform(1 - scale, 1 + scale) + # s = 2 ** random.uniform(-scale, scale) + R[:2] = cv2.getRotationMatrix2D(angle=a, center=(0, 0), scale=s) + + # Shear + S = np.eye(3) + S[0, 1] = math.tan(random.uniform(-shear, shear) * math.pi / 180) # x shear (deg) + S[1, 0] = math.tan(random.uniform(-shear, shear) * math.pi / 180) # y shear (deg) + + # Translation + T = np.eye(3) + T[0, 2] = random.uniform(0.5 - translate, 0.5 + translate) * width # x translation (pixels) + T[1, 2] = random.uniform(0.5 - translate, 0.5 + translate) * height # y translation (pixels) + + # Combined 
rotation matrix + M = T @ S @ R @ P @ C # order of operations (right to left) is IMPORTANT + if (border[0] != 0) or (border[1] != 0) or (M != np.eye(3)).any(): # image changed + if perspective: + img = cv2.warpPerspective(img, M, dsize=(width, height), borderValue=(114, 114, 114)) + else: # affine + img = cv2.warpAffine(img, M[:2], dsize=(width, height), borderValue=(114, 114, 114)) + + # Visualize + # import matplotlib.pyplot as plt + # ax = plt.subplots(1, 2, figsize=(12, 6))[1].ravel() + # ax[0].imshow(img[:, :, ::-1]) # base + # ax[1].imshow(img2[:, :, ::-1]) # warped + + # Transform label coordinates + n = len(targets) + if n: + # warp points + xy = np.ones((n * 4, 3)) + xy[:, :2] = targets[:, [1, 2, 3, 4, 1, 4, 3, 2]].reshape(n * 4, 2) # x1y1, x2y2, x1y2, x2y1 + xy = xy @ M.T # transform + if perspective: + xy = (xy[:, :2] / xy[:, 2:3]).reshape(n, 8) # rescale + else: # affine + xy = xy[:, :2].reshape(n, 8) + + # create new boxes + x = xy[:, [0, 2, 4, 6]] + y = xy[:, [1, 3, 5, 7]] + xy = np.concatenate((x.min(1), y.min(1), x.max(1), y.max(1))).reshape(4, n).T + + # # apply angle-based reduction of bounding boxes + # radians = a * math.pi / 180 + # reduction = max(abs(math.sin(radians)), abs(math.cos(radians))) ** 0.5 + # x = (xy[:, 2] + xy[:, 0]) / 2 + # y = (xy[:, 3] + xy[:, 1]) / 2 + # w = (xy[:, 2] - xy[:, 0]) * reduction + # h = (xy[:, 3] - xy[:, 1]) * reduction + # xy = np.concatenate((x - w / 2, y - h / 2, x + w / 2, y + h / 2)).reshape(4, n).T + + # clip boxes + xy[:, [0, 2]] = xy[:, [0, 2]].clip(0, width) + xy[:, [1, 3]] = xy[:, [1, 3]].clip(0, height) + + # filter candidates + i = box_candidates(box1=targets[:, 1:5].T * s, box2=xy.T) + targets = targets[i] + targets[:, 1:5] = xy[i] + + return img, targets + + +def box_candidates(box1, box2, wh_thr=2, ar_thr=20, area_thr=0.1): # box1(4,n), box2(4,n) + # Compute candidate boxes: box1 before augment, box2 after augment, wh_thr (pixels), aspect_ratio_thr, area_ratio + w1, h1 = box1[2] - box1[0], box1[3] - box1[1] + w2, h2 = box2[2] - box2[0], box2[3] - box2[1] + ar = np.maximum(w2 / (h2 + 1e-16), h2 / (w2 + 1e-16)) # aspect ratio + return (w2 > wh_thr) & (h2 > wh_thr) & (w2 * h2 / (w1 * h1 + 1e-16) > area_thr) & (ar < ar_thr) # candidates + + +def cutout(image, labels): + # Applies image cutout augmentation https://arxiv.org/abs/1708.04552 + h, w = image.shape[:2] + + def bbox_ioa(box1, box2): + # Returns the intersection over box2 area given box1, box2. box1 is 4, box2 is nx4. 
boxes are x1y1x2y2 + box2 = box2.transpose() + + # Get the coordinates of bounding boxes + b1_x1, b1_y1, b1_x2, b1_y2 = box1[0], box1[1], box1[2], box1[3] + b2_x1, b2_y1, b2_x2, b2_y2 = box2[0], box2[1], box2[2], box2[3] + + # Intersection area + inter_area = (np.minimum(b1_x2, b2_x2) - np.maximum(b1_x1, b2_x1)).clip(0) * \ + (np.minimum(b1_y2, b2_y2) - np.maximum(b1_y1, b2_y1)).clip(0) + + # box2 area + box2_area = (b2_x2 - b2_x1) * (b2_y2 - b2_y1) + 1e-16 + + # Intersection over box2 area + return inter_area / box2_area + + # create random masks + scales = [0.5] * 1 + [0.25] * 2 + [0.125] * 4 + [0.0625] * 8 + [0.03125] * 16 # image size fraction + for s in scales: + mask_h = random.randint(1, int(h * s)) + mask_w = random.randint(1, int(w * s)) + + # box + xmin = max(0, random.randint(0, w) - mask_w // 2) + ymin = max(0, random.randint(0, h) - mask_h // 2) + xmax = min(w, xmin + mask_w) + ymax = min(h, ymin + mask_h) + + # apply random color mask + image[ymin:ymax, xmin:xmax] = [random.randint(64, 191) for _ in range(3)] + + # return unobscured labels + if len(labels) and s > 0.03: + box = np.array([xmin, ymin, xmax, ymax], dtype=np.float32) + ioa = bbox_ioa(box, labels[:, 1:5]) # intersection over area + labels = labels[ioa < 0.60] # remove >60% obscured labels + + return labels + + +def create_folder(path='./new'): + # Create folder + if os.path.exists(path): + shutil.rmtree(path) # delete output folder + os.makedirs(path) # make new output folder + + +def flatten_recursive(path='../coco128'): + # Flatten a recursive directory by bringing all files to top level + new_path = Path(path + '_flat') + create_folder(new_path) + for file in tqdm(glob.glob(str(Path(path)) + '/**/*.*', recursive=True)): + shutil.copyfile(file, new_path / Path(file).name) diff --git a/utils/general.py b/utils/general.py new file mode 100644 index 0000000..9a962ab --- /dev/null +++ b/utils/general.py @@ -0,0 +1,434 @@ +# General utils + +import glob +import logging +import math +import os +import platform +import random +import re +import subprocess +import time +from pathlib import Path + +import cv2 +import matplotlib +import numpy as np +import torch +import yaml + +from utils.google_utils import gsutil_getsize +from utils.metrics import fitness +from utils.torch_utils import init_torch_seeds + +# Set printoptions +torch.set_printoptions(linewidth=320, precision=5, profile='long') +np.set_printoptions(linewidth=320, formatter={'float_kind': '{:11.5g}'.format}) # format short g, %precision=5 +matplotlib.rc('font', **{'size': 11}) + +# Prevent OpenCV from multithreading (to use PyTorch DataLoader) +cv2.setNumThreads(0) + + +def set_logging(rank=-1): + logging.basicConfig( + format="%(message)s", + level=logging.INFO if rank in [-1, 0] else logging.WARN) + + +def init_seeds(seed=0): + random.seed(seed) + np.random.seed(seed) + init_torch_seeds(seed) + + +def get_latest_run(search_dir='.'): + # Return path to most recent 'last.pt' in /runs (i.e. 
to --resume from)
+    last_list = glob.glob(f'{search_dir}/**/last*.pt', recursive=True)
+    return max(last_list, key=os.path.getctime) if last_list else ''
+
+
+def check_git_status():
+    # Suggest 'git pull' if repo is out of date
+    if platform.system() in ['Linux', 'Darwin'] and not os.path.isfile('/.dockerenv'):
+        s = subprocess.check_output('if [ -d .git ]; then git fetch && git status -uno; fi', shell=True).decode('utf-8')
+        if 'Your branch is behind' in s:
+            print(s[s.find('Your branch is behind'):s.find('\n\n')] + '\n')
+
+
+def check_img_size(img_size, s=32):
+    # Verify img_size is a multiple of stride s
+    new_size = make_divisible(img_size, int(s))  # ceil gs-multiple
+    if new_size != img_size:
+        print('WARNING: --img-size %g must be multiple of max stride %g, updating to %g' % (img_size, s, new_size))
+    return new_size
+
+
+def check_file(file):
+    # Search for file if not found
+    if os.path.isfile(file) or file == '':
+        return file
+    else:
+        files = glob.glob('./**/' + file, recursive=True)  # find file
+        assert len(files), 'File Not Found: %s' % file  # assert file was found
+        assert len(files) == 1, "Multiple files match '%s', specify exact path: %s" % (file, files)  # assert unique
+        return files[0]  # return file
+
+
+def check_dataset(dict):
+    # Download dataset if not found locally
+    val, s = dict.get('val'), dict.get('download')
+    if val and len(val):
+        val = [Path(x).resolve() for x in (val if isinstance(val, list) else [val])]  # val path
+        if not all(x.exists() for x in val):
+            print('\nWARNING: Dataset not found, nonexistent paths: %s' % [str(x) for x in val if not x.exists()])
+            if s and len(s):  # download script
+                print('Downloading %s ...' % s)
+                if s.startswith('http') and s.endswith('.zip'):  # URL
+                    f = Path(s).name  # filename
+                    torch.hub.download_url_to_file(s, f)
+                    r = os.system('unzip -q %s -d ../ && rm %s' % (f, f))  # unzip
+                else:  # bash script
+                    r = os.system(s)
+                print('Dataset autodownload %s\n' % ('success' if r == 0 else 'failure'))  # analyze return value
+            else:
+                raise Exception('Dataset not found.')
+
+
+def make_divisible(x, divisor):
+    # Returns x evenly divisible by divisor
+    return math.ceil(x / divisor) * divisor
+
+
+def labels_to_class_weights(labels, nc=80):
+    # Get class weights (inverse frequency) from training labels
+    if labels[0] is None:  # no labels loaded
+        return torch.Tensor()
+
+    labels = np.concatenate(labels, 0)  # labels.shape = (866643, 5) for COCO
+    classes = labels[:, 0].astype(int)  # labels = [class xywh]
+    weights = np.bincount(classes, minlength=nc)  # occurrences per class
+
+    # Prepend gridpoint count (for uCE training)
+    # gpi = ((320 / 32 * np.array([1, 2, 4])) ** 2 * 3).sum()  # gridpoints per image
+    # weights = np.hstack([gpi * len(labels) - weights.sum() * 9, weights * 9]) ** 0.5  # prepend gridpoints to start
+
+    weights[weights == 0] = 1  # replace empty bins with 1
+    weights = 1 / weights  # number of targets per class
+    weights /= weights.sum()  # normalize
+    return torch.from_numpy(weights)
+
+
+def labels_to_image_weights(labels, nc=80, class_weights=np.ones(80)):
+    # Produces image weights based on class mAPs
+    n = len(labels)
+    class_counts = np.array([np.bincount(labels[i][:, 0].astype(int), minlength=nc) for i in range(n)])
+    image_weights = (class_weights.reshape(1, nc) * class_counts).sum(1)
+    # index = random.choices(range(n), weights=image_weights, k=1)  # weight image sample
+    return image_weights
+
+
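+# Usage sketch (illustrative): with per-class counts [100, 10, 1],
+# labels_to_class_weights() yields weights proportional to [0.01, 0.1, 1] before
+# normalization; labels_to_image_weights() can then bias sampling toward images
+# containing rare classes, e.g. via random.choices(range(n), weights=image_weights).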
+def coco80_to_coco91_class():  # converts 80-index (val2014) to 91-index (paper)
+    # https://tech.amikelive.com/node-718/what-object-categories-labels-are-in-coco-dataset/
+    # a = np.loadtxt('data/coco.names', dtype='str', delimiter='\n')
+    # b = np.loadtxt('data/coco_paper.names', dtype='str', delimiter='\n')
+    # x1 = [list(a[i] == b).index(True) + 1 for i in range(80)]  # darknet to coco
+    # x2 = [list(b[i] == a).index(True) if any(b[i] == a) else None for i in range(91)]  # coco to darknet
+    x = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 27, 28, 31, 32, 33, 34,
+         35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
+         64, 65, 67, 70, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 84, 85, 86, 87, 88, 89, 90]
+    return x
+
+
+def xyxy2xywh(x):
+    # Convert nx4 boxes from [x1, y1, x2, y2] to [x, y, w, h] where xy1=top-left, xy2=bottom-right
+    y = x.clone() if isinstance(x, torch.Tensor) else np.copy(x)
+    y[:, 0] = (x[:, 0] + x[:, 2]) / 2  # x center
+    y[:, 1] = (x[:, 1] + x[:, 3]) / 2  # y center
+    y[:, 2] = x[:, 2] - x[:, 0]  # width
+    y[:, 3] = x[:, 3] - x[:, 1]  # height
+    return y
+
+
+def xywh2xyxy(x):
+    # Convert nx4 boxes from [x, y, w, h] to [x1, y1, x2, y2] where xy1=top-left, xy2=bottom-right
+    y = x.clone() if isinstance(x, torch.Tensor) else np.copy(x)
+    y[:, 0] = x[:, 0] - x[:, 2] / 2  # top left x
+    y[:, 1] = x[:, 1] - x[:, 3] / 2  # top left y
+    y[:, 2] = x[:, 0] + x[:, 2] / 2  # bottom right x
+    y[:, 3] = x[:, 1] + x[:, 3] / 2  # bottom right y
+    return y
+
+
+def scale_coords(img1_shape, coords, img0_shape, ratio_pad=None):
+    # Rescale coords (xyxy) from img1_shape to img0_shape
+    if ratio_pad is None:  # calculate from img0_shape
+        gain = min(img1_shape[0] / img0_shape[0], img1_shape[1] / img0_shape[1])  # gain = old / new
+        pad = (img1_shape[1] - img0_shape[1] * gain) / 2, (img1_shape[0] - img0_shape[0] * gain) / 2  # wh padding
+    else:
+        gain = ratio_pad[0][0]
+        pad = ratio_pad[1]
+
+    coords[:, [0, 2]] -= pad[0]  # x padding
+    coords[:, [1, 3]] -= pad[1]  # y padding
+    coords[:, :4] /= gain
+    clip_coords(coords, img0_shape)
+    return coords
+
+
+def clip_coords(boxes, img_shape):
+    # Clip xyxy bounding boxes to image shape (height, width)
+    boxes[:, 0].clamp_(0, img_shape[1])  # x1
+    boxes[:, 1].clamp_(0, img_shape[0])  # y1
+    boxes[:, 2].clamp_(0, img_shape[1])  # x2
+    boxes[:, 3].clamp_(0, img_shape[0])  # y2
+
+
+def bbox_iou(box1, box2, x1y1x2y2=True, GIoU=False, DIoU=False, CIoU=False, eps=1e-9):
+    # Returns the IoU of box1 to box2. 
box1 is 4, box2 is nx4 + box2 = box2.T + + # Get the coordinates of bounding boxes + if x1y1x2y2: # x1, y1, x2, y2 = box1 + b1_x1, b1_y1, b1_x2, b1_y2 = box1[0], box1[1], box1[2], box1[3] + b2_x1, b2_y1, b2_x2, b2_y2 = box2[0], box2[1], box2[2], box2[3] + else: # transform from xywh to xyxy + b1_x1, b1_x2 = box1[0] - box1[2] / 2, box1[0] + box1[2] / 2 + b1_y1, b1_y2 = box1[1] - box1[3] / 2, box1[1] + box1[3] / 2 + b2_x1, b2_x2 = box2[0] - box2[2] / 2, box2[0] + box2[2] / 2 + b2_y1, b2_y2 = box2[1] - box2[3] / 2, box2[1] + box2[3] / 2 + + # Intersection area + inter = (torch.min(b1_x2, b2_x2) - torch.max(b1_x1, b2_x1)).clamp(0) * \ + (torch.min(b1_y2, b2_y2) - torch.max(b1_y1, b2_y1)).clamp(0) + + # Union Area + w1, h1 = b1_x2 - b1_x1, b1_y2 - b1_y1 + eps + w2, h2 = b2_x2 - b2_x1, b2_y2 - b2_y1 + eps + union = w1 * h1 + w2 * h2 - inter + eps + + iou = inter / union + if GIoU or DIoU or CIoU: + cw = torch.max(b1_x2, b2_x2) - torch.min(b1_x1, b2_x1) # convex (smallest enclosing box) width + ch = torch.max(b1_y2, b2_y2) - torch.min(b1_y1, b2_y1) # convex height + if CIoU or DIoU: # Distance or Complete IoU https://arxiv.org/abs/1911.08287v1 + c2 = cw ** 2 + ch ** 2 + eps # convex diagonal squared + rho2 = ((b2_x1 + b2_x2 - b1_x1 - b1_x2) ** 2 + + (b2_y1 + b2_y2 - b1_y1 - b1_y2) ** 2) / 4 # center distance squared + if DIoU: + return iou - rho2 / c2 # DIoU + elif CIoU: # https://github.com/Zzh-tju/DIoU-SSD-pytorch/blob/master/utils/box/box_utils.py#L47 + v = (4 / math.pi ** 2) * torch.pow(torch.atan(w2 / h2) - torch.atan(w1 / h1), 2) + with torch.no_grad(): + alpha = v / ((1 + eps) - iou + v) + return iou - (rho2 / c2 + v * alpha) # CIoU + else: # GIoU https://arxiv.org/pdf/1902.09630.pdf + c_area = cw * ch + eps # convex area + return iou - (c_area - union) / c_area # GIoU + else: + return iou # IoU + + +def box_iou(box1, box2): + # https://github.com/pytorch/vision/blob/master/torchvision/ops/boxes.py + """ + Return intersection-over-union (Jaccard index) of boxes. + Both sets of boxes are expected to be in (x1, y1, x2, y2) format. + Arguments: + box1 (Tensor[N, 4]) + box2 (Tensor[M, 4]) + Returns: + iou (Tensor[N, M]): the NxM matrix containing the pairwise + IoU values for every element in boxes1 and boxes2 + """ + + def box_area(box): + # box = 4xn + return (box[2] - box[0]) * (box[3] - box[1]) + + area1 = box_area(box1.T) + area2 = box_area(box2.T) + + # inter(N,M) = (rb(N,M,2) - lt(N,M,2)).clamp(0).prod(2) + inter = (torch.min(box1[:, None, 2:], box2[:, 2:]) - torch.max(box1[:, None, :2], box2[:, :2])).clamp(0).prod(2) + return inter / (area1[:, None] + area2 - inter) # iou = inter / (area1 + area2 - inter) + + +def wh_iou(wh1, wh2): + # Returns the nxm IoU matrix. 
wh1 is nx2, wh2 is mx2 + wh1 = wh1[:, None] # [N,1,2] + wh2 = wh2[None] # [1,M,2] + inter = torch.min(wh1, wh2).prod(2) # [N,M] + return inter / (wh1.prod(2) + wh2.prod(2) - inter) # iou = inter / (area1 + area2 - inter) + + +def non_max_suppression(prediction, conf_thres=0.1, iou_thres=0.6, merge=False, classes=None, agnostic=False): + """Performs Non-Maximum Suppression (NMS) on inference results + + Returns: + detections with shape: nx6 (x1, y1, x2, y2, conf, cls) + """ + + nc = prediction[0].shape[1] - 5 # number of classes + xc = prediction[..., 4] > conf_thres # candidates + + # Settings + min_wh, max_wh = 2, 4096 # (pixels) minimum and maximum box width and height + max_det = 300 # maximum number of detections per image + time_limit = 10.0 # seconds to quit after + redundant = True # require redundant detections + multi_label = nc > 1 # multiple labels per box (adds 0.5ms/img) + + t = time.time() + output = [torch.zeros(0, 6)] * prediction.shape[0] + for xi, x in enumerate(prediction): # image index, image inference + # Apply constraints + # x[((x[..., 2:4] < min_wh) | (x[..., 2:4] > max_wh)).any(1), 4] = 0 # width-height + x = x[xc[xi]] # confidence + + # If none remain process next image + if not x.shape[0]: + continue + + # Compute conf + x[:, 5:] *= x[:, 4:5] # conf = obj_conf * cls_conf + + # Box (center x, center y, width, height) to (x1, y1, x2, y2) + box = xywh2xyxy(x[:, :4]) + + # Detections matrix nx6 (xyxy, conf, cls) + if multi_label: + i, j = (x[:, 5:] > conf_thres).nonzero(as_tuple=False).T + x = torch.cat((box[i], x[i, j + 5, None], j[:, None].float()), 1) + else: # best class only + conf, j = x[:, 5:].max(1, keepdim=True) + x = torch.cat((box, conf, j.float()), 1)[conf.view(-1) > conf_thres] + + # Filter by class + if classes: + x = x[(x[:, 5:6] == torch.tensor(classes, device=x.device)).any(1)] + + # Apply finite constraint + # if not torch.isfinite(x).all(): + # x = x[torch.isfinite(x).all(1)] + + # If none remain process next image + n = x.shape[0] # number of boxes + if not n: + continue + + # Sort by confidence + # x = x[x[:, 4].argsort(descending=True)] + + # Batched NMS + c = x[:, 5:6] * (0 if agnostic else max_wh) # classes + boxes, scores = x[:, :4] + c, x[:, 4] # boxes (offset by class), scores + i = torch.ops.torchvision.nms(boxes, scores, iou_thres) + if i.shape[0] > max_det: # limit detections + i = i[:max_det] + if merge and (1 < n < 3E3): # Merge NMS (boxes merged using weighted mean) + # update boxes as boxes(i,4) = weights(i,n) * boxes(n,4) + iou = box_iou(boxes[i], boxes) > iou_thres # iou matrix + weights = iou * scores[None] # box weights + x[i, :4] = torch.mm(weights, x[:, :4]).float() / weights.sum(1, keepdim=True) # merged boxes + if redundant: + i = i[iou.sum(1) > 1] # require redundancy + + output[xi] = x[i] + if (time.time() - t) > time_limit: + break # time limit exceeded + + return output + + +def strip_optimizer(f='weights/best.pt', s=''): # from utils.general import *; strip_optimizer() + # Strip optimizer from 'f' to finalize training, optionally save as 's' + x = torch.load(f, map_location=torch.device('cpu')) + x['optimizer'] = None + x['training_results'] = None + x['epoch'] = -1 + x['model'].half() # to FP16 + for p in x['model'].parameters(): + p.requires_grad = False + torch.save(x, s or f) + mb = os.path.getsize(s or f) / 1E6 # filesize + print('Optimizer stripped from %s,%s %.1fMB' % (f, (' saved as %s,' % s) if s else '', mb)) + + +def print_mutation(hyp, results, yaml_file='hyp_evolved.yaml', bucket=''): + # Print mutation 
results to evolve.txt (for use with train.py --evolve) + a = '%10s' * len(hyp) % tuple(hyp.keys()) # hyperparam keys + b = '%10.3g' * len(hyp) % tuple(hyp.values()) # hyperparam values + c = '%10.4g' * len(results) % results # results (P, R, mAP@0.5, mAP@0.5:0.95, val_losses x 3) + print('\n%s\n%s\nEvolved fitness: %s\n' % (a, b, c)) + + if bucket: + url = 'gs://%s/evolve.txt' % bucket + if gsutil_getsize(url) > (os.path.getsize('evolve.txt') if os.path.exists('evolve.txt') else 0): + os.system('gsutil cp %s .' % url) # download evolve.txt if larger than local + + with open('evolve.txt', 'a') as f: # append result + f.write(c + b + '\n') + x = np.unique(np.loadtxt('evolve.txt', ndmin=2), axis=0) # load unique rows + x = x[np.argsort(-fitness(x))] # sort + np.savetxt('evolve.txt', x, '%10.3g') # save sort by fitness + + # Save yaml + for i, k in enumerate(hyp.keys()): + hyp[k] = float(x[0, i + 7]) + with open(yaml_file, 'w') as f: + results = tuple(x[0, :7]) + c = '%10.4g' * len(results) % results # results (P, R, mAP@0.5, mAP@0.5:0.95, val_losses x 3) + f.write('# Hyperparameter Evolution Results\n# Generations: %g\n# Metrics: ' % len(x) + c + '\n\n') + yaml.dump(hyp, f, sort_keys=False) + + if bucket: + os.system('gsutil cp evolve.txt %s gs://%s' % (yaml_file, bucket)) # upload + + +def apply_classifier(x, model, img, im0): + # applies a second stage classifier to yolo outputs + im0 = [im0] if isinstance(im0, np.ndarray) else im0 + for i, d in enumerate(x): # per image + if d is not None and len(d): + d = d.clone() + + # Reshape and pad cutouts + b = xyxy2xywh(d[:, :4]) # boxes + b[:, 2:] = b[:, 2:].max(1)[0].unsqueeze(1) # rectangle to square + b[:, 2:] = b[:, 2:] * 1.3 + 30 # pad + d[:, :4] = xywh2xyxy(b).long() + + # Rescale boxes from img_size to im0 size + scale_coords(img.shape[2:], d[:, :4], im0[i].shape) + + # Classes + pred_cls1 = d[:, 5].long() + ims = [] + for j, a in enumerate(d): # per item + cutout = im0[i][int(a[1]):int(a[3]), int(a[0]):int(a[2])] + im = cv2.resize(cutout, (224, 224)) # BGR + # cv2.imwrite('test%i.jpg' % j, cutout) + + im = im[:, :, ::-1].transpose(2, 0, 1) # BGR to RGB, to 3x416x416 + im = np.ascontiguousarray(im, dtype=np.float32) # uint8 to float32 + im /= 255.0 # 0 - 255 to 0.0 - 1.0 + ims.append(im) + + pred_cls2 = model(torch.Tensor(ims).to(d.device)).argmax(1) # classifier prediction + x[i] = x[i][pred_cls1 == pred_cls2] # retain matching class detections + + return x + + +def increment_path(path, exist_ok=True, sep=''): + # Increment path, i.e. runs/exp --> runs/exp{sep}0, runs/exp{sep}1 etc. + path = Path(path) # os-agnostic + if (path.exists() and exist_ok) or (not path.exists()): + return str(path) + else: + dirs = glob.glob(f"{path}{sep}*") # similar paths + matches = [re.search(rf"%s{sep}(\d+)" % path.stem, d) for d in dirs] + i = [int(m.groups()[0]) for m in matches if m] # indices + n = max(i) + 1 if i else 2 # increment number + return f"{path}{sep}{n}" # update path diff --git a/utils/google_app_engine/Dockerfile b/utils/google_app_engine/Dockerfile new file mode 100644 index 0000000..0155618 --- /dev/null +++ b/utils/google_app_engine/Dockerfile @@ -0,0 +1,25 @@ +FROM gcr.io/google-appengine/python + +# Create a virtualenv for dependencies. This isolates these packages from +# system-level packages. +# Use -p python3 or -p python3.7 to select python version. Default is version 2. +RUN virtualenv /env -p python3 + +# Setting these environment variables are the same as running +# source /env/bin/activate. 
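+# (note) the two ENV lines below are what activation would do: point VIRTUAL_ENV at
+# the env and put its bin/ first on PATH so its python and pip are picked up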
+ENV VIRTUAL_ENV /env +ENV PATH /env/bin:$PATH + +RUN apt-get update && apt-get install -y python-opencv + +# Copy the application's requirements.txt and run pip to install all +# dependencies into the virtualenv. +ADD requirements.txt /app/requirements.txt +RUN pip install -r /app/requirements.txt + +# Add the application source code. +ADD . /app + +# Run a WSGI server to serve the application. gunicorn must be declared as +# a dependency in requirements.txt. +CMD gunicorn -b :$PORT main:app diff --git a/utils/google_app_engine/additional_requirements.txt b/utils/google_app_engine/additional_requirements.txt new file mode 100644 index 0000000..5fcc305 --- /dev/null +++ b/utils/google_app_engine/additional_requirements.txt @@ -0,0 +1,4 @@ +# add these requirements in your app on top of the existing ones +pip==18.1 +Flask==1.0.2 +gunicorn==19.9.0 diff --git a/utils/google_app_engine/app.yaml b/utils/google_app_engine/app.yaml new file mode 100644 index 0000000..ac29d10 --- /dev/null +++ b/utils/google_app_engine/app.yaml @@ -0,0 +1,14 @@ +runtime: custom +env: flex + +service: yolov5app + +liveness_check: + initial_delay_sec: 600 + +manual_scaling: + instances: 1 +resources: + cpu: 1 + memory_gb: 4 + disk_size_gb: 20 \ No newline at end of file diff --git a/utils/google_utils.py b/utils/google_utils.py new file mode 100644 index 0000000..4f3241e --- /dev/null +++ b/utils/google_utils.py @@ -0,0 +1,120 @@ +# Google utils: https://cloud.google.com/storage/docs/reference/libraries + +import os +import platform +import subprocess +import time +from pathlib import Path + +import torch + + +def gsutil_getsize(url=''): + # gs://bucket/file size https://cloud.google.com/storage/docs/gsutil/commands/du + s = subprocess.check_output('gsutil du %s' % url, shell=True).decode('utf-8') + return eval(s.split(' ')[0]) if len(s) else 0 # bytes + + +def attempt_download(weights): + # Attempt to download pretrained weights if not found locally + weights = weights.strip().replace("'", '') + file = Path(weights).name + + msg = weights + ' missing, try downloading from https://github.com/ultralytics/yolov5/releases/' + models = ['yolov5s.pt', 'yolov5m.pt', 'yolov5l.pt', 'yolov5x.pt'] # available models + + if file in models and not os.path.isfile(weights): + # Google Drive + # d = {'yolov5s.pt': '1R5T6rIyy3lLwgFXNms8whc-387H0tMQO', + # 'yolov5m.pt': '1vobuEExpWQVpXExsJ2w-Mbf3HJjWkQJr', + # 'yolov5l.pt': '1hrlqD1Wdei7UT4OgT785BEk1JwnSvNEV', + # 'yolov5x.pt': '1mM8aZJlWTxOg7BZJvNUMrTnA2AbeCVzS'} + # r = gdrive_download(id=d[file], name=weights) if file in d else 1 + # if r == 0 and os.path.exists(weights) and os.path.getsize(weights) > 1E6: # check + # return + + try: # GitHub + url = 'https://github.com/ultralytics/yolov5/releases/download/v3.1/' + file + print('Downloading %s to %s...' % (url, weights)) + torch.hub.download_url_to_file(url, weights) + assert os.path.exists(weights) and os.path.getsize(weights) > 1E6 # check + except Exception as e: # GCP + print('Download error: %s' % e) + url = 'https://storage.googleapis.com/ultralytics/yolov5/ckpt/' + file + print('Downloading %s to %s...' 
% (url, weights))
+            r = os.system('curl -L %s -o %s' % (url, weights))  # torch.hub.download_url_to_file(url, weights)
+        finally:
+            if not (os.path.exists(weights) and os.path.getsize(weights) > 1E6):  # check
+                os.remove(weights) if os.path.exists(weights) else None  # remove partial downloads
+                print('ERROR: Download failure: %s' % msg)
+            print('')
+            return
+
+
+def gdrive_download(id='1n_oKgR81BJtqk75b00eAjdv03qVCQn2f', name='coco128.zip'):
+    # Downloads a file from Google Drive. from utils.google_utils import *; gdrive_download()
+    t = time.time()
+
+    print('Downloading https://drive.google.com/uc?export=download&id=%s as %s... ' % (id, name), end='')
+    os.remove(name) if os.path.exists(name) else None  # remove existing
+    os.remove('cookie') if os.path.exists('cookie') else None
+
+    # Attempt file download
+    out = "NUL" if platform.system() == "Windows" else "/dev/null"
+    os.system('curl -c ./cookie -s -L "drive.google.com/uc?export=download&id=%s" > %s ' % (id, out))
+    if os.path.exists('cookie'):  # large file
+        s = 'curl -Lb ./cookie "drive.google.com/uc?export=download&confirm=%s&id=%s" -o %s' % (get_token(), id, name)
+    else:  # small file
+        s = 'curl -s -L -o %s "drive.google.com/uc?export=download&id=%s"' % (name, id)
+    r = os.system(s)  # execute, capture return
+    os.remove('cookie') if os.path.exists('cookie') else None
+
+    # Error check
+    if r != 0:
+        os.remove(name) if os.path.exists(name) else None  # remove partial
+        print('Download error ')  # raise Exception('Download error')
+        return r
+
+    # Unzip if archive
+    if name.endswith('.zip'):
+        print('unzipping... ', end='')
+        os.system('unzip -q %s' % name)  # unzip
+        os.remove(name)  # remove zip to free space
+
+    print('Done (%.1fs)' % (time.time() - t))
+    return r
+
+
+def get_token(cookie="./cookie"):
+    with open(cookie) as f:
+        for line in f:
+            if "download" in line:
+                return line.split()[-1]
+    return ""
+
+# def upload_blob(bucket_name, source_file_name, destination_blob_name):
+#     # Uploads a file to a bucket
+#     # https://cloud.google.com/storage/docs/uploading-objects#storage-upload-object-python
+#
+#     storage_client = storage.Client()
+#     bucket = storage_client.get_bucket(bucket_name)
+#     blob = bucket.blob(destination_blob_name)
+#
+#     blob.upload_from_filename(source_file_name)
+#
+#     print('File {} uploaded to {}.'.format(
+#         source_file_name,
+#         destination_blob_name))
+#
+#
+# def download_blob(bucket_name, source_blob_name, destination_file_name):
+#     # Downloads a blob from a bucket
+#     storage_client = storage.Client()
+#     bucket = storage_client.get_bucket(bucket_name)
+#     blob = bucket.blob(source_blob_name)
+#
+#     blob.download_to_filename(destination_file_name)
+#
+#     print('Blob {} downloaded to {}.'.format(
+#         source_blob_name,
+#         destination_file_name))
diff --git a/utils/init_params.yaml b/utils/init_params.yaml
new file mode 100644
index 0000000..2615b7b
--- /dev/null
+++ b/utils/init_params.yaml
@@ -0,0 +1,17 @@
+model_type: onnx
+
+grid20_path: ../generate_npy/20_640x640.npy
+grid40_path: ../generate_npy/40_640x640.npy
+grid80_path: ../generate_npy/80_640x640.npy
+
+model_yaml_path: ./models/yolov5s-noupsample.yaml
+onnx_path: ./yolov5s-noupsample.onnx
+
+input_w: 640
+input_h: 640
+# number of classes
+nc: 80
+
+conf_thres: 0.3
+iou_thres: 0.5
+e2e_coco: True
\ No newline at end of file
diff --git a/utils/init_paramsnef.yaml b/utils/init_paramsnef.yaml
new file mode 100644
index 0000000..954b21b
--- /dev/null
+++ b/utils/init_paramsnef.yaml
@@ -0,0 +1,24 @@
+# Model type
+model_type: onnx
+
+# NEF model path (make sure this points at your final model)
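+# (note) a .nef file is the compiled binary produced by the Kneron toolchain; this
+# absolute path is machine-specific and must point at your own build output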
+model_path: "/data1/kneron_flow/models_720.nef"
+
+# YOLOv5 input size
+input_w: 640
+input_h: 640
+
+# Number of classes (this model uses 6)
+nc: 6
+
+# Confidence threshold (affects how many detections are returned)
+conf_thres: 0.3
+iou_thres: 0.5
+
+# Whether to use COCO-format output (set to False if your model is not COCO-format)
+e2e_coco: False
+
+# Anchor grid `.npy` file paths (make sure they exist)
+grid20_path: "/workspace/yolov5/npy/20_640x640.npy"
+grid40_path: "/workspace/yolov5/npy/40_640x640.npy"
+grid80_path: "/workspace/yolov5/npy/80_640x640.npy"
diff --git a/utils/loss.py b/utils/loss.py
new file mode 100644
index 0000000..06d986d
--- /dev/null
+++ b/utils/loss.py
@@ -0,0 +1,179 @@
+# Loss functions
+
+import torch
+import torch.nn as nn
+
+from utils.general import bbox_iou
+from utils.torch_utils import is_parallel
+
+
+def smooth_BCE(eps=0.1):  # https://github.com/ultralytics/yolov3/issues/238#issuecomment-598028441
+    # return positive, negative label smoothing BCE targets
+    return 1.0 - 0.5 * eps, 0.5 * eps
+
+
+class BCEBlurWithLogitsLoss(nn.Module):
+    # BCEWithLogitsLoss() with reduced missing label effects.
+    def __init__(self, alpha=0.05):
+        super(BCEBlurWithLogitsLoss, self).__init__()
+        self.loss_fcn = nn.BCEWithLogitsLoss(reduction='none')  # must be nn.BCEWithLogitsLoss()
+        self.alpha = alpha
+
+    def forward(self, pred, true):
+        loss = self.loss_fcn(pred, true)
+        pred = torch.sigmoid(pred)  # prob from logits
+        dx = pred - true  # reduce only missing label effects
+        # dx = (pred - true).abs()  # reduce missing label and false label effects
+        alpha_factor = 1 - torch.exp((dx - 1) / (self.alpha + 1e-4))
+        loss *= alpha_factor
+        return loss.mean()
+
+
+class FocalLoss(nn.Module):
+    # Wraps focal loss around existing loss_fcn(), i.e. criteria = FocalLoss(nn.BCEWithLogitsLoss(), gamma=1.5)
+    def __init__(self, loss_fcn, gamma=1.5, alpha=0.25):
+        super(FocalLoss, self).__init__()
+        self.loss_fcn = loss_fcn  # must be nn.BCEWithLogitsLoss()
+        self.gamma = gamma
+        self.alpha = alpha
+        self.reduction = loss_fcn.reduction
+        self.loss_fcn.reduction = 'none'  # required to apply FL to each element
+
+    def forward(self, pred, true):
+        loss = self.loss_fcn(pred, true)
+        # p_t = torch.exp(-loss)
+        # loss *= self.alpha * (1.000001 - p_t) ** self.gamma  # non-zero power for gradient stability
+
+        # TF implementation https://github.com/tensorflow/addons/blob/v0.7.1/tensorflow_addons/losses/focal_loss.py
+        pred_prob = torch.sigmoid(pred)  # prob from logits
+        p_t = true * pred_prob + (1 - true) * (1 - pred_prob)
+        alpha_factor = true * self.alpha + (1 - true) * (1 - self.alpha)
+        modulating_factor = (1.0 - p_t) ** self.gamma
+        loss *= alpha_factor * modulating_factor
+
+        if self.reduction == 'mean':
+            return loss.mean()
+        elif self.reduction == 'sum':
+            return loss.sum()
+        else:  # 'none'
+            return loss
+
+
+def compute_loss(p, targets, model):  # predictions, targets, model
+    device = targets.device
+    lcls, lbox, lobj = torch.zeros(1, device=device), torch.zeros(1, device=device), torch.zeros(1, device=device)
+    tcls, tbox, indices, anchors = build_targets(p, targets, model)  # targets
+    h = model.hyp  # hyperparameters
+
+    # Define criteria
+    BCEcls = nn.BCEWithLogitsLoss(pos_weight=torch.Tensor([h['cls_pw']])).to(device)
+    BCEobj = nn.BCEWithLogitsLoss(pos_weight=torch.Tensor([h['obj_pw']])).to(device)
+
+    # Class label smoothing https://arxiv.org/pdf/1902.04103.pdf eqn 3
+    cp, cn = smooth_BCE(eps=0.0)
+
+    # Focal loss
+    g = h['fl_gamma']  # focal loss gamma
+    if g > 0:
+        BCEcls, BCEobj = FocalLoss(BCEcls, g), FocalLoss(BCEobj, g)
+
+    # Losses
+    nt = 0  # number of targets
+    no = len(p)  # number of outputs
+    balance = 
[4.0, 1.0, 0.4] if no == 3 else [4.0, 1.0, 0.4, 0.1] # P3-5 or P3-6 + for i, pi in enumerate(p): # layer index, layer predictions + b, a, gj, gi = indices[i] # image, anchor, gridy, gridx + tobj = torch.zeros_like(pi[..., 0], device=device) # target obj + + n = b.shape[0] # number of targets + if n: + nt += n # cumulative targets + ps = pi[b, a, gj, gi] # prediction subset corresponding to targets + + # Regression + pxy = ps[:, :2].sigmoid() * 2. - 0.5 + pwh = (ps[:, 2:4].sigmoid() * 2) ** 2 * anchors[i] + pbox = torch.cat((pxy, pwh), 1).to(device) # predicted box + iou = bbox_iou(pbox.T, tbox[i], x1y1x2y2=False, CIoU=True) # iou(prediction, target) + lbox += (1.0 - iou).mean() # iou loss + + # Objectness + tobj[b, a, gj, gi] = (1.0 - model.gr) + model.gr * iou.detach().clamp(0).type(tobj.dtype) # iou ratio + + # Classification + if model.nc > 1: # cls loss (only if multiple classes) + t = torch.full_like(ps[:, 5:], cn, device=device) # targets + t[range(n), tcls[i]] = cp + lcls += BCEcls(ps[:, 5:], t) # BCE + + # Append targets to text file + # with open('targets.txt', 'a') as file: + # [file.write('%11.5g ' * 4 % tuple(x) + '\n') for x in torch.cat((txy[i], twh[i]), 1)] + + lobj += BCEobj(pi[..., 4], tobj) * balance[i] # obj loss + + s = 3 / no # output count scaling + lbox *= h['box'] * s + lobj *= h['obj'] * s * (1.4 if no == 4 else 1.) + lcls *= h['cls'] * s + bs = tobj.shape[0] # batch size + + loss = lbox + lobj + lcls + return loss * bs, torch.cat((lbox, lobj, lcls, loss)).detach() + + +def build_targets(p, targets, model): + # Build targets for compute_loss(), input targets(image,class,x,y,w,h) + det = model.module.model[-1] if is_parallel(model) else model.model[-1] # Detect() module + na, nt = det.na, targets.shape[0] # number of anchors, targets + tcls, tbox, indices, anch = [], [], [], [] + gain = torch.ones(7, device=targets.device) # normalized to gridspace gain + ai = torch.arange(na, device=targets.device).float().view(na, 1).repeat(1, nt) # same as .repeat_interleave(nt) + targets = torch.cat((targets.repeat(na, 1, 1), ai[:, :, None]), 2) # append anchor indices + + g = 0.5 # bias + off = torch.tensor([[0, 0], + [1, 0], [0, 1], [-1, 0], [0, -1], # j,k,l,m + # [1, 1], [1, -1], [-1, 1], [-1, -1], # jk,jm,lk,lm + ], device=targets.device).float() * g # offsets + + for i in range(det.nl): + anchors = det.anchors[i] + gain[2:6] = torch.tensor(p[i].shape)[[3, 2, 3, 2]] # xyxy gain + + # Match targets to anchors + t = targets * gain + if nt: + # Matches + r = t[:, :, 4:6] / anchors[:, None] # wh ratio + j = torch.max(r, 1. / r).max(2)[0] < model.hyp['anchor_t'] # compare + # j = wh_iou(anchors, t[:, 4:6]) > model.hyp['iou_t'] # iou(3,n)=wh_iou(anchors(3,2), gwh(n,2)) + t = t[j] # filter + + # Offsets + gxy = t[:, 2:4] # grid xy + gxi = gain[[2, 3]] - gxy # inverse + j, k = ((gxy % 1. < g) & (gxy > 1.)).T + l, m = ((gxi % 1. 
+            j = torch.stack((torch.ones_like(j), j, k, l, m))
+            t = t.repeat((5, 1, 1))[j]
+            offsets = (torch.zeros_like(gxy)[None] + off[:, None])[j]
+        else:
+            t = targets[0]
+            offsets = 0
+
+        # Define
+        b, c = t[:, :2].long().T  # image, class
+        gxy = t[:, 2:4]  # grid xy
+        gwh = t[:, 4:6]  # grid wh
+        gij = (gxy - offsets).long()
+        gi, gj = gij.T  # grid xy indices
+
+        # Append
+        a = t[:, 6].long()  # anchor indices
+        indices.append((b, a, gj.clone().detach().to(torch.int64), gi.clone().detach().to(torch.int64)))
+        tbox.append(torch.cat((gxy - gij, gwh), 1))  # box
+        anch.append(anchors[a])  # anchors
+        tcls.append(c)  # class
+
+    return tcls, tbox, indices, anch
diff --git a/utils/metrics.py b/utils/metrics.py
new file mode 100644
index 0000000..d4a10db
--- /dev/null
+++ b/utils/metrics.py
@@ -0,0 +1,110 @@
+# Model validation metrics
+
+import matplotlib.pyplot as plt
+import numpy as np
+
+
+def fitness(x):
+    # Model fitness as a weighted combination of metrics
+    w = [0.0, 0.0, 0.1, 0.9]  # weights for [P, R, mAP@0.5, mAP@0.5:0.95]
+    return (x[:, :4] * w).sum(1)
+
+
+def ap_per_class(tp, conf, pred_cls, target_cls, plot=False, fname='precision-recall_curve.png'):
+    """ Compute the average precision, given the recall and precision curves.
+    Source: https://github.com/rafaelpadilla/Object-Detection-Metrics.
+    # Arguments
+        tp:  True positives (nparray, nx1 or nx10).
+        conf:  Objectness value from 0-1 (nparray).
+        pred_cls:  Predicted object classes (nparray).
+        target_cls:  True object classes (nparray).
+        plot:  Plot precision-recall curve at mAP@0.5
+        fname:  Plot filename
+    # Returns
+        The average precision as computed in py-faster-rcnn.
+    """
+
+    # Sort by objectness
+    i = np.argsort(-conf)
+    tp, conf, pred_cls = tp[i], conf[i], pred_cls[i]
+
+    # Find unique classes
+    unique_classes = np.unique(target_cls)
+
+    # Create Precision-Recall curve and compute AP for each class
+    px, py = np.linspace(0, 1, 1000), []  # for plotting
+    pr_score = 0.1  # score to evaluate P and R https://github.com/ultralytics/yolov3/issues/898
+    s = [unique_classes.shape[0], tp.shape[1]]  # number class, number iou thresholds (i.e. 10 for mAP0.5...0.95)
+    ap, p, r = np.zeros(s), np.zeros(s), np.zeros(s)
+    for ci, c in enumerate(unique_classes):
+        i = pred_cls == c
+        n_l = (target_cls == c).sum()  # number of labels
+        n_p = i.sum()  # number of predictions
+
+        if n_p == 0 or n_l == 0:
+            continue
+        else:
+            # Accumulate FPs and TPs
+            fpc = (1 - tp[i]).cumsum(0)
+            tpc = tp[i].cumsum(0)
+
+            # Recall
+            recall = tpc / (n_l + 1e-16)  # recall curve
+            r[ci] = np.interp(-pr_score, -conf[i], recall[:, 0])  # r at pr_score, negative x, xp because xp decreases
+
+            # Precision
+            precision = tpc / (tpc + fpc)  # precision curve
+            p[ci] = np.interp(-pr_score, -conf[i], precision[:, 0])  # p at pr_score
+
+            # AP from recall-precision curve
+            for j in range(tp.shape[1]):
+                ap[ci, j], mpre, mrec = compute_ap(recall[:, j], precision[:, j])
+                if j == 0:
+                    py.append(np.interp(px, mrec, mpre))  # precision at mAP@0.5
+
+    # Compute F1 score (harmonic mean of precision and recall)
+    f1 = 2 * p * r / (p + r + 1e-16)
+
+    if plot:
+        py = np.stack(py, axis=1)
+        fig, ax = plt.subplots(1, 1, figsize=(5, 5))
+        ax.plot(px, py, linewidth=0.5, color='grey')  # plot(recall, precision)
+        ax.plot(px, py.mean(1), linewidth=2, color='blue', label='all classes %.3f mAP@0.5' % ap[:, 0].mean())
+        ax.set_xlabel('Recall')
+        ax.set_ylabel('Precision')
+        ax.set_xlim(0, 1)
+        ax.set_ylim(0, 1)
+        plt.legend()
+        fig.tight_layout()
+        fig.savefig(fname, dpi=200)
+
+    return p, r, ap, f1, unique_classes.astype('int32')
+
+
+def compute_ap(recall, precision):
+    """ Compute the average precision, given the recall and precision curves.
+    Source: https://github.com/rbgirshick/py-faster-rcnn.
+    # Arguments
+        recall:    The recall curve (list).
+        precision: The precision curve (list).
+    # Returns
+        The average precision as computed in py-faster-rcnn.
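+    (Editor's note: with method = 'interp' below, AP is integrated over a 101-point
+    interpolation of the precision envelope, matching the COCO convention;
+    'continuous' would instead sum the exact area under the envelope.)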
+ """ + + # Append sentinel values to beginning and end + mrec = recall # np.concatenate(([0.], recall, [recall[-1] + 1E-3])) + mpre = precision # np.concatenate(([0.], precision, [0.])) + + # Compute the precision envelope + mpre = np.flip(np.maximum.accumulate(np.flip(mpre))) + + # Integrate area under curve + method = 'interp' # methods: 'continuous', 'interp' + if method == 'interp': + x = np.linspace(0, 1, 101) # 101-point interp (COCO) + ap = np.trapz(np.interp(x, mrec, mpre), x) # integrate + else: # 'continuous' + i = np.where(mrec[1:] != mrec[:-1])[0] # points where x axis (recall) changes + ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1]) # area under curve + + return ap, mpre, mrec diff --git a/utils/plots.py b/utils/plots.py new file mode 100644 index 0000000..2118efe --- /dev/null +++ b/utils/plots.py @@ -0,0 +1,383 @@ +# Plotting utils + +import glob +import math +import os +import random +from copy import copy +from pathlib import Path + +import cv2 +import matplotlib +import matplotlib.pyplot as plt +import numpy as np +import torch +import yaml +from PIL import Image +from scipy.signal import butter, filtfilt + +from utils.general import xywh2xyxy, xyxy2xywh +from utils.metrics import fitness + +# Settings +matplotlib.use('Agg') # for writing to files only + + +def color_list(): + # Return first 10 plt colors as (r,g,b) https://stackoverflow.com/questions/51350872/python-from-color-name-to-rgb + def hex2rgb(h): + return tuple(int(h[1 + i:1 + i + 2], 16) for i in (0, 2, 4)) + + return [hex2rgb(h) for h in plt.rcParams['axes.prop_cycle'].by_key()['color']] + + +def hist2d(x, y, n=100): + # 2d histogram used in labels.png and evolve.png + xedges, yedges = np.linspace(x.min(), x.max(), n), np.linspace(y.min(), y.max(), n) + hist, xedges, yedges = np.histogram2d(x, y, (xedges, yedges)) + xidx = np.clip(np.digitize(x, xedges) - 1, 0, hist.shape[0] - 1) + yidx = np.clip(np.digitize(y, yedges) - 1, 0, hist.shape[1] - 1) + return np.log(hist[xidx, yidx]) + + +def butter_lowpass_filtfilt(data, cutoff=1500, fs=50000, order=5): + # https://stackoverflow.com/questions/28536191/how-to-filter-smooth-with-scipy-numpy + def butter_lowpass(cutoff, fs, order): + nyq = 0.5 * fs + normal_cutoff = cutoff / nyq + return butter(order, normal_cutoff, btype='low', analog=False) + + b, a = butter_lowpass(cutoff, fs, order=order) + return filtfilt(b, a, data) # forward-backward filter + + +def plot_one_box(x, img, color=None, label=None, line_thickness=None): + # Plots one bounding box on image img + tl = line_thickness or round(0.002 * (img.shape[0] + img.shape[1]) / 2) + 1 # line/font thickness + color = color or [random.randint(0, 255) for _ in range(3)] + c1, c2 = (int(x[0]), int(x[1])), (int(x[2]), int(x[3])) + cv2.rectangle(img, c1, c2, color, thickness=tl, lineType=cv2.LINE_AA) + if label: + tf = max(tl - 1, 1) # font thickness + t_size = cv2.getTextSize(label, 0, fontScale=tl / 3, thickness=tf)[0] + c2 = c1[0] + t_size[0], c1[1] - t_size[1] - 3 + cv2.rectangle(img, c1, c2, color, -1, cv2.LINE_AA) # filled + cv2.putText(img, label, (c1[0], c1[1] - 2), 0, tl / 3, [225, 255, 255], thickness=tf, lineType=cv2.LINE_AA) + + +def plot_wh_methods(): # from utils.general import *; plot_wh_methods() + # Compares the two methods for width-height anchor multiplication + # https://github.com/ultralytics/yolov3/issues/168 + x = np.arange(-4.0, 4.0, .1) + ya = np.exp(x) + yb = torch.sigmoid(torch.from_numpy(x)).numpy() * 2 + + fig = plt.figure(figsize=(6, 3), dpi=150) + plt.plot(x, ya, '.-', 
+    plt.plot(x, yb ** 2, '.-', label='YOLOv5 ^2')
+    plt.plot(x, yb ** 1.6, '.-', label='YOLOv5 ^1.6')
+    plt.xlim(left=-4, right=4)
+    plt.ylim(bottom=0, top=6)
+    plt.xlabel('input')
+    plt.ylabel('output')
+    plt.grid()
+    plt.legend()
+    fig.tight_layout()
+    fig.savefig('comparison.png', dpi=200)
+
+
+def output_to_target(output, width, height):
+    # Convert model output to target format [batch_id, class_id, x, y, w, h, conf]
+    if isinstance(output, torch.Tensor):
+        output = output.cpu().numpy()
+
+    targets = []
+    for i, o in enumerate(output):
+        if o is not None:
+            if isinstance(o, torch.Tensor):
+                o = o.cpu().numpy()
+
+            for pred in o:
+                box = pred[:4]
+                w = (box[2] - box[0]) / width
+                h = (box[3] - box[1]) / height
+                x = box[0] / width + w / 2
+                y = box[1] / height + h / 2
+                conf = pred[4]
+                cls = int(pred[5])
+
+                targets.append([i, cls, x, y, w, h, conf])
+
+    return np.array(targets)
+
+
+def plot_images(images, targets, paths=None, fname='images.jpg', names=None, max_size=640, max_subplots=16):
+    # Plot image grid with labels
+
+    if isinstance(images, torch.Tensor):
+        images = images.cpu().float().numpy()
+    if isinstance(targets, torch.Tensor):
+        targets = targets.cpu().numpy()
+
+    # un-normalise
+    if np.max(images[0]) <= 1:
+        images *= 255
+
+    tl = 3  # line thickness
+    tf = max(tl - 1, 1)  # font thickness
+    bs, _, h, w = images.shape  # batch size, _, height, width
+    bs = min(bs, max_subplots)  # limit plot images
+    ns = np.ceil(bs ** 0.5)  # number of subplots (square)
+
+    # Check if we should resize
+    scale_factor = max_size / max(h, w)
+    if scale_factor < 1:
+        h = math.ceil(scale_factor * h)
+        w = math.ceil(scale_factor * w)
+
+    colors = color_list()  # list of colors
+    mosaic = np.full((int(ns * h), int(ns * w), 3), 255, dtype=np.uint8)  # init
+    for i, img in enumerate(images):
+        if i == max_subplots:  # if last batch has fewer images than we expect
+            break
+
+        block_x = int(w * (i // ns))
+        block_y = int(h * (i % ns))
+
+        img = img.transpose(1, 2, 0)
+        if scale_factor < 1:
+            img = cv2.resize(img, (w, h))
+
+        mosaic[block_y:block_y + h, block_x:block_x + w, :] = img
+        if len(targets) > 0:
+            image_targets = targets[targets[:, 0] == i]
+            boxes = xywh2xyxy(image_targets[:, 2:6]).T
+            classes = image_targets[:, 1].astype('int')
+            labels = image_targets.shape[1] == 6  # labels if no conf column
+            conf = None if labels else image_targets[:, 6]  # check for confidence presence (label vs pred)
+
+            boxes[[0, 2]] *= w
+            boxes[[0, 2]] += block_x
+            boxes[[1, 3]] *= h
+            boxes[[1, 3]] += block_y
+            for j, box in enumerate(boxes.T):
+                cls = int(classes[j])
+                color = colors[cls % len(colors)]
+                cls = names[cls] if names else cls
+                if labels or conf[j] > 0.25:  # 0.25 conf thresh
+                    label = '%s' % cls if labels else '%s %.1f' % (cls, conf[j])
+                    plot_one_box(box, mosaic, label=label, color=color, line_thickness=tl)
+
+        # Draw image filename labels
+        if paths:
+            label = Path(paths[i]).name[:40]  # trim to 40 char
+            t_size = cv2.getTextSize(label, 0, fontScale=tl / 3, thickness=tf)[0]
+            cv2.putText(mosaic, label, (block_x + 5, block_y + t_size[1] + 5), 0, tl / 3, [220, 220, 220], thickness=tf,
+                        lineType=cv2.LINE_AA)
+
+        # Image border
+        cv2.rectangle(mosaic, (block_x, block_y), (block_x + w, block_y + h), (255, 255, 255), thickness=3)
+
+    if fname:
+        r = min(1280. 
/ max(h, w) / ns, 1.0) # ratio to limit image size + mosaic = cv2.resize(mosaic, (int(ns * w * r), int(ns * h * r)), interpolation=cv2.INTER_AREA) + # cv2.imwrite(fname, cv2.cvtColor(mosaic, cv2.COLOR_BGR2RGB)) # cv2 save + Image.fromarray(mosaic).save(fname) # PIL save + return mosaic + + +def plot_lr_scheduler(optimizer, scheduler, epochs=300, save_dir=''): + # Plot LR simulating training for full epochs + optimizer, scheduler = copy(optimizer), copy(scheduler) # do not modify originals + y = [] + for _ in range(epochs): + scheduler.step() + y.append(optimizer.param_groups[0]['lr']) + plt.plot(y, '.-', label='LR') + plt.xlabel('epoch') + plt.ylabel('LR') + plt.grid() + plt.xlim(0, epochs) + plt.ylim(0) + plt.tight_layout() + plt.savefig(Path(save_dir) / 'LR.png', dpi=200) + + +def plot_test_txt(): # from utils.general import *; plot_test() + # Plot test.txt histograms + x = np.loadtxt('test.txt', dtype=np.float32) + box = xyxy2xywh(x[:, :4]) + cx, cy = box[:, 0], box[:, 1] + + fig, ax = plt.subplots(1, 1, figsize=(6, 6), tight_layout=True) + ax.hist2d(cx, cy, bins=600, cmax=10, cmin=0) + ax.set_aspect('equal') + plt.savefig('hist2d.png', dpi=300) + + fig, ax = plt.subplots(1, 2, figsize=(12, 6), tight_layout=True) + ax[0].hist(cx, bins=600) + ax[1].hist(cy, bins=600) + plt.savefig('hist1d.png', dpi=200) + + +def plot_targets_txt(): # from utils.general import *; plot_targets_txt() + # Plot targets.txt histograms + x = np.loadtxt('targets.txt', dtype=np.float32).T + s = ['x targets', 'y targets', 'width targets', 'height targets'] + fig, ax = plt.subplots(2, 2, figsize=(8, 8), tight_layout=True) + ax = ax.ravel() + for i in range(4): + ax[i].hist(x[i], bins=100, label='%.3g +/- %.3g' % (x[i].mean(), x[i].std())) + ax[i].legend() + ax[i].set_title(s[i]) + plt.savefig('targets.jpg', dpi=200) + + +def plot_study_txt(f='study.txt', x=None): # from utils.general import *; plot_study_txt() + # Plot study.txt generated by test.py + fig, ax = plt.subplots(2, 4, figsize=(10, 6), tight_layout=True) + ax = ax.ravel() + + fig2, ax2 = plt.subplots(1, 1, figsize=(8, 4), tight_layout=True) + for f in ['study/study_coco_yolov5%s.txt' % x for x in ['s', 'm', 'l', 'x']]: + y = np.loadtxt(f, dtype=np.float32, usecols=[0, 1, 2, 3, 7, 8, 9], ndmin=2).T + x = np.arange(y.shape[1]) if x is None else np.array(x) + s = ['P', 'R', 'mAP@.5', 'mAP@.5:.95', 't_inference (ms/img)', 't_NMS (ms/img)', 't_total (ms/img)'] + for i in range(7): + ax[i].plot(x, y[i], '.-', linewidth=2, markersize=8) + ax[i].set_title(s[i]) + + j = y[3].argmax() + 1 + ax2.plot(y[6, :j], y[3, :j] * 1E2, '.-', linewidth=2, markersize=8, + label=Path(f).stem.replace('study_coco_', '').replace('yolo', 'YOLO')) + + ax2.plot(1E3 / np.array([209, 140, 97, 58, 35, 18]), [34.6, 40.5, 43.0, 47.5, 49.7, 51.5], + 'k.-', linewidth=2, markersize=8, alpha=.25, label='EfficientDet') + + ax2.grid() + ax2.set_xlim(0, 30) + ax2.set_ylim(28, 50) + ax2.set_yticks(np.arange(30, 55, 5)) + ax2.set_xlabel('GPU Speed (ms/img)') + ax2.set_ylabel('COCO AP val') + ax2.legend(loc='lower right') + plt.savefig('study_mAP_latency.png', dpi=300) + plt.savefig(f.replace('.txt', '.png'), dpi=300) + + +def plot_labels(labels, save_dir=''): + # plot dataset labels + c, b = labels[:, 0], labels[:, 1:].transpose() # classes, boxes + nc = int(c.max() + 1) # number of classes + + fig, ax = plt.subplots(2, 2, figsize=(8, 8), tight_layout=True) + ax = ax.ravel() + ax[0].hist(c, bins=np.linspace(0, nc, nc + 1) - 0.5, rwidth=0.8) + ax[0].set_xlabel('classes') + ax[1].scatter(b[0], b[1], 
c=hist2d(b[0], b[1], 90), cmap='jet') + ax[1].set_xlabel('x') + ax[1].set_ylabel('y') + ax[2].scatter(b[2], b[3], c=hist2d(b[2], b[3], 90), cmap='jet') + ax[2].set_xlabel('width') + ax[2].set_ylabel('height') + plt.savefig(Path(save_dir) / 'labels.png', dpi=200) + plt.close() + + # seaborn correlogram + try: + import seaborn as sns + import pandas as pd + x = pd.DataFrame(b.transpose(), columns=['x', 'y', 'width', 'height']) + sns.pairplot(x, corner=True, diag_kind='hist', kind='scatter', markers='o', + plot_kws=dict(s=3, edgecolor=None, linewidth=1, alpha=0.02), + diag_kws=dict(bins=50)) + plt.savefig(Path(save_dir) / 'labels_correlogram.png', dpi=200) + plt.close() + except Exception as e: + pass + + +def plot_evolution(yaml_file='data/hyp.finetune.yaml'): # from utils.general import *; plot_evolution() + # Plot hyperparameter evolution results in evolve.txt + with open(yaml_file) as f: + hyp = yaml.load(f, Loader=yaml.FullLoader) + x = np.loadtxt('evolve.txt', ndmin=2) + f = fitness(x) + # weights = (f - f.min()) ** 2 # for weighted results + plt.figure(figsize=(10, 12), tight_layout=True) + matplotlib.rc('font', **{'size': 8}) + for i, (k, v) in enumerate(hyp.items()): + y = x[:, i + 7] + # mu = (y * weights).sum() / weights.sum() # best weighted result + mu = y[f.argmax()] # best single result + plt.subplot(6, 5, i + 1) + plt.scatter(y, f, c=hist2d(y, f, 20), cmap='viridis', alpha=.8, edgecolors='none') + plt.plot(mu, f.max(), 'k+', markersize=15) + plt.title('%s = %.3g' % (k, mu), fontdict={'size': 9}) # limit to 40 characters + if i % 5 != 0: + plt.yticks([]) + print('%15s: %.3g' % (k, mu)) + plt.savefig('evolve.png', dpi=200) + print('\nPlot saved as evolve.png') + + +def plot_results_overlay(start=0, stop=0): # from utils.general import *; plot_results_overlay() + # Plot training 'results*.txt', overlaying train and val losses + s = ['train', 'train', 'train', 'Precision', 'mAP@0.5', 'val', 'val', 'val', 'Recall', 'mAP@0.5:0.95'] # legends + t = ['Box', 'Objectness', 'Classification', 'P-R', 'mAP-F1'] # titles + for f in sorted(glob.glob('results*.txt') + glob.glob('../../Downloads/results*.txt')): + results = np.loadtxt(f, usecols=[2, 3, 4, 8, 9, 12, 13, 14, 10, 11], ndmin=2).T + n = results.shape[1] # number of rows + x = range(start, min(stop, n) if stop else n) + fig, ax = plt.subplots(1, 5, figsize=(14, 3.5), tight_layout=True) + ax = ax.ravel() + for i in range(5): + for j in [i, i + 5]: + y = results[j, x] + ax[i].plot(x, y, marker='.', label=s[j]) + # y_smooth = butter_lowpass_filtfilt(y) + # ax[i].plot(x, np.gradient(y_smooth), marker='.', label=s[j]) + + ax[i].set_title(t[i]) + ax[i].legend() + ax[i].set_ylabel(f) if i == 0 else None # add filename + fig.savefig(f.replace('.txt', '.png'), dpi=200) + + +def plot_results(start=0, stop=0, bucket='', id=(), labels=(), save_dir=''): + # from utils.general import *; plot_results(save_dir='runs/train/exp0') + # Plot training 'results*.txt' as seen in https://github.com/ultralytics/yolov5#reproduce-our-training + fig, ax = plt.subplots(2, 5, figsize=(12, 6)) + ax = ax.ravel() + s = ['Box', 'Objectness', 'Classification', 'Precision', 'Recall', + 'val Box', 'val Objectness', 'val Classification', 'mAP@0.5', 'mAP@0.5:0.95'] + if bucket: + # os.system('rm -rf storage.googleapis.com') + # files = ['https://storage.googleapis.com/%s/results%g.txt' % (bucket, x) for x in id] + files = ['results%g.txt' % x for x in id] + c = ('gsutil cp ' + '%s ' * len(files) + '.') % tuple('gs://%s/results%g.txt' % (bucket, x) for x in id) + 
os.system(c)
+    else:
+        files = glob.glob(str(Path(save_dir) / 'results*.txt')) + glob.glob('../../Downloads/results*.txt')
+        assert len(files), 'No results.txt files found in %s, nothing to plot.' % os.path.abspath(save_dir)
+    for fi, f in enumerate(files):
+        try:
+            results = np.loadtxt(f, usecols=[2, 3, 4, 8, 9, 12, 13, 14, 10, 11], ndmin=2).T
+            n = results.shape[1]  # number of rows
+            x = range(start, min(stop, n) if stop else n)
+            for i in range(10):
+                y = results[i, x]
+                if i in [0, 1, 2, 5, 6, 7]:
+                    y[y == 0] = np.nan  # don't show zero loss values
+                    # y /= y[0]  # normalize
+                label = labels[fi] if len(labels) else Path(f).stem
+                ax[i].plot(x, y, marker='.', label=label, linewidth=1, markersize=6)
+                ax[i].set_title(s[i])
+                # if i in [5, 6, 7]:  # share train and val loss y axes
+                #     ax[i].get_shared_y_axes().join(ax[i], ax[i - 5])
+        except Exception as e:
+            print('Warning: Plotting error for %s; %s' % (f, e))
+
+    fig.tight_layout()
+    ax[1].legend()
+    fig.savefig(Path(save_dir) / 'results.png', dpi=200)
diff --git a/utils/torch_utils.py b/utils/torch_utils.py
new file mode 100644
index 0000000..8b3098e
--- /dev/null
+++ b/utils/torch_utils.py
@@ -0,0 +1,242 @@
+# PyTorch utils
+
+import logging
+import math
+import os
+import time
+from contextlib import contextmanager
+from copy import deepcopy
+
+import torch
+import torch.backends.cudnn as cudnn
+import torch.nn as nn
+import torch.nn.functional as F
+import torchvision
+
+logger = logging.getLogger(__name__)
+
+
+@contextmanager
+def torch_distributed_zero_first(local_rank: int):
+    """
+    Context manager to make all processes in distributed training wait for each local_master to do something.
+    """
+    if local_rank not in [-1, 0]:
+        torch.distributed.barrier()
+    yield
+    if local_rank == 0:
+        torch.distributed.barrier()
+
+
+def init_torch_seeds(seed=0):
+    # Speed-reproducibility tradeoff https://pytorch.org/docs/stable/notes/randomness.html
+    torch.manual_seed(seed)
+    if seed == 0:  # slower, more reproducible
+        cudnn.deterministic = True
+        cudnn.benchmark = False
+    else:  # faster, less reproducible
+        cudnn.deterministic = False
+        cudnn.benchmark = True
+
+
+def select_device(device='', batch_size=None):
+    # device = 'cpu' or '0' or '0,1,2,3'
+    cpu_request = device.lower() == 'cpu'
+    if device and not cpu_request:  # if device requested other than 'cpu'
+        os.environ['CUDA_VISIBLE_DEVICES'] = device  # set environment variable
+        assert torch.cuda.is_available(), 'CUDA unavailable, invalid device %s requested' % device  # check availability
+
+    cuda = False if cpu_request else torch.cuda.is_available()
+    if cuda:
+        c = 1024 ** 2  # bytes to MB
+        ng = torch.cuda.device_count()
+        if ng > 1 and batch_size:  # check that batch_size is compatible with device_count
+            assert batch_size % ng == 0, 'batch-size %g not multiple of GPU count %g' % (batch_size, ng)
+        x = [torch.cuda.get_device_properties(i) for i in range(ng)]
+        s = f'Using torch {torch.__version__} '
+        for i in range(0, ng):
+            if i == 1:
+                s = ' ' * len(s)
+            logger.info("%sCUDA:%g (%s, %dMB)" % (s, i, x[i].name, x[i].total_memory / c))
+    else:
+        logger.info(f'Using torch {torch.__version__} CPU')
+
+    logger.info('')  # skip a line
+    return torch.device('cuda:0' if cuda else 'cpu')
+
+
+def time_synchronized():
+    torch.cuda.synchronize() if torch.cuda.is_available() else None
+    return time.time()
+
+
+def is_parallel(model):
+    return type(model) in (nn.parallel.DataParallel, nn.parallel.DistributedDataParallel)
+
+
+def intersect_dicts(da, db, exclude=()):
+    # Dictionary intersection of matching keys and shapes, omitting 'exclude' keys, using da values
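+    # Editor's note (hypothetical usage, not part of the original file): typically
+    # called when loading pretrained weights into a modified model, e.g.
+    #   sd = intersect_dicts(ckpt['model'].float().state_dict(), model.state_dict(), exclude=['anchor'])
+    #   model.load_state_dict(sd, strict=False)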
+    return {k: v for k, v in da.items() if k in db and not any(x in k for x in exclude) and v.shape == db[k].shape}
+
+
+def initialize_weights(model):
+    for m in model.modules():
+        t = type(m)
+        if t is nn.Conv2d:
+            pass  # nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
+        elif t is nn.BatchNorm2d:
+            m.eps = 1e-3
+            m.momentum = 0.03
+        # elif t in [nn.Hardswish, nn.LeakyReLU, nn.ReLU, nn.ReLU6]:
+        elif t in [nn.LeakyReLU, nn.ReLU, nn.ReLU6]:
+            m.inplace = True
+
+
+def find_modules(model, mclass=nn.Conv2d):
+    # Finds layer indices matching module class 'mclass'
+    return [i for i, m in enumerate(model.module_list) if isinstance(m, mclass)]
+
+
+def sparsity(model):
+    # Return global model sparsity
+    a, b = 0., 0.
+    for p in model.parameters():
+        a += p.numel()
+        b += (p == 0).sum()
+    return b / a
+
+
+def prune(model, amount=0.3):
+    # Prune model to requested global sparsity
+    import torch.nn.utils.prune as prune
+    print('Pruning model... ', end='')
+    for name, m in model.named_modules():
+        if isinstance(m, nn.Conv2d):
+            prune.l1_unstructured(m, name='weight', amount=amount)  # prune
+            prune.remove(m, 'weight')  # make permanent
+    print(' %.3g global sparsity' % sparsity(model))
+
+
+def fuse_conv_and_bn(conv, bn):
+    # Fuse convolution and batchnorm layers https://tehnokv.com/posts/fusing-batchnorm-and-conv/
+    fusedconv = nn.Conv2d(conv.in_channels,
+                          conv.out_channels,
+                          kernel_size=conv.kernel_size,
+                          stride=conv.stride,
+                          padding=conv.padding,
+                          groups=conv.groups,
+                          bias=True).requires_grad_(False).to(conv.weight.device)
+
+    # prepare filters
+    w_conv = conv.weight.clone().view(conv.out_channels, -1)
+    w_bn = torch.diag(bn.weight.div(torch.sqrt(bn.eps + bn.running_var)))
+    fusedconv.weight.copy_(torch.mm(w_bn, w_conv).view(fusedconv.weight.size()))
+
+    # prepare spatial bias
+    b_conv = torch.zeros(conv.weight.size(0), device=conv.weight.device) if conv.bias is None else conv.bias
+    b_bn = bn.bias - bn.weight.mul(bn.running_mean).div(torch.sqrt(bn.running_var + bn.eps))
+    fusedconv.bias.copy_(torch.mm(w_bn, b_conv.reshape(-1, 1)).reshape(-1) + b_bn)
+
+    return fusedconv
+
+
+def model_info(model, verbose=False, img_size=640):
+    # Model information. img_size may be int or list, i.e. 
img_size=640 or img_size=[640, 320] + n_p = sum(x.numel() for x in model.parameters()) # number parameters + n_g = sum(x.numel() for x in model.parameters() if x.requires_grad) # number gradients + if verbose: + print('%5s %40s %9s %12s %20s %10s %10s' % ('layer', 'name', 'gradient', 'parameters', 'shape', 'mu', 'sigma')) + for i, (name, p) in enumerate(model.named_parameters()): + name = name.replace('module_list.', '') + print('%5g %40s %9s %12g %20s %10.3g %10.3g' % + (i, name, p.requires_grad, p.numel(), list(p.shape), p.mean(), p.std())) + + try: # FLOPS + from thop import profile + stride = int(model.stride.max()) + flops = profile(deepcopy(model), inputs=(torch.zeros(1, 3, stride, stride),), verbose=False)[0] / 1E9 * 2 + img_size = img_size if isinstance(img_size, list) else [img_size, img_size] # expand if int/float + fs = ', %.1f GFLOPS' % (flops * img_size[0] / stride * img_size[1] / stride) # 640x640 FLOPS + except (ImportError, Exception): + fs = '' + + logger.info(f"Model Summary: {len(list(model.modules()))} layers, {n_p} parameters, {n_g} gradients{fs}") + + +def load_classifier(name='resnet101', n=2): + # Loads a pretrained model reshaped to n-class output + model = torchvision.models.__dict__[name](pretrained=True) + + # ResNet model properties + # input_size = [3, 224, 224] + # input_space = 'RGB' + # input_range = [0, 1] + # mean = [0.485, 0.456, 0.406] + # std = [0.229, 0.224, 0.225] + + # Reshape output to n classes + filters = model.fc.weight.shape[1] + model.fc.bias = nn.Parameter(torch.zeros(n), requires_grad=True) + model.fc.weight = nn.Parameter(torch.zeros(n, filters), requires_grad=True) + model.fc.out_features = n + return model + + +def scale_img(img, ratio=1.0, same_shape=False): # img(16,3,256,416), r=ratio + # scales img(bs,3,y,x) by ratio + if ratio == 1.0: + return img + else: + h, w = img.shape[2:] + s = (int(h * ratio), int(w * ratio)) # new size + img = F.interpolate(img, size=s, mode='bilinear', align_corners=False) # resize + if not same_shape: # pad/crop img + gs = 32 # (pixels) grid size + h, w = [math.ceil(x * ratio / gs) * gs for x in (h, w)] + return F.pad(img, [0, w - s[1], 0, h - s[0]], value=0.447) # value = imagenet mean + + +def copy_attr(a, b, include=(), exclude=()): + # Copy attributes from b to a, options to only include [...] and to exclude [...] + for k, v in b.__dict__.items(): + if (len(include) and k not in include) or k.startswith('_') or k in exclude: + continue + else: + setattr(a, k, v) + + +class ModelEMA: + """ Model Exponential Moving Average from https://github.com/rwightman/pytorch-image-models + Keep a moving average of everything in the model state_dict (parameters and buffers). + This is intended to allow functionality like + https://www.tensorflow.org/api_docs/python/tf/train/ExponentialMovingAverage + A smoothed version of the weights is necessary for some training schemes to perform well. + This class is sensitive where it is initialized in the sequence of model init, + GPU assignment and distributed training wrappers. 
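+    (Editor's note: self.decay below ramps as decay * (1 - exp(-updates / 2000)), so
+    early updates apply a small effective decay and the EMA tracks the raw weights
+    closely at the start of training before settling near `decay`.)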
+ """ + + def __init__(self, model, decay=0.9999, updates=0): + # Create EMA + self.ema = deepcopy(model.module if is_parallel(model) else model).eval() # FP32 EMA + # if next(model.parameters()).device.type != 'cpu': + # self.ema.half() # FP16 EMA + self.updates = updates # number of EMA updates + self.decay = lambda x: decay * (1 - math.exp(-x / 2000)) # decay exponential ramp (to help early epochs) + for p in self.ema.parameters(): + p.requires_grad_(False) + + def update(self, model): + # Update EMA parameters + with torch.no_grad(): + self.updates += 1 + d = self.decay(self.updates) + + msd = model.module.state_dict() if is_parallel(model) else model.state_dict() # model state_dict + for k, v in self.ema.state_dict().items(): + if v.dtype.is_floating_point: + v *= d + v += (1. - d) * msd[k].detach() + + def update_attr(self, model, include=(), exclude=('process_group', 'reducer')): + # Update EMA attributes + copy_attr(self.ema, model, include, exclude) diff --git a/weights/download_weights.sh b/weights/download_weights.sh new file mode 100644 index 0000000..314208c --- /dev/null +++ b/weights/download_weights.sh @@ -0,0 +1,10 @@ +#!/bin/bash +# Download latest models from https://github.com/ultralytics/yolov5/releases + +python - <