diff --git a/imcui/third_party/MatchAnything/LICENSE b/imcui/third_party/MatchAnything/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..d645695673349e3947e8e5ae42332d0ac3164cd7 --- /dev/null +++ b/imcui/third_party/MatchAnything/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. 
Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/imcui/third_party/MatchAnything/README.md b/imcui/third_party/MatchAnything/README.md new file mode 100644 index 0000000000000000000000000000000000000000..ceb1c2a3152cbf3c8d7e42190e54040efa38d23d --- /dev/null +++ b/imcui/third_party/MatchAnything/README.md @@ -0,0 +1,104 @@ +# MatchAnything: Universal Cross-Modality Image Matching with Large-Scale Pre-Training +### [Project Page](https://zju3dv.github.io/MatchAnything) | [Paper](??) + +> MatchAnything: Universal Cross-Modality Image Matching with Large-Scale Pre-Training\ +> [Xingyi He](https://hxy-123.github.io/), [Hao Yu](https://ritianyu.github.io/), [Sida Peng](https://pengsida.net), [Dongli Tan](https://github.com/Cuistiano), [Zehong Shen](https://zehongs.github.io), [Xiaowei Zhou](https://xzhou.me/), [Hujun Bao](http://www.cad.zju.edu.cn/home/bao/)\ +> arXiv 2025 + +


+ +## TODO List +- [x] Pre-trained models and inference code +- [x] HuggingFace demo +- [ ] Data generation and training code +- [ ] Finetune code to further train on your own data +- [ ] Incorporate more synthetic modalities and image generation methods + +## Quick Start + +### [HuggingFace demo for MatchAnything](https://huggingface.co/spaces/LittleFrog/MatchAnything) + +## Setup +Create the Python environment by: +``` +conda env create -f environment.yaml +conda activate env +``` +We have tested our code on a device with CUDA 11.7. + +Download the pretrained weights from [here](https://drive.google.com/file/d/12L3g9-w8rR9K2L4rYaGaDJ7NqX1D713d/view?usp=sharing) and place them under the repo directory. Then unzip them by running: +``` +unzip weights.zip +rm -rf weights.zip +``` + +## Test +We evaluate the models pre-trained by our framework using a single network weight across all cross-modality matching and registration tasks. + +### Data Preparation +Download the `test_data` directory from [here](https://drive.google.com/drive/folders/1jpxIOcgnQfl9IEPPifdXQ7S7xuj9K4j7?usp=sharing) and place it under `repo_directory/data`. Then, unzip all datasets by: +```shell +cd repo_directory/data/test_data + +for file in *.zip; do + unzip "$file" && rm "$file" +done +``` + +The data structure should look like: +``` +repo_directory/data/test_data + - Liver_CT-MR + - havard_medical_matching + - remote_sense_thermal + - MTV_cross_modal_data + - thermal_visible_ground + - visible_sar_dataset + - visible_vectorized_map +``` + +### Evaluation +```shell +# For tomography datasets: +sh scripts/evaluate/eval_liver_ct_mr.sh +sh scripts/evaluate/eval_harvard_brain.sh + +# For visible-thermal datasets: +sh scripts/evaluate/eval_thermal_remote_sense.sh +sh scripts/evaluate/eval_thermal_mtv.sh +sh scripts/evaluate/eval_thermal_ground.sh + +# For visible-SAR dataset: +sh scripts/evaluate/eval_visible_sar.sh + +# For visible-vectorized map dataset: +sh scripts/evaluate/eval_visible_vectorized_map.sh +``` + +# Citation + +If you find this code useful for your research, please use the following BibTeX entry. + +``` +@inproceedings{he2025matchanything, +title={MatchAnything: Universal Cross-Modality Image Matching with Large-Scale Pre-Training}, +author={He, Xingyi and Yu, Hao and Peng, Sida and Tan, Dongli and Shen, Zehong and Bao, Hujun and Zhou, Xiaowei}, +booktitle={Arxiv}, +year={2025} +} +``` + +# Acknowledgement +We thank the authors of +[ELoFTR](https://github.com/zju3dv/EfficientLoFTR), +[ROMA](https://github.com/Parskatt/RoMa) for their great work, without which our project/code would not be possible.
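+
+## Programmatic Usage (Sketch)
+The evaluation scripts above drive the models through `tools/evaluate_datasets.py`. The snippet below is a minimal, *unverified* sketch of invoking a pretrained matcher directly from Python via the Lightning wrapper in `src/lightning/lightning_loftr.py`. Treat it as a sketch, not a supported API: it assumes the repo root is on `PYTHONPATH` with `configs/` importable as a package, that the matcher consumes grayscale `1x1xHxW` tensors in `[0, 1]` under `image0`/`image1`, and that matched keypoints come back under `mkpts0_f`/`mkpts1_f` (only `mconf` and the input keys are visible in this codebase excerpt); check the evaluation tool for the exact preprocessing.
+```python
+import cv2
+import torch
+
+from configs.models import eloftr_model  # noqa: F401 -- importing applies the ELoFTR overrides to the shared config
+from src.config.default import get_cfg_defaults
+from src.lightning.lightning_loftr import PL_LoFTR
+
+cfg = get_cfg_defaults()             # clone of the defaults, including the overrides applied above
+cfg.METHOD = "matchanything_eloftr"  # selects the matcher inside PL_LoFTR
+matcher = PL_LoFTR(cfg, pretrained_ckpt="weights/matchanything_eloftr.ckpt",
+                   test_mode=True).matcher.eval().cuda()
+
+def load_gray(path, size=832):       # 832 matches --imgresize in the evaluation scripts
+    img = cv2.resize(cv2.imread(path, cv2.IMREAD_GRAYSCALE), (size, size))
+    return torch.from_numpy(img)[None, None].float().cuda() / 255.0
+
+# "query.png" / "reference.png" are placeholder file names
+batch = {"image0": load_gray("query.png"), "image1": load_gray("reference.png")}
+with torch.no_grad():
+    matcher(batch)                   # the matcher writes its results into the batch dict in-place
+print(batch["mkpts0_f"], batch["mkpts1_f"], batch["mconf"])  # assumed output keys
+```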
\ No newline at end of file diff --git a/imcui/third_party/MatchAnything/configs/models/eloftr_model.py b/imcui/third_party/MatchAnything/configs/models/eloftr_model.py new file mode 100644 index 0000000000000000000000000000000000000000..abbc030bb4181ea1d33d06f90797880ae03a18da --- /dev/null +++ b/imcui/third_party/MatchAnything/configs/models/eloftr_model.py @@ -0,0 +1,128 @@ +from src.config.default import _CN as cfg + +cfg.LOFTR.MATCH_COARSE.MATCH_TYPE = 'dual_softmax' + +cfg.TRAINER.CANONICAL_LR = 8e-3 +cfg.TRAINER.WARMUP_STEP = 1875 # 3 epochs +cfg.TRAINER.WARMUP_RATIO = 0.1 + +cfg.TRAINER.MSLR_MILESTONES = [4, 6, 8, 10, 12, 14, 16] + +# pose estimation +cfg.TRAINER.RANSAC_PIXEL_THR = 0.5 + +cfg.TRAINER.OPTIMIZER = "adamw" +cfg.TRAINER.ADAMW_DECAY = 0.1 + +cfg.LOFTR.MATCH_COARSE.TRAIN_COARSE_PERCENT = 0.1 + +cfg.LOFTR.MATCH_COARSE.MTD_SPVS = True +cfg.LOFTR.FINE.MTD_SPVS = True + +cfg.LOFTR.RESOLUTION = (8, 1) # options: [(8, 2), (16, 4)] +cfg.LOFTR.FINE_WINDOW_SIZE = 8 # window_size in fine_level +cfg.LOFTR.MATCH_FINE.THR = 0 +cfg.LOFTR.LOSS.FINE_TYPE = 'l2' # ['l2_with_std', 'l2'] + +cfg.TRAINER.EPI_ERR_THR = 5e-4 # recommendation: 5e-4 for ScanNet, 1e-4 for MegaDepth (from SuperGlue) + +cfg.LOFTR.MATCH_COARSE.SPARSE_SPVS = True + +# PAN +cfg.LOFTR.COARSE.PAN = True +cfg.LOFTR.COARSE.POOl_SIZE = 4 +cfg.LOFTR.COARSE.BN = False +cfg.LOFTR.COARSE.XFORMER = True +cfg.LOFTR.COARSE.ATTENTION = 'full' # options: ['linear', 'full'] + +cfg.LOFTR.FINE.PAN = False +cfg.LOFTR.FINE.POOl_SIZE = 4 +cfg.LOFTR.FINE.BN = False +cfg.LOFTR.FINE.XFORMER = False + +# noalign +cfg.LOFTR.ALIGN_CORNER = False + +# fp16 +cfg.DATASET.FP16 = False +cfg.LOFTR.FP16 = False + +# DEBUG +cfg.LOFTR.FP16LOG = False +cfg.LOFTR.MATCH_COARSE.FP16LOG = False + +# fine skip +cfg.LOFTR.FINE.SKIP = True + +# clip +cfg.TRAINER.GRADIENT_CLIPPING = 0.5 + +# backbone +cfg.LOFTR.BACKBONE_TYPE = 'RepVGG' + +# A1 +cfg.LOFTR.RESNETFPN.INITIAL_DIM = 64 +cfg.LOFTR.RESNETFPN.BLOCK_DIMS = [64, 128, 256] # s1, s2, s3 +cfg.LOFTR.COARSE.D_MODEL = 256 +cfg.LOFTR.FINE.D_MODEL = 64 + +# FPN backbone_inter_feat with coarse_attn.
+cfg.LOFTR.COARSE_FEAT_ONLY = True +cfg.LOFTR.INTER_FEAT = True +cfg.LOFTR.RESNETFPN.COARSE_FEAT_ONLY = True +cfg.LOFTR.RESNETFPN.INTER_FEAT = True + +# loop back spv coarse match +cfg.LOFTR.FORCE_LOOP_BACK = False + +# fix norm fine match +cfg.LOFTR.MATCH_FINE.NORMFINEM = True + +# loss cf weight +cfg.LOFTR.LOSS.COARSE_OVERLAP_WEIGHT = True +cfg.LOFTR.LOSS.FINE_OVERLAP_WEIGHT = True + +# leaky relu +cfg.LOFTR.RESNETFPN.LEAKY = False +cfg.LOFTR.COARSE.LEAKY = 0.01 + +# prevent FP16 OVERFLOW in dirty data +cfg.LOFTR.NORM_FPNFEAT = True +cfg.LOFTR.REPLACE_NAN = True + +# force mutual nearest +cfg.LOFTR.MATCH_COARSE.FORCE_NEAREST = True +cfg.LOFTR.MATCH_COARSE.THR = 0.1 + +# fix fine matching +cfg.LOFTR.MATCH_FINE.FIX_FINE_MATCHING = True + +# dwconv +cfg.LOFTR.COARSE.DWCONV = True + +# localreg +cfg.LOFTR.MATCH_FINE.LOCAL_REGRESS = True +cfg.LOFTR.LOSS.LOCAL_WEIGHT = 0.25 + +# it5 +cfg.LOFTR.EVAL_TIMES = 1 + +# rope +cfg.LOFTR.COARSE.ROPE = True + +# local regress temperature +cfg.LOFTR.MATCH_FINE.LOCAL_REGRESS_TEMPERATURE = 10.0 + +# SLICE +cfg.LOFTR.MATCH_FINE.LOCAL_REGRESS_SLICE = True +cfg.LOFTR.MATCH_FINE.LOCAL_REGRESS_SLICEDIM = 8 + +# inner with no mask [64,100] +cfg.LOFTR.MATCH_FINE.LOCAL_REGRESS_INNER = True +cfg.LOFTR.MATCH_FINE.LOCAL_REGRESS_NOMASK = True + +cfg.LOFTR.MATCH_FINE.TOPK = 1 +cfg.LOFTR.MATCH_COARSE.FINE_TOPK = 1 + +cfg.LOFTR.MATCH_COARSE.FP16MATMUL = False \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/configs/models/roma_model.py b/imcui/third_party/MatchAnything/configs/models/roma_model.py new file mode 100644 index 0000000000000000000000000000000000000000..207ab322242234242b5e558e962f57dce91d0566 --- /dev/null +++ b/imcui/third_party/MatchAnything/configs/models/roma_model.py @@ -0,0 +1,27 @@ +from src.config.default import _CN as cfg +cfg.ROMA.RESIZE_BY_STRETCH = True +cfg.DATASET.RESIZE_BY_STRETCH = True + +cfg.TRAINER.CANONICAL_LR = 8e-3 +cfg.TRAINER.WARMUP_STEP = 1875 # 3 epochs +cfg.TRAINER.WARMUP_RATIO = 0.1 + +cfg.TRAINER.MSLR_MILESTONES = [4, 6, 8, 10, 12, 14, 16, 18, 20] + +# pose estimation +cfg.TRAINER.RANSAC_PIXEL_THR = 0.5 + +cfg.TRAINER.OPTIMIZER = "adamw" +cfg.TRAINER.ADAMW_DECAY = 0.1 +cfg.TRAINER.OPTIMIZER_EPS = 5e-7 + +cfg.TRAINER.EPI_ERR_THR = 5e-4 + +# fp16 +cfg.DATASET.FP16 = False +cfg.LOFTR.FP16 = True + +# clip +cfg.TRAINER.GRADIENT_CLIPPING = 0.5 + +cfg.LOFTR.ROMA_LOSS.IGNORE_EMPTY_IN_SPARSE_MATCH_SPV = True \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/environment.yaml b/imcui/third_party/MatchAnything/environment.yaml new file mode 100644 index 0000000000000000000000000000000000000000..0d61b8e99932550f4c9c3f3a9e7e58e0b9bf68b4 --- /dev/null +++ b/imcui/third_party/MatchAnything/environment.yaml @@ -0,0 +1,14 @@ +name: env +channels: + - pytorch + - nvidia + - conda-forge + - defaults +dependencies: + - python=3.8 + - pytorch-cuda=11.7 + - pytorch=1.12.1 + - torchvision=0.13.1 + - pip + - pip: + - -r requirements.txt \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/notebooks/notebooks_utils/__init__.py b/imcui/third_party/MatchAnything/notebooks/notebooks_utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..81512278dfdfa73dd0915defa732b3b0e7db6af6 --- /dev/null +++ b/imcui/third_party/MatchAnything/notebooks/notebooks_utils/__init__.py @@ -0,0 +1 @@ +from .plotting import * \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/notebooks/notebooks_utils/plotting.py 
b/imcui/third_party/MatchAnything/notebooks/notebooks_utils/plotting.py new file mode 100644 index 0000000000000000000000000000000000000000..9993ed5b989d8babc87969e34d153ba3bcc05e1f --- /dev/null +++ b/imcui/third_party/MatchAnything/notebooks/notebooks_utils/plotting.py @@ -0,0 +1,344 @@ +import numpy as np +import matplotlib.pyplot as plt +import matplotlib +from matplotlib.colors import hsv_to_rgb +import pylab as pl +import matplotlib.cm as cm +from PIL import Image +import cv2 + + +def visualize_features(feat, img_h, img_w, save_path=None): + from sklearn.decomposition import PCA + pca = PCA(n_components=3, svd_solver="arpack") + img = pca.fit_transform(feat).reshape(img_h * 2, img_w, 3) + img_norm = cv2.normalize( + img, None, alpha=0, beta=255, norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8UC3 + ) + img_resized = cv2.resize( + img_norm, (img_w * 8, img_h * 2 * 8), interpolation=cv2.INTER_LINEAR + ) + img_colormap = img_resized + img1, img2 = img_colormap[: img_h * 8, :, :], img_colormap[img_h * 8 :, :, :] + img_gapped = np.hstack( + (img1, np.ones((img_h * 8, 10, 3), dtype=np.uint8) * 255, img2) + ) + if save_path is not None: + cv2.imwrite(save_path, img_gapped) + + fig, axes = plt.subplots(1, 1, dpi=200) + axes.imshow(img_gapped) + axes.get_yaxis().set_ticks([]) + axes.get_xaxis().set_ticks([]) + plt.tight_layout(pad=0.5) + return fig + +def make_matching_figure( + img0, + img1, + mkpts0, + mkpts1, + color, + kpts0=None, + kpts1=None, + text=[], + path=None, + draw_detection=False, + draw_match_type='corres', # ['color', 'corres', None] + r_normalize_factor=0.4, + white_center=True, + vertical=False, + use_position_color=False, + draw_local_window=False, + window_size=(9, 9), + plot_size_factor=1, # Point size and line width + anchor_pts0=None, + anchor_pts1=None, + rescale_thr=5000, +): + if (max(img0.shape) > rescale_thr) or (max(img1.shape) > rescale_thr): + scale_factor = 0.5 + img0 = np.array(Image.fromarray((img0 * 255).astype(np.uint8)).resize((int(img0.shape[1] * scale_factor), int(img0.shape[0] * scale_factor)))) / 255. + img1 = np.array(Image.fromarray((img1 * 255).astype(np.uint8)).resize((int(img1.shape[1] * scale_factor), int(img1.shape[0] * scale_factor)))) / 255. + mkpts0, mkpts1 = mkpts0 * scale_factor, mkpts1 * scale_factor + if kpts0 is not None: + kpts0, kpts1 = kpts0 * scale_factor, kpts1 * scale_factor + + # draw image pair + fig, axes = ( + plt.subplots(2, 1, figsize=(10, 6), dpi=600) + if vertical + else plt.subplots(1, 2, figsize=(10, 6), dpi=600) + ) + axes[0].imshow(img0, aspect='auto') + axes[1].imshow(img1, aspect='auto') + + # axes[0].imshow(img0, aspect='equal') + # axes[1].imshow(img1, aspect='equal') + for i in range(2): # clear all frames + axes[i].get_yaxis().set_ticks([]) + axes[i].get_xaxis().set_ticks([]) + for spine in axes[i].spines.values(): + spine.set_visible(False) + plt.tight_layout(pad=1) + + if use_position_color: + mean_coord = np.mean(mkpts0, axis=0) + x_center, y_center = mean_coord + # NOTE: set r_normalize_factor to a smaller number will make plotted figure more contrastive. 
+ position_color = matching_coord2color( + mkpts0, + x_center, + y_center, + r_normalize_factor=r_normalize_factor, + white_center=white_center, + ) + color[:, :3] = position_color + + if draw_detection and kpts0 is not None and kpts1 is not None: + # color = 'g' + color = 'r' + axes[0].scatter(kpts0[:, 0], kpts0[:, 1], c=color, s=1 * plot_size_factor) + axes[1].scatter(kpts1[:, 0], kpts1[:, 1], c=color, s=1 * plot_size_factor) + + if draw_match_type == 'corres': + # draw matches + fig.canvas.draw() + plt.pause(2.0) + transFigure = fig.transFigure.inverted() + fkpts0 = transFigure.transform(axes[0].transData.transform(mkpts0)) + fkpts1 = transFigure.transform(axes[1].transData.transform(mkpts1)) + fig.lines = [ + matplotlib.lines.Line2D( + (fkpts0[i, 0], fkpts1[i, 0]), + (fkpts0[i, 1], fkpts1[i, 1]), + transform=fig.transFigure, + c=color[i], + linewidth=1 * plot_size_factor, + ) + for i in range(len(mkpts0)) + ] + + axes[0].scatter(mkpts0[:, 0], mkpts0[:, 1], c=color, s=2 * plot_size_factor) + axes[1].scatter(mkpts1[:, 0], mkpts1[:, 1], c=color, s=2 * plot_size_factor) + elif draw_match_type == 'color': + # x_center = img0.shape[-1] / 2 + # y_center = img1.shape[-2] / 2 + + mean_coord = np.mean(mkpts0, axis=0) + x_center, y_center = mean_coord + # NOTE: setting r_normalize_factor to a smaller number makes the plotted figure more contrastive. + kpts_color = matching_coord2color( + mkpts0, + x_center, + y_center, + r_normalize_factor=r_normalize_factor, + white_center=white_center, + ) + axes[0].scatter(mkpts0[:, 0], mkpts0[:, 1], c=kpts_color, s=1 * plot_size_factor) + axes[1].scatter(mkpts1[:, 0], mkpts1[:, 1], c=kpts_color, s=1 * plot_size_factor) + + if draw_local_window: + anchor_pts0 = mkpts0 if anchor_pts0 is None else anchor_pts0 + anchor_pts1 = mkpts1 if anchor_pts1 is None else anchor_pts1 + plot_local_windows( + anchor_pts0, color=(1, 0, 0, 0.4), lw=0.2, ax_=0, window_size=window_size + ) + plot_local_windows( + anchor_pts1, color=(1, 0, 0, 0.4), lw=0.2, ax_=1, window_size=window_size + ) + + # put txts + txt_color = "k" if img0[:100, :200].mean() > 200 else "w" + fig.text( + 0.01, + 0.99, + "\n".join(text), + transform=fig.axes[0].transAxes, + fontsize=15, + va="top", + ha="left", + color=txt_color, + ) + plt.tight_layout(pad=1) + + # save or return figure + if path: + plt.savefig(str(path), bbox_inches="tight", pad_inches=0) + plt.close() + else: + return fig + +def make_triple_matching_figure( + img0, + img1, + img2, + mkpts01, + mkpts12, + color01, + color12, + text=[], + path=None, + draw_match=True, + r_normalize_factor=0.4, + white_center=True, + vertical=False, + draw_local_window=False, + window_size=(9, 9), + anchor_pts0=None, + anchor_pts1=None, +): + # draw image pair + fig, axes = ( + plt.subplots(3, 1, figsize=(10, 6), dpi=600) + if vertical + else plt.subplots(1, 3, figsize=(10, 6), dpi=600) + ) + axes[0].imshow(img0) + axes[1].imshow(img1) + axes[2].imshow(img2) + for i in range(3): # clear all frames + axes[i].get_yaxis().set_ticks([]) + axes[i].get_xaxis().set_ticks([]) + for spine in axes[i].spines.values(): + spine.set_visible(False) + plt.tight_layout(pad=1) + + if draw_match: + # draw matches for [0,1] + fig.canvas.draw() + transFigure = fig.transFigure.inverted() + fkpts0 = transFigure.transform(axes[0].transData.transform(mkpts01[0])) + fkpts1 = transFigure.transform(axes[1].transData.transform(mkpts01[1])) + fig.lines = [ + matplotlib.lines.Line2D( + (fkpts0[i, 0], fkpts1[i, 0]), + (fkpts0[i, 1], fkpts1[i, 1]), + transform=fig.transFigure,
c=color01[i], + linewidth=1, + ) + for i in range(len(mkpts01[0])) + ] + + axes[0].scatter(mkpts01[0][:, 0], mkpts01[0][:, 1], c=color01[:, :3], s=1) + axes[1].scatter(mkpts01[1][:, 0], mkpts01[1][:, 1], c=color01[:, :3], s=1) + + fig.canvas.draw() + # draw matches for [1,2] + fkpts1_1 = transFigure.transform(axes[1].transData.transform(mkpts12[0])) + fkpts2 = transFigure.transform(axes[2].transData.transform(mkpts12[1])) + fig.lines += [ + matplotlib.lines.Line2D( + (fkpts1_1[i, 0], fkpts2[i, 0]), + (fkpts1_1[i, 1], fkpts2[i, 1]), + transform=fig.transFigure, + c=color12[i], + linewidth=1, + ) + for i in range(len(mkpts12[0])) + ] + + axes[1].scatter(mkpts12[0][:, 0], mkpts12[0][:, 1], c=color12[:, :3], s=1) + axes[2].scatter(mkpts12[1][:, 0], mkpts12[1][:, 1], c=color12[:, :3], s=1) + + # # put txts + # txt_color = "k" if img0[:100, :200].mean() > 200 else "w" + # fig.text( + # 0.01, + # 0.99, + # "\n".join(text), + # transform=fig.axes[0].transAxes, + # fontsize=15, + # va="top", + # ha="left", + # color=txt_color, + # ) + plt.tight_layout(pad=0.1) + + # save or return figure + if path: + plt.savefig(str(path), bbox_inches="tight", pad_inches=0) + plt.close() + else: + return fig + + +def matching_coord2color(kpts, x_center, y_center, r_normalize_factor=0.4, white_center=True): + """ + Map keypoint coordinates to colors according to their position. + r_normalize_factor (maximum 1) adapts the color contrast to the spatial distribution of the points; + larger values make the points darker/brighter. + """ + if not white_center: + # dark center points + V, H = np.mgrid[0:1:10j, 0:1:360j] + S = np.ones_like(V) + else: + # white center points + S, H = np.mgrid[0:1:10j, 0:1:360j] + V = np.ones_like(S) + + HSV = np.dstack((H, S, V)) + RGB = hsv_to_rgb(HSV) + """ + # used to visualize hsv + pl.imshow(RGB, origin="lower", extent=[0, 360, 0, 1], aspect=150) + pl.xlabel("H") + pl.ylabel("S") + pl.title("$V_{HSV}=1$") + pl.show() + """ + kpts = np.copy(kpts) + distance = kpts - np.array([x_center, y_center])[None] + r_max = np.percentile(np.linalg.norm(distance, axis=1), 85) + # r_max = np.sqrt((x_center) ** 2 + (y_center) ** 2) + kpts[:, 0] = kpts[:, 0] - x_center # x + kpts[:, 1] = kpts[:, 1] - y_center # y + + r = np.sqrt(kpts[:, 0] ** 2 + kpts[:, 1] ** 2) + 1e-6 + r_normalized = r / (r_max * r_normalize_factor) + r_normalized[r_normalized > 1] = 1 + r_normalized = r_normalized * 9 # index into the 10 radial bins of the RGB lookup table + + cos_theta = kpts[:, 0] / r # x / r + theta = np.arccos(cos_theta) # from 0 to pi + change_angle_mask = kpts[:, 1] < 0 + theta[change_angle_mask] = 2 * np.pi - theta[change_angle_mask] + theta_degree = np.degrees(theta) + theta_degree[theta_degree == 360] = 0 # to avoid index overflow + kpts_color = RGB[r_normalized.astype(int), theta_degree.astype(int)] + return kpts_color + + +def show_image_pair(img0, img1, path=None): + fig, axes = plt.subplots(1, 2, figsize=(10, 6), dpi=200) + axes[0].imshow(img0, cmap="gray") + axes[1].imshow(img1, cmap="gray") + for i in range(2): # clear all frames + axes[i].get_yaxis().set_ticks([]) + axes[i].get_xaxis().set_ticks([]) + for spine in axes[i].spines.values(): + spine.set_visible(False) + plt.tight_layout(pad=1) + if path: + plt.savefig(str(path), bbox_inches="tight", pad_inches=0) + return fig + +def plot_local_windows(kpts, color="r", lw=1, ax_=0, window_size=(9, 9)): + ax = plt.gcf().axes + for kpt in kpts: + ax[ax_].add_patch( + matplotlib.patches.Rectangle( + ( + kpt[0] - (window_size[0] // 2) - 1, + kpt[1] - (window_size[1] // 2) - 1, + ), + window_size[0] + 1, + window_size[1] + 1, + lw=lw,
color=color, + fill=False, + ) + ) + diff --git a/imcui/third_party/MatchAnything/requirements.txt b/imcui/third_party/MatchAnything/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..891cdb8becdb19790c4ac3659d69f8118d14bd2c --- /dev/null +++ b/imcui/third_party/MatchAnything/requirements.txt @@ -0,0 +1,22 @@ +opencv_python==4.4.0.46 +albumentations==0.5.1 --no-binary=imgaug,albumentations +Pillow==9.5.0 +ray==2.9.3 +einops==0.3.0 +kornia==0.4.1 +loguru==0.5.3 +yacs>=0.1.8 +tqdm +autopep8 +pylint +ipython +jupyterlab +matplotlib +h5py==3.1.0 +pytorch-lightning==1.3.5 +torchmetrics==0.6.0 # version problem: https://github.com/NVIDIA/DeepLearningExamples/issues/1113#issuecomment-1102969461 +joblib>=1.0.1 +pynvml +gpustat +safetensors +timm==0.6.7 \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/scripts/evaluate/eval_harvard_brain.sh b/imcui/third_party/MatchAnything/scripts/evaluate/eval_harvard_brain.sh new file mode 100644 index 0000000000000000000000000000000000000000..a517ce7855dc9c87798a9a230fa8f21c4c06ca5a --- /dev/null +++ b/imcui/third_party/MatchAnything/scripts/evaluate/eval_harvard_brain.sh @@ -0,0 +1,17 @@ +#!/bin/bash -l + +SCRIPTPATH=$(dirname $(readlink -f "$0")) +PROJECT_DIR="${SCRIPTPATH}/../../" + +cd $PROJECT_DIR + +DEVICE_ID='0' +NPZ_ROOT=data/test_data/havard_medical_matching/all_eval +NPZ_LIST_PATH=data/test_data/havard_medical_matching/all_eval/val_list.txt +OUTPUT_PATH=results/havard_medical_matching + +# ELoFTR pretrained: +CUDA_VISIBLE_DEVICES=$DEVICE_ID python tools/evaluate_datasets.py configs/models/eloftr_model.py --ckpt_path weights/matchanything_eloftr.ckpt --method matchanything_eloftr@-@ransac_affine --imgresize 832 --thr 0.05 --npe --npz_root $NPZ_ROOT --npz_list_path $NPZ_LIST_PATH --output_path $OUTPUT_PATH + +# ROMA pretrained: +CUDA_VISIBLE_DEVICES=$DEVICE_ID python tools/evaluate_datasets.py configs/models/roma_model.py --ckpt_path weights/matchanything_roma.ckpt --method matchanything_roma@-@ransac_affine --imgresize 832 --npe --npz_root $NPZ_ROOT --npz_list_path $NPZ_LIST_PATH --output_path $OUTPUT_PATH \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/scripts/evaluate/eval_liver_ct_mr.sh b/imcui/third_party/MatchAnything/scripts/evaluate/eval_liver_ct_mr.sh new file mode 100644 index 0000000000000000000000000000000000000000..f58b0b623565a3131f0be290c81e3842c6db4d3e --- /dev/null +++ b/imcui/third_party/MatchAnything/scripts/evaluate/eval_liver_ct_mr.sh @@ -0,0 +1,17 @@ +#!/bin/bash -l + +SCRIPTPATH=$(dirname $(readlink -f "$0")) +PROJECT_DIR="${SCRIPTPATH}/../../" + +cd $PROJECT_DIR + +DEVICE_ID='0' +NPZ_ROOT=data/test_data/Liver_CT-MR/eval_indexs +NPZ_LIST_PATH=data/test_data/Liver_CT-MR/eval_indexs/val_list.txt +OUTPUT_PATH=results/Liver_CT-MR + +# ELoFTR pretrained: +CUDA_VISIBLE_DEVICES=$DEVICE_ID python tools/evaluate_datasets.py configs/models/eloftr_model.py --ckpt_path weights/matchanything_eloftr.ckpt --method matchanything_eloftr@-@ransac_affine --imgresize 832 --npe --npz_root $NPZ_ROOT --npz_list_path $NPZ_LIST_PATH --output_path $OUTPUT_PATH + +# ROMA pretrained: +CUDA_VISIBLE_DEVICES=$DEVICE_ID python tools/evaluate_datasets.py configs/models/roma_model.py --ckpt_path weights/matchanything_roma.ckpt --method matchanything_roma@-@ransac_affine --imgresize 832 --npe --npz_root $NPZ_ROOT --npz_list_path $NPZ_LIST_PATH --output_path $OUTPUT_PATH \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/scripts/evaluate/eval_thermal_ground.sh 
b/imcui/third_party/MatchAnything/scripts/evaluate/eval_thermal_ground.sh new file mode 100644 index 0000000000000000000000000000000000000000..2f7d5ea30804e749c5eff6f94d9963073683e441 --- /dev/null +++ b/imcui/third_party/MatchAnything/scripts/evaluate/eval_thermal_ground.sh @@ -0,0 +1,17 @@ +#!/bin/bash -l + +SCRIPTPATH=$(dirname $(readlink -f "$0")) +PROJECT_DIR="${SCRIPTPATH}/../../" + +cd $PROJECT_DIR + +DEVICE_ID='0' +NPZ_ROOT=data/test_data/thermal_visible_ground/eval_indexs +NPZ_LIST_PATH=data/test_data/thermal_visible_ground/eval_indexs/val_list.txt +OUTPUT_PATH=results/thermal_visible_ground + +# ELoFTR pretrained: +CUDA_VISIBLE_DEVICES=$DEVICE_ID python tools/evaluate_datasets.py configs/models/eloftr_model.py --ckpt_path weights/matchanything_eloftr.ckpt --method matchanything_eloftr@-@ransac_affine --imgresize 832 --npe --npz_root $NPZ_ROOT --npz_list_path $NPZ_LIST_PATH --output_path $OUTPUT_PATH + +# ROMA pretrained: +CUDA_VISIBLE_DEVICES=$DEVICE_ID python tools/evaluate_datasets.py configs/models/roma_model.py --ckpt_path weights/matchanything_roma.ckpt --method matchanything_roma@-@ransac_affine --imgresize 832 --npe --npz_root $NPZ_ROOT --npz_list_path $NPZ_LIST_PATH --output_path $OUTPUT_PATH \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/scripts/evaluate/eval_thermal_mtv.sh b/imcui/third_party/MatchAnything/scripts/evaluate/eval_thermal_mtv.sh new file mode 100644 index 0000000000000000000000000000000000000000..faf816d4a377626f35e3549b1de8c65a8b4ae540 --- /dev/null +++ b/imcui/third_party/MatchAnything/scripts/evaluate/eval_thermal_mtv.sh @@ -0,0 +1,17 @@ +#!/bin/bash -l + +SCRIPTPATH=$(dirname $(readlink -f "$0")) +PROJECT_DIR="${SCRIPTPATH}/../../" + +cd $PROJECT_DIR + +DEVICE_ID='0' +NPZ_ROOT=data/test_data/MTV_cross_modal_data/scene_info/scene_info +NPZ_LIST_PATH=data/test_data/MTV_cross_modal_data/scene_info/test_list.txt +OUTPUT_PATH=results/MTV_cross_modal_data + +# ELoFTR pretrained: +CUDA_VISIBLE_DEVICES=$DEVICE_ID python tools/evaluate_datasets.py configs/models/eloftr_model.py --ckpt_path weights/matchanything_eloftr.ckpt --method matchanything_eloftr@-@ransac_affine --imgresize 832 --npe --npz_root $NPZ_ROOT --npz_list_path $NPZ_LIST_PATH --output_path $OUTPUT_PATH + +# ROMA pretrained: +CUDA_VISIBLE_DEVICES=$DEVICE_ID python tools/evaluate_datasets.py configs/models/roma_model.py --ckpt_path weights/matchanything_roma.ckpt --method matchanything_roma@-@ransac_affine --imgresize 832 --npe --npz_root $NPZ_ROOT --npz_list_path $NPZ_LIST_PATH --output_path $OUTPUT_PATH \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/scripts/evaluate/eval_thermal_remote_sense.sh b/imcui/third_party/MatchAnything/scripts/evaluate/eval_thermal_remote_sense.sh new file mode 100644 index 0000000000000000000000000000000000000000..afd0c62edb3ba9e9ffc4a3a4140a38c3fd1d475e --- /dev/null +++ b/imcui/third_party/MatchAnything/scripts/evaluate/eval_thermal_remote_sense.sh @@ -0,0 +1,17 @@ +#!/bin/bash -l + +SCRIPTPATH=$(dirname $(readlink -f "$0")) +PROJECT_DIR="${SCRIPTPATH}/../../" + +cd $PROJECT_DIR + +DEVICE_ID='0' +NPZ_ROOT=data/test_data/remote_sense_thermal/eval_Optical-Infrared +NPZ_LIST_PATH=data/test_data/remote_sense_thermal/eval_Optical-Infrared/val_list.txt +OUTPUT_PATH=results/remote_sense_thermal + +# ELoFTR pretrained: +CUDA_VISIBLE_DEVICES=$DEVICE_ID python tools/evaluate_datasets.py configs/models/eloftr_model.py --ckpt_path weights/matchanything_eloftr.ckpt --method matchanything_eloftr@-@ransac_affine --imgresize 832 
--npe --npz_root $NPZ_ROOT --npz_list_path $NPZ_LIST_PATH --output_path $OUTPUT_PATH + +# ROMA pretrained: +CUDA_VISIBLE_DEVICES=$DEVICE_ID python tools/evaluate_datasets.py configs/models/roma_model.py --ckpt_path weights/matchanything_roma.ckpt --method matchanything_roma@-@ransac_affine --imgresize 832 --npe --npz_root $NPZ_ROOT --npz_list_path $NPZ_LIST_PATH --output_path $OUTPUT_PATH \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/scripts/evaluate/eval_visible_sar.sh b/imcui/third_party/MatchAnything/scripts/evaluate/eval_visible_sar.sh new file mode 100644 index 0000000000000000000000000000000000000000..231f303a642ce890c7e90470120ef8744d89455e --- /dev/null +++ b/imcui/third_party/MatchAnything/scripts/evaluate/eval_visible_sar.sh @@ -0,0 +1,17 @@ +#!/bin/bash -l + +SCRIPTPATH=$(dirname $(readlink -f "$0")) +PROJECT_DIR="${SCRIPTPATH}/../../" + +cd $PROJECT_DIR + +DEVICE_ID='0' +NPZ_ROOT=data/test_data/visible_sar_dataset/eval +NPZ_LIST_PATH=data/test_data/visible_sar_dataset/eval/val_list.txt +OUTPUT_PATH=results/visible_sar_dataset + +# ELoFTR pretrained: +CUDA_VISIBLE_DEVICES=$DEVICE_ID python tools/evaluate_datasets.py configs/models/eloftr_model.py --ckpt_path weights/matchanything_eloftr.ckpt --method matchanything_eloftr@-@ransac_affine --imgresize 832 --thr 0.05 --npe --npz_root $NPZ_ROOT --npz_list_path $NPZ_LIST_PATH --output_path $OUTPUT_PATH + +# ROMA pretrained: +CUDA_VISIBLE_DEVICES=$DEVICE_ID python tools/evaluate_datasets.py configs/models/roma_model.py --ckpt_path weights/matchanything_roma.ckpt --method matchanything_roma@-@ransac_affine --imgresize 832 --npe --npz_root $NPZ_ROOT --npz_list_path $NPZ_LIST_PATH --output_path $OUTPUT_PATH \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/scripts/evaluate/eval_visible_vectorized_map.sh b/imcui/third_party/MatchAnything/scripts/evaluate/eval_visible_vectorized_map.sh new file mode 100644 index 0000000000000000000000000000000000000000..a8bb56f0a8273409fa51532e61323d423ad04191 --- /dev/null +++ b/imcui/third_party/MatchAnything/scripts/evaluate/eval_visible_vectorized_map.sh @@ -0,0 +1,17 @@ +#!/bin/bash -l + +SCRIPTPATH=$(dirname $(readlink -f "$0")) +PROJECT_DIR="${SCRIPTPATH}/../../" + +cd $PROJECT_DIR + +DEVICE_ID='0' +NPZ_ROOT=data/test_data/visible_vectorized_map/scene_indices +NPZ_LIST_PATH=data/test_data/visible_vectorized_map/scene_indices/val_list.txt +OUTPUT_PATH=results/visible_vectorized_map + +# ELoFTR pretrained: +CUDA_VISIBLE_DEVICES=$DEVICE_ID python tools/evaluate_datasets.py configs/models/eloftr_model.py --ckpt_path weights/matchanything_eloftr.ckpt --method matchanything_eloftr@-@ransac_affine --imgresize 832 --npe --npz_root $NPZ_ROOT --npz_list_path $NPZ_LIST_PATH --output_path $OUTPUT_PATH + +# ROMA pretrained: +CUDA_VISIBLE_DEVICES=$DEVICE_ID python tools/evaluate_datasets.py configs/models/roma_model.py --ckpt_path weights/matchanything_roma.ckpt --method matchanything_roma@-@ransac_affine --imgresize 832 --npe --npz_root $NPZ_ROOT --npz_list_path $NPZ_LIST_PATH --output_path $OUTPUT_PATH \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/src/__init__.py b/imcui/third_party/MatchAnything/src/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/imcui/third_party/MatchAnything/src/config/default.py b/imcui/third_party/MatchAnything/src/config/default.py new file mode 100644 index 
0000000000000000000000000000000000000000..0b43845bfec84071b29b96012bf5401a889327ed --- /dev/null +++ b/imcui/third_party/MatchAnything/src/config/default.py @@ -0,0 +1,344 @@ +from yacs.config import CfgNode as CN +_CN = CN() +############## ROMA Pipeline ############## +_CN.ROMA = CN() +_CN.ROMA.MATCH_THRESH = 0.0 +_CN.ROMA.RESIZE_BY_STRETCH = False # Used for test mode +_CN.ROMA.NORMALIZE_IMG = False # Used for test mode + +_CN.ROMA.MODE = "train_framework" # Used in Lightning Train & Val +_CN.ROMA.MODEL = CN() +_CN.ROMA.MODEL.COARSE_BACKBONE = 'DINOv2_large' +_CN.ROMA.MODEL.COARSE_FEAT_DIM = 1024 +_CN.ROMA.MODEL.MEDIUM_FEAT_DIM = 512 +_CN.ROMA.MODEL.COARSE_PATCH_SIZE = 14 +_CN.ROMA.MODEL.AMP = True # FP16 mode + +_CN.ROMA.SAMPLE = CN() +_CN.ROMA.SAMPLE.METHOD = "threshold_balanced" +_CN.ROMA.SAMPLE.N_SAMPLE = 5000 +_CN.ROMA.SAMPLE.THRESH = 0.05 + +_CN.ROMA.TEST_TIME = CN() +_CN.ROMA.TEST_TIME.COARSE_RES = (560, 560) # needs to be divisible by 14 & 8 +_CN.ROMA.TEST_TIME.UPSAMPLE = True +_CN.ROMA.TEST_TIME.UPSAMPLE_RES = (864, 864) # needs to be divisible by 8 +_CN.ROMA.TEST_TIME.SYMMETRIC = True +_CN.ROMA.TEST_TIME.ATTENUTATE_CERT = True + +############## ↓ LoFTR Pipeline ↓ ############## +_CN.LOFTR = CN() +_CN.LOFTR.BACKBONE_TYPE = 'ResNetFPN' +_CN.LOFTR.ALIGN_CORNER = True +_CN.LOFTR.RESOLUTION = (8, 2) # options: [(8, 2), (16, 4)] +_CN.LOFTR.FINE_WINDOW_SIZE = 5 # window_size in fine_level, must be odd +_CN.LOFTR.FINE_WINDOW_MATCHING_SIZE = 5 # window_size for LoFTR fine-matching; odd for selection, even for averaging +_CN.LOFTR.FINE_CONCAT_COARSE_FEAT = True +_CN.LOFTR.FINE_SAMPLE_COARSE_FEAT = False +_CN.LOFTR.COARSE_FEAT_ONLY = False # TO BE DONE +_CN.LOFTR.INTER_FEAT = False # FPN backbone inter feat with coarse_attn. +_CN.LOFTR.FP16 = False +_CN.LOFTR.FIX_BIAS = False +_CN.LOFTR.MATCHABILITY = False +_CN.LOFTR.FORCE_LOOP_BACK = False +_CN.LOFTR.NORM_FPNFEAT = False +_CN.LOFTR.NORM_FPNFEAT2 = False +_CN.LOFTR.REPLACE_NAN = False +_CN.LOFTR.PLOT_SCORES = False +_CN.LOFTR.REP_FPN = False +_CN.LOFTR.REP_DEPLOY = False +_CN.LOFTR.EVAL_TIMES = 1 + +# 1. LoFTR-backbone (local feature CNN) config +_CN.LOFTR.RESNETFPN = CN() +_CN.LOFTR.RESNETFPN.INITIAL_DIM = 128 +_CN.LOFTR.RESNETFPN.BLOCK_DIMS = [128, 196, 256] # s1, s2, s3 +_CN.LOFTR.RESNETFPN.SAMPLE_FINE = False +_CN.LOFTR.RESNETFPN.COARSE_FEAT_ONLY = False # TO BE DONE +_CN.LOFTR.RESNETFPN.INTER_FEAT = False # FPN backbone inter feat with coarse_attn. +_CN.LOFTR.RESNETFPN.LEAKY = False +_CN.LOFTR.RESNETFPN.REPVGGMODEL = None + +# 2.
LoFTR-coarse module config +_CN.LOFTR.COARSE = CN() +_CN.LOFTR.COARSE.D_MODEL = 256 +_CN.LOFTR.COARSE.D_FFN = 256 +_CN.LOFTR.COARSE.NHEAD = 8 +_CN.LOFTR.COARSE.LAYER_NAMES = ['self', 'cross'] * 4 +_CN.LOFTR.COARSE.ATTENTION = 'linear' # options: ['linear', 'full'] +_CN.LOFTR.COARSE.TEMP_BUG_FIX = True +_CN.LOFTR.COARSE.PAN = False +_CN.LOFTR.COARSE.POOl_SIZE = 4 +_CN.LOFTR.COARSE.POOl_SIZE2 = 4 +_CN.LOFTR.COARSE.BN = True +_CN.LOFTR.COARSE.XFORMER = False +_CN.LOFTR.COARSE.BIDIRECTION = False +_CN.LOFTR.COARSE.DEPTH_CONFIDENCE = -1.0 +_CN.LOFTR.COARSE.WIDTH_CONFIDENCE = -1.0 +_CN.LOFTR.COARSE.LEAKY = -1.0 +_CN.LOFTR.COARSE.ASYMMETRIC = False +_CN.LOFTR.COARSE.ASYMMETRIC_SELF = False +_CN.LOFTR.COARSE.ROPE = False +_CN.LOFTR.COARSE.TOKEN_MIXER = None +_CN.LOFTR.COARSE.SKIP = False +_CN.LOFTR.COARSE.DWCONV = False +_CN.LOFTR.COARSE.DWCONV2 = False +_CN.LOFTR.COARSE.SCATTER = False +_CN.LOFTR.COARSE.NPE = None +_CN.LOFTR.COARSE.NORM_BEFORE = True +_CN.LOFTR.COARSE.VIT_NORM = False +_CN.LOFTR.COARSE.ROPE_DWPROJ = False +_CN.LOFTR.COARSE.ABSPE = False + + +# 3. Coarse-Matching config +_CN.LOFTR.MATCH_COARSE = CN() +_CN.LOFTR.MATCH_COARSE.THR = 0.2 +_CN.LOFTR.MATCH_COARSE.BORDER_RM = 2 +_CN.LOFTR.MATCH_COARSE.MATCH_TYPE = 'dual_softmax' # options: ['dual_softmax', 'sinkhorn'] +_CN.LOFTR.MATCH_COARSE.DSMAX_TEMPERATURE = 0.1 +_CN.LOFTR.MATCH_COARSE.SKH_ITERS = 3 +_CN.LOFTR.MATCH_COARSE.SKH_INIT_BIN_SCORE = 1.0 +_CN.LOFTR.MATCH_COARSE.SKH_PREFILTER = False +_CN.LOFTR.MATCH_COARSE.TRAIN_COARSE_PERCENT = 0.2 # training trick: save GPU memory +_CN.LOFTR.MATCH_COARSE.TRAIN_PAD_NUM_GT_MIN = 200 # training trick: avoid DDP deadlock +_CN.LOFTR.MATCH_COARSE.SPARSE_SPVS = True +_CN.LOFTR.MATCH_COARSE.MTD_SPVS = False +_CN.LOFTR.MATCH_COARSE.FIX_BIAS = False +_CN.LOFTR.MATCH_COARSE.BINARY = False +_CN.LOFTR.MATCH_COARSE.BINARY_SPV = 'l2' +_CN.LOFTR.MATCH_COARSE.NORMFEAT = False +_CN.LOFTR.MATCH_COARSE.NORMFEATMUL = False +_CN.LOFTR.MATCH_COARSE.DIFFSIGN2 = False +_CN.LOFTR.MATCH_COARSE.DIFFSIGN3 = False +_CN.LOFTR.MATCH_COARSE.CLASSIFY = False +_CN.LOFTR.MATCH_COARSE.D_CLASSIFY = 256 +_CN.LOFTR.MATCH_COARSE.SKIP_SOFTMAX = False +_CN.LOFTR.MATCH_COARSE.FORCE_NEAREST = False # when BINARY is True, force nearest-neighbor matching instead of searching for a reasonable threshold +_CN.LOFTR.MATCH_COARSE.FP16MATMUL = False +_CN.LOFTR.MATCH_COARSE.SEQSOFTMAX = False +_CN.LOFTR.MATCH_COARSE.SEQSOFTMAX2 = False +_CN.LOFTR.MATCH_COARSE.RATIO_TEST = False +_CN.LOFTR.MATCH_COARSE.RATIO_TEST_VAL = -1.0 +_CN.LOFTR.MATCH_COARSE.USE_GT_COARSE = False +_CN.LOFTR.MATCH_COARSE.CROSS_SOFTMAX = False +_CN.LOFTR.MATCH_COARSE.PLOT_ORIGIN_SCORES = False +_CN.LOFTR.MATCH_COARSE.USE_PERCENT_THR = False +_CN.LOFTR.MATCH_COARSE.PERCENT_THR = 0.1 +_CN.LOFTR.MATCH_COARSE.ADD_SIGMOID = False +_CN.LOFTR.MATCH_COARSE.SIGMOID_BIAS = 20.0 +_CN.LOFTR.MATCH_COARSE.SIGMOID_SIGMA = 2.5 +_CN.LOFTR.MATCH_COARSE.CAL_PER_OF_GT = False + +# 4.
LoFTR-fine module config +_CN.LOFTR.FINE = CN() +_CN.LOFTR.FINE.SKIP = False +_CN.LOFTR.FINE.D_MODEL = 128 +_CN.LOFTR.FINE.D_FFN = 128 +_CN.LOFTR.FINE.NHEAD = 8 +_CN.LOFTR.FINE.LAYER_NAMES = ['self', 'cross'] * 1 +_CN.LOFTR.FINE.ATTENTION = 'linear' +_CN.LOFTR.FINE.MTD_SPVS = False +_CN.LOFTR.FINE.PAN = False +_CN.LOFTR.FINE.POOl_SIZE = 4 +_CN.LOFTR.FINE.BN = True +_CN.LOFTR.FINE.XFORMER = False +_CN.LOFTR.FINE.BIDIRECTION = False + + +# Fine-Matching config +_CN.LOFTR.MATCH_FINE = CN() +_CN.LOFTR.MATCH_FINE.THR = 0 +_CN.LOFTR.MATCH_FINE.TOPK = 3 +_CN.LOFTR.MATCH_FINE.NORMFINEM = False +_CN.LOFTR.MATCH_FINE.USE_GT_FINE = False +_CN.LOFTR.MATCH_COARSE.FINE_TOPK = _CN.LOFTR.MATCH_FINE.TOPK +_CN.LOFTR.MATCH_FINE.FIX_FINE_MATCHING = False +_CN.LOFTR.MATCH_FINE.SKIP_FINE_SOFTMAX = False +_CN.LOFTR.MATCH_FINE.USE_SIGMOID = False +_CN.LOFTR.MATCH_FINE.SIGMOID_BIAS = 0.0 +_CN.LOFTR.MATCH_FINE.NORMFEAT = False +_CN.LOFTR.MATCH_FINE.SPARSE_SPVS = True +_CN.LOFTR.MATCH_FINE.FORCE_NEAREST = False +_CN.LOFTR.MATCH_FINE.LOCAL_REGRESS = False +_CN.LOFTR.MATCH_FINE.LOCAL_REGRESS_RMBORDER = False +_CN.LOFTR.MATCH_FINE.LOCAL_REGRESS_NOMASK = False +_CN.LOFTR.MATCH_FINE.LOCAL_REGRESS_TEMPERATURE = 1.0 +_CN.LOFTR.MATCH_FINE.LOCAL_REGRESS_PADONE = False +_CN.LOFTR.MATCH_FINE.LOCAL_REGRESS_SLICE = False +_CN.LOFTR.MATCH_FINE.LOCAL_REGRESS_SLICEDIM = 8 +_CN.LOFTR.MATCH_FINE.LOCAL_REGRESS_INNER = False +_CN.LOFTR.MATCH_FINE.MULTI_REGRESS = False + + + +# 5. LoFTR Losses +# -- # coarse-level +_CN.LOFTR.LOSS = CN() +_CN.LOFTR.LOSS.COARSE_TYPE = 'focal' # ['focal', 'cross_entropy'] +_CN.LOFTR.LOSS.COARSE_WEIGHT = 1.0 +_CN.LOFTR.LOSS.COARSE_SIGMOID_WEIGHT = 1.0 +_CN.LOFTR.LOSS.LOCAL_WEIGHT = 0.5 +_CN.LOFTR.LOSS.COARSE_OVERLAP_WEIGHT = False +_CN.LOFTR.LOSS.FINE_OVERLAP_WEIGHT = False +_CN.LOFTR.LOSS.FINE_OVERLAP_WEIGHT2 = False +# _CN.LOFTR.LOSS.SPARSE_SPVS = False +# -- # focal loss (coarse) +_CN.LOFTR.LOSS.FOCAL_ALPHA = 0.25 +_CN.LOFTR.LOSS.FOCAL_GAMMA = 2.0 +_CN.LOFTR.LOSS.POS_WEIGHT = 1.0 +_CN.LOFTR.LOSS.NEG_WEIGHT = 1.0 +_CN.LOFTR.LOSS.CORRECT_NEG_WEIGHT = False +# _CN.LOFTR.LOSS.DUAL_SOFTMAX = False # whether coarse-level use dual-softmax or not. +# use `_CN.LOFTR.MATCH_COARSE.MATCH_TYPE` + +# -- # fine-level +_CN.LOFTR.LOSS.FINE_TYPE = 'l2_with_std' # ['l2_with_std', 'l2'] +_CN.LOFTR.LOSS.FINE_WEIGHT = 1.0 +_CN.LOFTR.LOSS.FINE_CORRECT_THR = 1.0 # for filtering valid fine-level gts (some gt matches might fall out of the fine-level window) + +# -- # ROMA: +_CN.LOFTR.ROMA_LOSS = CN() +_CN.LOFTR.ROMA_LOSS.IGNORE_EMPTY_IN_SPARSE_MATCH_SPV = False + +# -- # DKM: +_CN.LOFTR.DKM_LOSS = CN() +_CN.LOFTR.DKM_LOSS.IGNORE_EMPTY_IN_SPARSE_MATCH_SPV = False + +############## Dataset ############## +_CN.DATASET = CN() +# 1.
data config +# training and validating +_CN.DATASET.TB_LOG_DIR = "logs/tb_logs" +_CN.DATASET.TRAIN_DATA_SAMPLE_RATIO = [1.0] # sampling ratio for each training data source +_CN.DATASET.TRAIN_DATA_SOURCE = None # options: ['ScanNet', 'MegaDepth'] +_CN.DATASET.TRAIN_DATA_ROOT = None +_CN.DATASET.TRAIN_POSE_ROOT = None # (optional directory for poses) +_CN.DATASET.TRAIN_NPZ_ROOT = None +_CN.DATASET.TRAIN_LIST_PATH = None +_CN.DATASET.TRAIN_INTRINSIC_PATH = None +_CN.DATASET.VAL_DATA_ROOT = None +_CN.DATASET.VAL_DATA_SOURCE = None # options: ['ScanNet', 'MegaDepth'] +_CN.DATASET.VAL_POSE_ROOT = None # (optional directory for poses) +_CN.DATASET.VAL_NPZ_ROOT = None +_CN.DATASET.VAL_LIST_PATH = None # None if val data from all scenes are bundled into a single npz file +_CN.DATASET.VAL_INTRINSIC_PATH = None +_CN.DATASET.FP16 = False +_CN.DATASET.TRAIN_GT_MATCHES_PADDING_N = 8000 +# testing +_CN.DATASET.TEST_DATA_SOURCE = None +_CN.DATASET.TEST_DATA_ROOT = None +_CN.DATASET.TEST_POSE_ROOT = None # (optional directory for poses) +_CN.DATASET.TEST_NPZ_ROOT = None +_CN.DATASET.TEST_LIST_PATH = None # None if test data from all scenes are bundled into a single npz file +_CN.DATASET.TEST_INTRINSIC_PATH = None + +# 2. dataset config +# general options +_CN.DATASET.MIN_OVERLAP_SCORE_TRAIN = 0.4 # discard data with overlap_score < min_overlap_score +_CN.DATASET.MIN_OVERLAP_SCORE_TEST = 0.0 +_CN.DATASET.AUGMENTATION_TYPE = None # options: [None, 'dark', 'mobile'] + +# debug options +_CN.DATASET.TEST_N_PAIRS = None # Debug first N pairs +# DEBUG +_CN.LOFTR.FP16LOG = False +_CN.LOFTR.MATCH_COARSE.FP16LOG = False + +# scanNet options +_CN.DATASET.SCAN_IMG_RESIZEX = 640 # resize the longer side, zero-pad bottom-right to square. +_CN.DATASET.SCAN_IMG_RESIZEY = 480 # resize the shorter side, zero-pad bottom-right to square. + +# MegaDepth options +_CN.DATASET.MGDPT_IMG_RESIZE = (640, 640) # resize the longer side, zero-pad bottom-right to square. +_CN.DATASET.MGDPT_IMG_PAD = True # pad img to square with size = MGDPT_IMG_RESIZE +_CN.DATASET.MGDPT_DEPTH_PAD = True # pad depthmap to square with size = 2000 +_CN.DATASET.MGDPT_DF = 8 +_CN.DATASET.LOAD_ORIGIN_RGB = False # Only enable in test mode; useful for RGB-based baselines such as DKM and ROMA. +_CN.DATASET.READ_GRAY = True +_CN.DATASET.RESIZE_BY_STRETCH = False +_CN.DATASET.NORMALIZE_IMG = False # For backbones using pretrained DINO features, True may work better. +_CN.DATASET.HOMO_WARP_USE_MASK = False + +_CN.DATASET.NPE_NAME = "megadepth" + +############## Trainer ############## +_CN.TRAINER = CN() +_CN.TRAINER.WORLD_SIZE = 1 +_CN.TRAINER.CANONICAL_BS = 64 +_CN.TRAINER.CANONICAL_LR = 6e-3 +_CN.TRAINER.SCALING = None # this will be calculated automatically +_CN.TRAINER.FIND_LR = False # use learning rate finder from pytorch-lightning + +# optimizer +_CN.TRAINER.OPTIMIZER = "adamw" # [adam, adamw] +_CN.TRAINER.OPTIMIZER_EPS = 1e-8 # optimizer default; set a larger value, e.g., 1e-7, for fp16 mixed-precision training +_CN.TRAINER.TRUE_LR = None # this will be calculated automatically at runtime +_CN.TRAINER.ADAM_DECAY = 0. # weight decay for adam +_CN.TRAINER.ADAMW_DECAY = 0.1 + +# step-based warm-up +_CN.TRAINER.WARMUP_TYPE = 'linear' # [linear, constant] +_CN.TRAINER.WARMUP_RATIO = 0.
+_CN.TRAINER.WARMUP_STEP = 4800 + +# learning rate scheduler +_CN.TRAINER.SCHEDULER = 'MultiStepLR' # [MultiStepLR, CosineAnnealing, ExponentialLR] +_CN.TRAINER.SCHEDULER_INTERVAL = 'epoch' # [epoch, step] +_CN.TRAINER.MSLR_MILESTONES = [3, 6, 9, 12] # MSLR: MultiStepLR +_CN.TRAINER.MSLR_GAMMA = 0.5 +_CN.TRAINER.COSA_TMAX = 30 # COSA: CosineAnnealing +_CN.TRAINER.ELR_GAMMA = 0.999992 # ELR: ExponentialLR, this value for 'step' interval + +# plotting related +_CN.TRAINER.ENABLE_PLOTTING = True +_CN.TRAINER.N_VAL_PAIRS_TO_PLOT = 8 # number of val/test pairs for plotting +_CN.TRAINER.PLOT_MODE = 'evaluation' # ['evaluation', 'confidence'] +_CN.TRAINER.PLOT_MATCHES_ALPHA = 'dynamic' + +# geometric metrics and pose solver +_CN.TRAINER.EPI_ERR_THR = 5e-4 # recommendation: 5e-4 for ScanNet, 1e-4 for MegaDepth (from SuperGlue) +_CN.TRAINER.POSE_GEO_MODEL = 'E' # ['E', 'F', 'H'] +_CN.TRAINER.POSE_ESTIMATION_METHOD = 'RANSAC' # [RANSAC, DEGENSAC, MAGSAC] +_CN.TRAINER.WARP_ESTIMATOR_MODEL = 'affine' +_CN.TRAINER.RANSAC_PIXEL_THR = 0.5 +_CN.TRAINER.RANSAC_CONF = 0.99999 +_CN.TRAINER.RANSAC_MAX_ITERS = 10000 +_CN.TRAINER.USE_MAGSACPP = False +_CN.TRAINER.THRESHOLDS = [5, 10, 20] + +# data sampler for train_dataloader +_CN.TRAINER.DATA_SAMPLER = 'scene_balance' # options: ['scene_balance', 'random', 'normal'] +# 'scene_balance' config +_CN.TRAINER.N_SAMPLES_PER_SUBSET = 200 +_CN.TRAINER.SB_SUBSET_SAMPLE_REPLACEMENT = True # whether to sample each scene with replacement or not +_CN.TRAINER.SB_SUBSET_SHUFFLE = True # after sampling from scenes, whether to shuffle within the epoch or not +_CN.TRAINER.SB_REPEAT = 1 # repeat N times for training the sampled data +_CN.TRAINER.AUC_METHOD = 'exact_auc' +# 'random' config +_CN.TRAINER.RDM_REPLACEMENT = True +_CN.TRAINER.RDM_NUM_SAMPLES = None + +# gradient clipping +_CN.TRAINER.GRADIENT_CLIPPING = 0.5 + +# Finetune Mode: +_CN.FINETUNE = CN() +_CN.FINETUNE.ENABLE = False +_CN.FINETUNE.METHOD = "lora" # ['lora', 'whole_network'] + +_CN.FINETUNE.LORA = CN() +_CN.FINETUNE.LORA.RANK = 2 +_CN.FINETUNE.LORA.MODE = "linear&conv" # ["linear&conv", "linear_only"] +_CN.FINETUNE.LORA.SCALE = 1.0 + +_CN.TRAINER.SEED = 66 + + +def get_cfg_defaults(): + """Get a yacs CfgNode object with the default config values.""" + # Return a clone so that the defaults will not be altered + # This is for the "local variable" use pattern + return _CN.clone() diff --git a/imcui/third_party/MatchAnything/src/lightning/lightning_loftr.py b/imcui/third_party/MatchAnything/src/lightning/lightning_loftr.py new file mode 100644 index 0000000000000000000000000000000000000000..91a5132809b88d14d8f02ddf7012c5abba8c46ec --- /dev/null +++ b/imcui/third_party/MatchAnything/src/lightning/lightning_loftr.py @@ -0,0 +1,343 @@ + +from collections import defaultdict +import pprint +from loguru import logger +from pathlib import Path + +import torch +import numpy as np +import pytorch_lightning as pl +from matplotlib import pyplot as plt + +from src.loftr import LoFTR +from src.loftr.utils.supervision import compute_supervision_coarse, compute_supervision_fine, compute_roma_supervision +from src.optimizers import build_optimizer, build_scheduler +from src.utils.metrics import ( + compute_symmetrical_epipolar_errors, + compute_pose_errors, + compute_homo_corner_warp_errors, + compute_homo_match_warp_errors, + compute_warp_control_pts_errors, + aggregate_metrics +) +from src.utils.plotting import make_matching_figures, make_scores_figures +from src.utils.comm import gather, all_gather
+from src.utils.misc import lower_config, flattenList +from src.utils.profiler import PassThroughProfiler +from third_party.ROMA.roma.matchanything_roma_model import MatchAnything_Model + +import pynvml + +def reparameter(matcher): + module = matcher.backbone.layer0 + if hasattr(module, 'switch_to_deploy'): + module.switch_to_deploy() + print('m0 switch to deploy ok') + for modules in [matcher.backbone.layer1, matcher.backbone.layer2, matcher.backbone.layer3]: + for module in modules: + if hasattr(module, 'switch_to_deploy'): + module.switch_to_deploy() + print('backbone switch to deploy ok') + for modules in [matcher.fine_preprocess.layer2_outconv2, matcher.fine_preprocess.layer1_outconv2]: + for module in modules: + if hasattr(module, 'switch_to_deploy'): + module.switch_to_deploy() + print('fpn switch to deploy ok') + return matcher + +class PL_LoFTR(pl.LightningModule): + def __init__(self, config, pretrained_ckpt=None, profiler=None, dump_dir=None, test_mode=False, baseline_config=None): + """ + TODO: + - use the new version of PL logging API. + """ + super().__init__() + # Misc + self.config = config # full config + _config = lower_config(self.config) + self.profiler = profiler or PassThroughProfiler() + self.n_vals_plot = max(config.TRAINER.N_VAL_PAIRS_TO_PLOT // config.TRAINER.WORLD_SIZE, 1) + + if config.METHOD == "matchanything_eloftr": + self.matcher = LoFTR(config=_config['loftr'], profiler=self.profiler) + elif config.METHOD == "matchanything_roma": + self.matcher = MatchAnything_Model(config=_config['roma'], test_mode=test_mode) + else: + raise NotImplementedError + + if config.FINETUNE.ENABLE and test_mode: + # Inference time change model architecture before load pretrained model: + raise NotImplementedError + + # Pretrained weights + if pretrained_ckpt: + if config.METHOD in ["matchanything_eloftr", "matchanything_roma"]: + state_dict = torch.load(pretrained_ckpt, map_location='cpu')['state_dict'] + logger.info(f"Load model from:{self.matcher.load_state_dict(state_dict, strict=False)}") + else: + raise NotImplementedError + + if self.config.LOFTR.BACKBONE_TYPE == 'RepVGG' and test_mode and (config.METHOD == 'loftr'): + module = self.matcher.backbone.layer0 + if hasattr(module, 'switch_to_deploy'): + module.switch_to_deploy() + print('m0 switch to deploy ok') + for modules in [self.matcher.backbone.layer1, self.matcher.backbone.layer2, self.matcher.backbone.layer3]: + for module in modules: + if hasattr(module, 'switch_to_deploy'): + module.switch_to_deploy() + print('m switch to deploy ok') + + # Testing + self.dump_dir = dump_dir + self.max_gpu_memory = 0 + self.GPUID = 0 + self.warmup = False + + def gpumem(self, des, gpuid=None): + NUM_EXPAND = 1024 * 1024 * 1024 + gpu_id= self.GPUID if self.GPUID is not None else gpuid + handle = pynvml.nvmlDeviceGetHandleByIndex(gpu_id) + info = pynvml.nvmlDeviceGetMemoryInfo(handle) + gpu_Used = info.used + logger.info(f"GPU {gpu_id} memory used: {gpu_Used / NUM_EXPAND} GB while {des}") + # print(des, gpu_Used / NUM_EXPAND) + if gpu_Used / NUM_EXPAND > self.max_gpu_memory: + self.max_gpu_memory = gpu_Used / NUM_EXPAND + logger.info(f"[MAX]GPU {gpu_id} memory used: {gpu_Used / NUM_EXPAND} GB while {des}") + print('max_gpu_memory', self.max_gpu_memory) + + def configure_optimizers(self): + optimizer = build_optimizer(self, self.config) + scheduler = build_scheduler(self.config, optimizer) + return [optimizer], [scheduler] + + def optimizer_step( + self, epoch, batch_idx, optimizer, optimizer_idx, + optimizer_closure, on_tpu, 
+    def optimizer_step(
+            self, epoch, batch_idx, optimizer, optimizer_idx,
+            optimizer_closure, on_tpu, using_native_amp, using_lbfgs):
+        # learning rate warm up
+        warmup_step = self.config.TRAINER.WARMUP_STEP
+        if self.trainer.global_step < warmup_step:
+            if self.config.TRAINER.WARMUP_TYPE == 'linear':
+                base_lr = self.config.TRAINER.WARMUP_RATIO * self.config.TRAINER.TRUE_LR
+                lr = base_lr + \
+                    (self.trainer.global_step / warmup_step) * \
+                    abs(self.config.TRAINER.TRUE_LR - base_lr)
+                for pg in optimizer.param_groups:
+                    pg['lr'] = lr
+            elif self.config.TRAINER.WARMUP_TYPE == 'constant':
+                pass
+            else:
+                raise ValueError(f'Unknown lr warm-up strategy: {self.config.TRAINER.WARMUP_TYPE}')
+
+        # update params; the FP16 and FP32 paths are identical here, since mixed
+        # precision is handled by autocast in the forward pass.
+        optimizer.step(closure=optimizer_closure)
+        optimizer.zero_grad()
+
+    def _trainval_inference(self, batch):
+        with self.profiler.profile("Compute coarse supervision"):
+            with torch.autocast(enabled=False, device_type='cuda'):
+                # ROMA/DKM variants compute their supervision after matching (below).
+                if not (("roma" in self.config.METHOD) or ('dkm' in self.config.METHOD)):
+                    compute_supervision_coarse(batch, self.config)
+
+        with self.profiler.profile("LoFTR"):
+            with torch.autocast(enabled=self.config.LOFTR.FP16, device_type='cuda'):
+                self.matcher(batch)
+
+        with self.profiler.profile("Compute fine supervision"):
+            with torch.autocast(enabled=False, device_type='cuda'):
+                if ("roma" in self.config.METHOD) or ('dkm' in self.config.METHOD):
+                    compute_roma_supervision(batch, self.config)
+                else:
+                    compute_supervision_fine(batch, self.config, self.logger)
+
+        with self.profiler.profile("Compute losses"):
+            pass
+
+    def _compute_metrics(self, batch):
+        if 'gt_2D_matches' in batch:
+            compute_warp_control_pts_errors(batch, self.config)
+        elif batch['homography'].sum() != 0 and batch['T_0to1'].sum() == 0:
+            compute_homo_match_warp_errors(batch, self.config)  # compute warp_errors for each match
+            compute_homo_corner_warp_errors(batch, self.config)  # compute mean corner warp error for each pair
+        else:
+            compute_symmetrical_epipolar_errors(batch, self.config)  # compute epi_errs for each match
+            compute_pose_errors(batch, self.config)  # compute R_errs, t_errs, pose_errs for each pair
+
+        rel_pair_names = list(zip(*batch['pair_names']))
+        bs = batch['image0'].size(0)
+        # Fine multi-target supervision (MTD_SPVS) stores top-k epipolar errors per
+        # match, which must be flattened per pair; otherwise the errors are already flat.
+        if self.config.LOFTR.FINE.MTD_SPVS:
+            topk = self.config.LOFTR.MATCH_FINE.TOPK
+            epi_errs = [(batch['epi_errs'].reshape(-1, topk))[batch['m_bids'] == b].reshape(-1).cpu().numpy() for b in range(bs)]
+        else:
+            epi_errs = [batch['epi_errs'][batch['m_bids'] == b].cpu().numpy() for b in range(bs)]
+        metrics = {
+            # to filter duplicate pairs caused by DistributedSampler
+            'identifiers': ['#'.join(rel_pair_names[b]) for b in range(bs)],
+            'epi_errs': epi_errs,
+            'R_errs': batch['R_errs'],
+            't_errs': batch['t_errs'],
+            'inliers': batch['inliers'],
+            'num_matches': [batch['mconf'].shape[0]],  # batch size = 1 only
+            'percent_inliers': [batch['inliers'][0].shape[0] / batch['mconf'].shape[0] if batch['mconf'].shape[0] != 0 else 1],  # batch size = 1 only
+        }
+        ret_dict = {'metrics': metrics}
+        return ret_dict, rel_pair_names
+
+    def training_step(self, batch, batch_idx):
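+        """Run inference with supervision, log scalars/figures at rank 0, and return the loss."""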
+        self._trainval_inference(batch)
+
+        # logging
+        if self.trainer.global_rank == 0 and self.global_step % self.trainer.log_every_n_steps == 0:
+            # scalars
+            for k, v in batch['loss_scalars'].items():
+                self.logger.experiment.add_scalar(f'train/{k}', v, self.global_step)
+
+            # net-params
+            method = 'LOFTR'
+            if self.config[method]['MATCH_COARSE']['MATCH_TYPE'] == 'sinkhorn':
+                self.logger.experiment.add_scalar(
+                    'skh_bin_score', self.matcher.coarse_matching.bin_score.clone().detach().cpu().data, self.global_step)
+
+            figures = {}
+            if self.config.TRAINER.ENABLE_PLOTTING:
+                compute_symmetrical_epipolar_errors(batch, self.config)  # compute epi_errs for each match
+                figures = make_matching_figures(batch, self.config, self.config.TRAINER.PLOT_MODE)
+                for k, v in figures.items():
+                    self.logger.experiment.add_figure(f'train_match/{k}', v, self.global_step)
+
+        return {'loss': batch['loss']}
+
+    def training_epoch_end(self, outputs):
+        avg_loss = torch.stack([x['loss'] for x in outputs]).mean()
+        if self.trainer.global_rank == 0:
+            self.logger.experiment.add_scalar(
+                'train/avg_loss_on_epoch', avg_loss,
+                global_step=self.current_epoch)
+
+    def validation_step(self, batch, batch_idx, dataloader_idx=0):
+        self._trainval_inference(batch)
+
+        ret_dict, _ = self._compute_metrics(batch)
+
+        val_plot_interval = max(self.trainer.num_val_batches[0] // self.n_vals_plot, 1)
+        figures = {self.config.TRAINER.PLOT_MODE: []}
+        if batch_idx % val_plot_interval == 0:
+            figures = make_matching_figures(batch, self.config, mode=self.config.TRAINER.PLOT_MODE)
+            if self.config.LOFTR.PLOT_SCORES:
+                figs = make_scores_figures(batch, self.config, self.config.TRAINER.PLOT_MODE)
+                figures[self.config.TRAINER.PLOT_MODE] += figs[self.config.TRAINER.PLOT_MODE]
+                del figs
+
+        return {
+            **ret_dict,
+            'loss_scalars': batch['loss_scalars'],
+            'figures': figures,
+        }
+
+    def validation_epoch_end(self, outputs):
+        # handle multiple validation sets
+        multi_outputs = [outputs] if not isinstance(outputs[0], (list, tuple)) else outputs
+        multi_val_metrics = defaultdict(list)
+
+        for valset_idx, outputs in enumerate(multi_outputs):
+            # since pl performs the sanity check at the very beginning of training
+            cur_epoch = self.trainer.current_epoch
+            if not self.trainer.resume_from_checkpoint and self.trainer.running_sanity_check:
+                cur_epoch = -1
+
+            # 1. loss_scalars: dict of list, on cpu
+            _loss_scalars = [o['loss_scalars'] for o in outputs]
+            loss_scalars = {k: flattenList(all_gather([_ls[k] for _ls in _loss_scalars])) for k in _loss_scalars[0]}
+
+            # 2. val metrics: dict of list, numpy
+            _metrics = [o['metrics'] for o in outputs]
+            metrics = {k: flattenList(all_gather(flattenList([_me[k] for _me in _metrics]))) for k in _metrics[0]}
+            # NOTE: all ranks need to call `aggregate_metrics`, but only rank 0 logs
+            val_metrics_4tb = aggregate_metrics(metrics, self.config.TRAINER.EPI_ERR_THR, self.config.LOFTR.EVAL_TIMES)
+            for thr in [5, 10, 20]:
+                multi_val_metrics[f'auc@{thr}'].append(val_metrics_4tb[f'auc@{thr}'])
+
+            # 3. 
figures + _figures = [o['figures'] for o in outputs] + figures = {k: flattenList(gather(flattenList([_me[k] for _me in _figures]))) for k in _figures[0]} + + # tensorboard records only on rank 0 + if self.trainer.global_rank == 0: + for k, v in loss_scalars.items(): + mean_v = torch.stack(v).mean() + self.logger.experiment.add_scalar(f'val_{valset_idx}/avg_{k}', mean_v, global_step=cur_epoch) + + for k, v in val_metrics_4tb.items(): + self.logger.experiment.add_scalar(f"metrics_{valset_idx}/{k}", v, global_step=cur_epoch) + + for k, v in figures.items(): + if self.trainer.global_rank == 0: + for plot_idx, fig in enumerate(v): + self.logger.experiment.add_figure( + f'val_match_{valset_idx}/{k}/pair-{plot_idx}', fig, cur_epoch, close=True) + plt.close('all') + + for thr in [5, 10, 20]: + self.log(f'auc@{thr}', torch.tensor(np.mean(multi_val_metrics[f'auc@{thr}']))) # ckpt monitors on this + + def test_step(self, batch, batch_idx): + if self.warmup: + for i in range(50): + self.matcher(batch) + self.warmup = False + + with torch.autocast(enabled=self.config.LOFTR.FP16, device_type='cuda'): + with self.profiler.profile("LoFTR"): + self.matcher(batch) + + ret_dict, rel_pair_names = self._compute_metrics(batch) + print(ret_dict['metrics']['num_matches']) + self.dump_dir = None + + return ret_dict + + def test_epoch_end(self, outputs): + print(self.config) + print('max GPU memory: ', self.max_gpu_memory) + print(self.profiler.summary()) + # metrics: dict of list, numpy + _metrics = [o['metrics'] for o in outputs] + metrics = {k: flattenList(gather(flattenList([_me[k] for _me in _metrics]))) for k in _metrics[0]} + + # [{key: [{...}, *#bs]}, *#batch] + if self.dump_dir is not None: + Path(self.dump_dir).mkdir(parents=True, exist_ok=True) + _dumps = flattenList([o['dumps'] for o in outputs]) # [{...}, #bs*#batch] + dumps = flattenList(gather(_dumps)) # [{...}, #proc*#bs*#batch] + logger.info(f'Prediction and evaluation results will be saved to: {self.dump_dir}') + + if self.trainer.global_rank == 0: + NUM_EXPAND = 1024 * 1024 * 1024 + gpu_id=self.GPUID + handle = pynvml.nvmlDeviceGetHandleByIndex(gpu_id) + info = pynvml.nvmlDeviceGetMemoryInfo(handle) + gpu_Used = info.used + print('pynvml', gpu_Used / NUM_EXPAND) + if gpu_Used / NUM_EXPAND > self.max_gpu_memory: + self.max_gpu_memory = gpu_Used / NUM_EXPAND + + print(self.profiler.summary()) + val_metrics_4tb = aggregate_metrics(metrics, self.config.TRAINER.EPI_ERR_THR, self.config.LOFTR.EVAL_TIMES, self.config.TRAINER.THRESHOLDS, method=self.config.TRAINER.AUC_METHOD) + logger.info('\n' + pprint.pformat(val_metrics_4tb)) + if self.dump_dir is not None: + np.save(Path(self.dump_dir) / 'LoFTR_pred_eval', dumps) \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/src/loftr/__init__.py b/imcui/third_party/MatchAnything/src/loftr/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..82e7da71337eb772257c9a2b6c96b41a562aadea --- /dev/null +++ b/imcui/third_party/MatchAnything/src/loftr/__init__.py @@ -0,0 +1 @@ +from .loftr import LoFTR \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/src/loftr/backbone/__init__.py b/imcui/third_party/MatchAnything/src/loftr/backbone/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d0eb682da64b684eeddcc5ea576b6e89137dd40b --- /dev/null +++ b/imcui/third_party/MatchAnything/src/loftr/backbone/__init__.py @@ -0,0 +1,61 @@ +from .resnet_fpn import ResNetFPN_8_2, ResNetFPN_16_4, ResNetFPN_8_1, ResNetFPN_8_2_align, 
ResNetFPN_8_1_align, ResNetFPN_8_2_fix, ResNet_8_1_align, VGG_8_1_align, RepVGG_8_1_align, \ + RepVGGnfpn_8_1_align, RepVGG_8_2_fix, s2dnet_8_1_align + +def build_backbone(config): + if config['backbone_type'] == 'ResNetFPN': + if config['align_corner'] is None or config['align_corner'] is True: + if config['resolution'] == (8, 2): + return ResNetFPN_8_2(config['resnetfpn']) + elif config['resolution'] == (16, 4): + return ResNetFPN_16_4(config['resnetfpn']) + elif config['resolution'] == (8, 1): + return ResNetFPN_8_1(config['resnetfpn']) + elif config['align_corner'] is False: + if config['resolution'] == (8, 2): + return ResNetFPN_8_2_align(config['resnetfpn']) + elif config['resolution'] == (16, 4): + return ResNetFPN_16_4(config['resnetfpn']) + elif config['resolution'] == (8, 1): + return ResNetFPN_8_1_align(config['resnetfpn']) + elif config['backbone_type'] == 'ResNetFPNFIX': + if config['align_corner'] is None or config['align_corner'] is True: + if config['resolution'] == (8, 2): + return ResNetFPN_8_2_fix(config['resnetfpn']) + elif config['backbone_type'] == 'ResNet': + if config['align_corner'] is None or config['align_corner'] is True: + raise ValueError(f"LOFTR.BACKBONE_TYPE {config['backbone_type']} not supported.") + elif config['align_corner'] is False: + if config['resolution'] == (8, 1): + return ResNet_8_1_align(config['resnetfpn']) + elif config['backbone_type'] == 'VGG': + if config['align_corner'] is None or config['align_corner'] is True: + raise ValueError(f"LOFTR.BACKBONE_TYPE {config['backbone_type']} not supported.") + elif config['align_corner'] is False: + if config['resolution'] == (8, 1): + return VGG_8_1_align(config['resnetfpn']) + elif config['backbone_type'] == 'RepVGG': + if config['align_corner'] is None or config['align_corner'] is True: + raise ValueError(f"LOFTR.BACKBONE_TYPE {config['backbone_type']} not supported.") + elif config['align_corner'] is False: + if config['resolution'] == (8, 1): + return RepVGG_8_1_align(config['resnetfpn']) + elif config['backbone_type'] == 'RepVGGNFPN': + if config['align_corner'] is None or config['align_corner'] is True: + raise ValueError(f"LOFTR.BACKBONE_TYPE {config['backbone_type']} not supported.") + elif config['align_corner'] is False: + if config['resolution'] == (8, 1): + return RepVGGnfpn_8_1_align(config['resnetfpn']) + elif config['backbone_type'] == 'RepVGGFPNFIX': + if config['align_corner'] is None or config['align_corner'] is True: + if config['resolution'] == (8, 2): + return RepVGG_8_2_fix(config['resnetfpn']) + elif config['align_corner'] is False: + raise ValueError(f"LOFTR.BACKBONE_TYPE {config['backbone_type']} not supported.") + elif config['backbone_type'] == 's2dnet': + if config['align_corner'] is None or config['align_corner'] is True: + raise ValueError(f"LOFTR.BACKBONE_TYPE {config['backbone_type']} not supported.") + elif config['align_corner'] is False: + if config['resolution'] == (8, 1): + return s2dnet_8_1_align(config['resnetfpn']) + else: + raise ValueError(f"LOFTR.BACKBONE_TYPE {config['backbone_type']} not supported.") diff --git a/imcui/third_party/MatchAnything/src/loftr/backbone/repvgg.py b/imcui/third_party/MatchAnything/src/loftr/backbone/repvgg.py new file mode 100644 index 0000000000000000000000000000000000000000..873a934dc0094fc742076c10efbaafcc78c283a7 --- /dev/null +++ b/imcui/third_party/MatchAnything/src/loftr/backbone/repvgg.py @@ -0,0 +1,319 @@ +# -------------------------------------------------------- +# RepVGG: Making VGG-style ConvNets Great Again 
(https://openaccess.thecvf.com/content/CVPR2021/papers/Ding_RepVGG_Making_VGG-Style_ConvNets_Great_Again_CVPR_2021_paper.pdf)
+# Github source: https://github.com/DingXiaoH/RepVGG
+# Licensed under The MIT License [see LICENSE for details]
+# --------------------------------------------------------
+import torch.nn as nn
+import numpy as np
+import torch
+import copy
+# from se_block import SEBlock
+import torch.utils.checkpoint as checkpoint
+from loguru import logger
+
+def conv_bn(in_channels, out_channels, kernel_size, stride, padding, groups=1):
+    result = nn.Sequential()
+    result.add_module('conv', nn.Conv2d(in_channels=in_channels, out_channels=out_channels,
+                                        kernel_size=kernel_size, stride=stride, padding=padding, groups=groups, bias=False))
+    result.add_module('bn', nn.BatchNorm2d(num_features=out_channels))
+    return result
+
+class RepVGGBlock(nn.Module):
+
+    def __init__(self, in_channels, out_channels, kernel_size,
+                 stride=1, padding=0, dilation=1, groups=1, padding_mode='zeros', deploy=False, use_se=False, leaky=-1.0):
+        super(RepVGGBlock, self).__init__()
+        self.deploy = deploy
+        self.groups = groups
+        self.in_channels = in_channels
+
+        assert kernel_size == 3
+        assert padding == 1
+
+        padding_11 = padding - kernel_size // 2
+
+        if leaky == -2:
+            self.nonlinearity = nn.Identity()
+            logger.info("Using Identity nonlinearity in repvgg_block")
+        elif leaky < 0:
+            self.nonlinearity = nn.ReLU()
+        else:
+            self.nonlinearity = nn.LeakyReLU(leaky)
+
+        if use_se:
+            # Note that RepVGG-D2se uses SE before nonlinearity, but RepVGGplus models use SE after nonlinearity.
+            # self.se = SEBlock(out_channels, internal_neurons=out_channels // 16)
+            raise ValueError("SEBlock not supported")
+        else:
+            self.se = nn.Identity()
+
+        if deploy:
+            self.rbr_reparam = nn.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride,
+                                         padding=padding, dilation=dilation, groups=groups, bias=True, padding_mode=padding_mode)
+
+        else:
+            self.rbr_identity = nn.BatchNorm2d(num_features=in_channels) if out_channels == in_channels and stride == 1 else None
+            self.rbr_dense = conv_bn(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding, groups=groups)
+            self.rbr_1x1 = conv_bn(in_channels=in_channels, out_channels=out_channels, kernel_size=1, stride=stride, padding=padding_11, groups=groups)
+            print('RepVGG Block, identity = ', self.rbr_identity)
+
+
+    def forward(self, inputs):
+        if hasattr(self, 'rbr_reparam'):
+            return self.nonlinearity(self.se(self.rbr_reparam(inputs)))
+
+        if self.rbr_identity is None:
+            id_out = 0
+        else:
+            id_out = self.rbr_identity(inputs)
+
+        return self.nonlinearity(self.se(self.rbr_dense(inputs) + self.rbr_1x1(inputs) + id_out))
+
+
+    # Optional. This may improve the accuracy and facilitate quantization in some cases.
+    # 1. Cancel the original weight decay on rbr_dense.conv.weight and rbr_1x1.conv.weight.
+    # 2. Use like this (see the runnable sketch after this class):
+    #    loss = criterion(....)
+    #    for every RepVGGBlock blk:
+    #        loss += weight_decay_coefficient * 0.5 * blk.get_custom_L2()
+    #    optimizer.zero_grad()
+    #    loss.backward()
+    def get_custom_L2(self):
+        K3 = self.rbr_dense.conv.weight
+        K1 = self.rbr_1x1.conv.weight
+        t3 = (self.rbr_dense.bn.weight / ((self.rbr_dense.bn.running_var + self.rbr_dense.bn.eps).sqrt())).reshape(-1, 1, 1, 1).detach()
+        t1 = (self.rbr_1x1.bn.weight / ((self.rbr_1x1.bn.running_var + self.rbr_1x1.bn.eps).sqrt())).reshape(-1, 1, 1, 1).detach()
+
+        l2_loss_circle = (K3 ** 2).sum() - (K3[:, :, 1:2, 1:2] ** 2).sum()  # The L2 loss of the "circle" of weights in the 3x3 kernel. Use regular L2 on them.
+        eq_kernel = K3[:, :, 1:2, 1:2] * t3 + K1 * t1  # The equivalent resultant central point of the 3x3 kernel.
+        l2_loss_eq_kernel = (eq_kernel ** 2 / (t3 ** 2 + t1 ** 2)).sum()  # Normalize for an L2 coefficient comparable to regular L2.
+        return l2_loss_eq_kernel + l2_loss_circle
+
+
+    # This func derives the equivalent kernel and bias in a DIFFERENTIABLE way.
+    # You can get the equivalent kernel and bias at any time and do whatever you want,
+    # for example, apply some penalties or constraints during training, just like you do to the other models.
+    # May be useful for quantization or pruning.
+    def get_equivalent_kernel_bias(self):
+        kernel3x3, bias3x3 = self._fuse_bn_tensor(self.rbr_dense)
+        kernel1x1, bias1x1 = self._fuse_bn_tensor(self.rbr_1x1)
+        kernelid, biasid = self._fuse_bn_tensor(self.rbr_identity)
+        return kernel3x3 + self._pad_1x1_to_3x3_tensor(kernel1x1) + kernelid, bias3x3 + bias1x1 + biasid
+
+    def _pad_1x1_to_3x3_tensor(self, kernel1x1):
+        if kernel1x1 is None:
+            return 0
+        else:
+            return torch.nn.functional.pad(kernel1x1, [1, 1, 1, 1])
+
+    def _fuse_bn_tensor(self, branch):
+        if branch is None:
+            return 0, 0
+        if isinstance(branch, nn.Sequential):
+            kernel = branch.conv.weight
+            running_mean = branch.bn.running_mean
+            running_var = branch.bn.running_var
+            gamma = branch.bn.weight
+            beta = branch.bn.bias
+            eps = branch.bn.eps
+        else:
+            assert isinstance(branch, nn.BatchNorm2d)
+            if not hasattr(self, 'id_tensor'):
+                input_dim = self.in_channels // self.groups
+                kernel_value = np.zeros((self.in_channels, input_dim, 3, 3), dtype=np.float32)
+                for i in range(self.in_channels):
+                    kernel_value[i, i % input_dim, 1, 1] = 1
+                self.id_tensor = torch.from_numpy(kernel_value).to(branch.weight.device)
+            kernel = self.id_tensor
+            running_mean = branch.running_mean
+            running_var = branch.running_var
+            gamma = branch.weight
+            beta = branch.bias
+            eps = branch.eps
+        std = (running_var + eps).sqrt()
+        t = (gamma / std).reshape(-1, 1, 1, 1)
+        return kernel * t, beta - running_mean * gamma / std
+
+    def switch_to_deploy(self):
+        if hasattr(self, 'rbr_reparam'):
+            return
+        kernel, bias = self.get_equivalent_kernel_bias()
+        self.rbr_reparam = nn.Conv2d(in_channels=self.rbr_dense.conv.in_channels, out_channels=self.rbr_dense.conv.out_channels,
+                                     kernel_size=self.rbr_dense.conv.kernel_size, stride=self.rbr_dense.conv.stride,
+                                     padding=self.rbr_dense.conv.padding, dilation=self.rbr_dense.conv.dilation, groups=self.rbr_dense.conv.groups, bias=True)
+        self.rbr_reparam.weight.data = kernel
+        self.rbr_reparam.bias.data = bias
+        self.__delattr__('rbr_dense')
+        self.__delattr__('rbr_1x1')
+        if hasattr(self, 'rbr_identity'):
+            self.__delattr__('rbr_identity')
+        if hasattr(self, 'id_tensor'):
+            self.__delattr__('id_tensor')
+        self.deploy = True
+
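+
+# A minimal sketch of the custom-L2 recipe described above (illustrative only:
+# `model`, `criterion`, `inputs`, `targets`, `optimizer`, and
+# `weight_decay_coefficient` are assumed to come from the user's training loop,
+# with weight decay cancelled on the rbr_dense/rbr_1x1 conv weights):
+#
+#     loss = criterion(model(inputs), targets)
+#     for blk in model.modules():
+#         if isinstance(blk, RepVGGBlock) and not blk.deploy:
+#             loss += weight_decay_coefficient * 0.5 * blk.get_custom_L2()
+#     optimizer.zero_grad()
+#     loss.backward()
+#     optimizer.step()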
+
+class RepVGG(nn.Module):
+
+    def __init__(self, num_blocks, num_classes=1000, width_multiplier=None, override_groups_map=None,
+                 deploy=False, use_se=False, use_checkpoint=False, leaky=-1.0):
+        super(RepVGG, self).__init__()
+        assert len(width_multiplier) == 4
+        self.deploy = deploy
+        self.override_groups_map = override_groups_map or dict()
+        assert 0 not in self.override_groups_map
+        self.use_se = use_se
+        self.use_checkpoint = use_checkpoint
+
+        self.in_planes = min(64, int(64 * width_multiplier[0]))
+        self.stage0 = RepVGGBlock(in_channels=1, out_channels=self.in_planes, kernel_size=3, stride=2, padding=1, deploy=self.deploy, use_se=self.use_se, leaky=leaky)
+        self.cur_layer_idx = 1
+        self.stage1 = self._make_stage(int(64 * width_multiplier[0]), num_blocks[0], stride=1, leaky=leaky)
+        self.stage2 = self._make_stage(int(128 * width_multiplier[1]), num_blocks[1], stride=2, leaky=leaky)
+        self.stage3 = self._make_stage(int(256 * width_multiplier[2]), num_blocks[2], stride=2, leaky=leaky)
+        # The classification head (stage4, gap, linear) is unused in this backbone-only variant:
+        # self.stage4 = self._make_stage(int(512 * width_multiplier[3]), num_blocks[3], stride=1)
+        # self.gap = nn.AdaptiveAvgPool2d(output_size=1)
+        # self.linear = nn.Linear(int(512 * width_multiplier[3]), num_classes)
+
+    def _make_stage(self, planes, num_blocks, stride, leaky=-1.0):
+        strides = [stride] + [1]*(num_blocks-1)
+        blocks = []
+        for stride in strides:
+            cur_groups = self.override_groups_map.get(self.cur_layer_idx, 1)
+            blocks.append(RepVGGBlock(in_channels=self.in_planes, out_channels=planes, kernel_size=3,
+                                      stride=stride, padding=1, groups=cur_groups, deploy=self.deploy, use_se=self.use_se, leaky=leaky))
+            self.in_planes = planes
+            self.cur_layer_idx += 1
+        return nn.ModuleList(blocks)
+
+    def forward(self, x):
+        out = self.stage0(x)
+        for stage in (self.stage1, self.stage2, self.stage3):  # , self.stage4):
+            for block in stage:
+                if self.use_checkpoint:
+                    out = checkpoint.checkpoint(block, out)
+                else:
+                    out = block(out)
+        # The gap/linear classification head is disabled above, so this backbone-only
+        # variant returns the final-stage feature map instead of class logits.
+        return out
+
+
+optional_groupwise_layers = [2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26]
+g2_map = {l: 2 for l in optional_groupwise_layers}
+g4_map = {l: 4 for l in optional_groupwise_layers}
+
+def create_RepVGG_A0(deploy=False, use_checkpoint=False):
+    return RepVGG(num_blocks=[2, 4, 14, 1], num_classes=1000,
+                  width_multiplier=[0.75, 0.75, 0.75, 2.5], override_groups_map=None, deploy=deploy, use_checkpoint=use_checkpoint)
+
+def create_RepVGG_A1(deploy=False, use_checkpoint=False):
+    return RepVGG(num_blocks=[2, 4, 14, 1], num_classes=1000,
+                  width_multiplier=[1, 1, 1, 2.5], override_groups_map=None, deploy=deploy, use_checkpoint=use_checkpoint)
+
+def create_RepVGG_A15(deploy=False, use_checkpoint=False):
+    return RepVGG(num_blocks=[2, 4, 14, 1], num_classes=1000,
+                  width_multiplier=[1.25, 1.25, 1.25, 2.5], override_groups_map=None, deploy=deploy, use_checkpoint=use_checkpoint)
+
+def create_RepVGG_A1_leaky(deploy=False, use_checkpoint=False):
+    return RepVGG(num_blocks=[2, 4, 14, 1], num_classes=1000,
+                  width_multiplier=[1, 1, 1, 2.5], override_groups_map=None, deploy=deploy, use_checkpoint=use_checkpoint, leaky=0.01)
+
+def create_RepVGG_A2(deploy=False, use_checkpoint=False):
+    return RepVGG(num_blocks=[2, 4, 14, 1], num_classes=1000,
+                  width_multiplier=[1.5, 1.5, 1.5, 2.75], override_groups_map=None, deploy=deploy, use_checkpoint=use_checkpoint)
+
+def create_RepVGG_B0(deploy=False, use_checkpoint=False):
+    return RepVGG(num_blocks=[4, 6, 16, 1], num_classes=1000,
+                  width_multiplier=[1, 1, 1, 2.5], override_groups_map=None, deploy=deploy, use_checkpoint=use_checkpoint)
+
+def create_RepVGG_B1(deploy=False, 
use_checkpoint=False): + return RepVGG(num_blocks=[4, 6, 16, 1], num_classes=1000, + width_multiplier=[2, 2, 2, 4], override_groups_map=None, deploy=deploy, use_checkpoint=use_checkpoint) + +def create_RepVGG_B1g2(deploy=False, use_checkpoint=False): + return RepVGG(num_blocks=[4, 6, 16, 1], num_classes=1000, + width_multiplier=[2, 2, 2, 4], override_groups_map=g2_map, deploy=deploy, use_checkpoint=use_checkpoint) + +def create_RepVGG_B1g4(deploy=False, use_checkpoint=False): + return RepVGG(num_blocks=[4, 6, 16, 1], num_classes=1000, + width_multiplier=[2, 2, 2, 4], override_groups_map=g4_map, deploy=deploy, use_checkpoint=use_checkpoint) + + +def create_RepVGG_B2(deploy=False, use_checkpoint=False): + return RepVGG(num_blocks=[4, 6, 16, 1], num_classes=1000, + width_multiplier=[2.5, 2.5, 2.5, 5], override_groups_map=None, deploy=deploy, use_checkpoint=use_checkpoint) + +def create_RepVGG_B2g2(deploy=False, use_checkpoint=False): + return RepVGG(num_blocks=[4, 6, 16, 1], num_classes=1000, + width_multiplier=[2.5, 2.5, 2.5, 5], override_groups_map=g2_map, deploy=deploy, use_checkpoint=use_checkpoint) + +def create_RepVGG_B2g4(deploy=False, use_checkpoint=False): + return RepVGG(num_blocks=[4, 6, 16, 1], num_classes=1000, + width_multiplier=[2.5, 2.5, 2.5, 5], override_groups_map=g4_map, deploy=deploy, use_checkpoint=use_checkpoint) + + +def create_RepVGG_B3(deploy=False, use_checkpoint=False): + return RepVGG(num_blocks=[4, 6, 16, 1], num_classes=1000, + width_multiplier=[3, 3, 3, 5], override_groups_map=None, deploy=deploy, use_checkpoint=use_checkpoint) + +def create_RepVGG_B3g2(deploy=False, use_checkpoint=False): + return RepVGG(num_blocks=[4, 6, 16, 1], num_classes=1000, + width_multiplier=[3, 3, 3, 5], override_groups_map=g2_map, deploy=deploy, use_checkpoint=use_checkpoint) + +def create_RepVGG_B3g4(deploy=False, use_checkpoint=False): + return RepVGG(num_blocks=[4, 6, 16, 1], num_classes=1000, + width_multiplier=[3, 3, 3, 5], override_groups_map=g4_map, deploy=deploy, use_checkpoint=use_checkpoint) + +def create_RepVGG_D2se(deploy=False, use_checkpoint=False): + return RepVGG(num_blocks=[8, 14, 24, 1], num_classes=1000, + width_multiplier=[2.5, 2.5, 2.5, 5], override_groups_map=None, deploy=deploy, use_se=True, use_checkpoint=use_checkpoint) + + +func_dict = { +'RepVGG-A0': create_RepVGG_A0, +'RepVGG-A1': create_RepVGG_A1, +'RepVGG-A15': create_RepVGG_A15, +'RepVGG-A1_leaky': create_RepVGG_A1_leaky, +'RepVGG-A2': create_RepVGG_A2, +'RepVGG-B0': create_RepVGG_B0, +'RepVGG-B1': create_RepVGG_B1, +'RepVGG-B1g2': create_RepVGG_B1g2, +'RepVGG-B1g4': create_RepVGG_B1g4, +'RepVGG-B2': create_RepVGG_B2, +'RepVGG-B2g2': create_RepVGG_B2g2, +'RepVGG-B2g4': create_RepVGG_B2g4, +'RepVGG-B3': create_RepVGG_B3, +'RepVGG-B3g2': create_RepVGG_B3g2, +'RepVGG-B3g4': create_RepVGG_B3g4, +'RepVGG-D2se': create_RepVGG_D2se, # Updated at April 25, 2021. This is not reported in the CVPR paper. 
+} +def get_RepVGG_func_by_name(name): + return func_dict[name] + + + +# Use this for converting a RepVGG model or a bigger model with RepVGG as its component +# Use like this +# model = create_RepVGG_A0(deploy=False) +# train model or load weights +# repvgg_model_convert(model, save_path='repvgg_deploy.pth') +# If you want to preserve the original model, call with do_copy=True + +# ====================== for using RepVGG as the backbone of a bigger model, e.g., PSPNet, the pseudo code will be like +# train_backbone = create_RepVGG_B2(deploy=False) +# train_backbone.load_state_dict(torch.load('RepVGG-B2-train.pth')) +# train_pspnet = build_pspnet(backbone=train_backbone) +# segmentation_train(train_pspnet) +# deploy_pspnet = repvgg_model_convert(train_pspnet) +# segmentation_test(deploy_pspnet) +# ===================== example_pspnet.py shows an example + +def repvgg_model_convert(model:torch.nn.Module, save_path=None, do_copy=True): + if do_copy: + model = copy.deepcopy(model) + for module in model.modules(): + if hasattr(module, 'switch_to_deploy'): + module.switch_to_deploy() + if save_path is not None: + torch.save(model.state_dict(), save_path) + return model diff --git a/imcui/third_party/MatchAnything/src/loftr/backbone/resnet_fpn.py b/imcui/third_party/MatchAnything/src/loftr/backbone/resnet_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..3596d7bd7f827197476e3f6ffaa1770a6913a3f8 --- /dev/null +++ b/imcui/third_party/MatchAnything/src/loftr/backbone/resnet_fpn.py @@ -0,0 +1,1094 @@ +import torch.nn as nn +import torch.nn.functional as F +from .repvgg import get_RepVGG_func_by_name +from .s2dnet import S2DNet + + +def conv1x1(in_planes, out_planes, stride=1): + """1x1 convolution without padding""" + return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, padding=0, bias=False) + + +def conv3x3(in_planes, out_planes, stride=1): + """3x3 convolution with padding""" + return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False) + + +class BasicBlock(nn.Module): + def __init__(self, in_planes, planes, stride=1): + super().__init__() + self.conv1 = conv3x3(in_planes, planes, stride) + self.conv2 = conv3x3(planes, planes) + self.bn1 = nn.BatchNorm2d(planes) + self.bn2 = nn.BatchNorm2d(planes) + self.relu = nn.ReLU(inplace=True) + + if stride == 1: + self.downsample = None + else: + self.downsample = nn.Sequential( + conv1x1(in_planes, planes, stride=stride), + nn.BatchNorm2d(planes) + ) + + def forward(self, x): + y = x + y = self.relu(self.bn1(self.conv1(y))) + y = self.bn2(self.conv2(y)) + + if self.downsample is not None: + x = self.downsample(x) + + return self.relu(x+y) + + +class ResNetFPN_8_2(nn.Module): + """ + ResNet+FPN, output resolution are 1/8 and 1/2. + Each block has 2 layers. + """ + + def __init__(self, config): + super().__init__() + # Config + block = BasicBlock + initial_dim = config['initial_dim'] + block_dims = config['block_dims'] + + # Class Variable + self.in_planes = initial_dim + + # Networks + self.conv1 = nn.Conv2d(1, initial_dim, kernel_size=7, stride=2, padding=3, bias=False) + self.bn1 = nn.BatchNorm2d(initial_dim) + self.relu = nn.ReLU(inplace=True) + + self.layer1 = self._make_layer(block, block_dims[0], stride=1) # 1/2 + self.layer2 = self._make_layer(block, block_dims[1], stride=2) # 1/4 + self.layer3 = self._make_layer(block, block_dims[2], stride=2) # 1/8 + + # 3. 
FPN upsample + self.layer3_outconv = conv1x1(block_dims[2], block_dims[2]) + self.layer2_outconv = conv1x1(block_dims[1], block_dims[2]) + self.layer2_outconv2 = nn.Sequential( + conv3x3(block_dims[2], block_dims[2]), + nn.BatchNorm2d(block_dims[2]), + nn.LeakyReLU(), + conv3x3(block_dims[2], block_dims[1]), + ) + self.layer1_outconv = conv1x1(block_dims[0], block_dims[1]) + self.layer1_outconv2 = nn.Sequential( + conv3x3(block_dims[1], block_dims[1]), + nn.BatchNorm2d(block_dims[1]), + nn.LeakyReLU(), + conv3x3(block_dims[1], block_dims[0]), + ) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + def _make_layer(self, block, dim, stride=1): + layer1 = block(self.in_planes, dim, stride=stride) + layer2 = block(dim, dim, stride=1) + layers = (layer1, layer2) + + self.in_planes = dim + return nn.Sequential(*layers) + + def forward(self, x): + # ResNet Backbone + x0 = self.relu(self.bn1(self.conv1(x))) + x1 = self.layer1(x0) # 1/2 + x2 = self.layer2(x1) # 1/4 + x3 = self.layer3(x2) # 1/8 + + # FPN + x3_out = self.layer3_outconv(x3) + + x3_out_2x = F.interpolate(x3_out, scale_factor=2., mode='bilinear', align_corners=True) + x2_out = self.layer2_outconv(x2) + x2_out = self.layer2_outconv2(x2_out+x3_out_2x) + + x2_out_2x = F.interpolate(x2_out, scale_factor=2., mode='bilinear', align_corners=True) + x1_out = self.layer1_outconv(x1) + x1_out = self.layer1_outconv2(x1_out+x2_out_2x) + + return {'feats_c': x3_out, 'feats_f': x1_out} + + def pro(self, x, profiler): + with profiler.profile('ResNet Backbone'): + # ResNet Backbone + x0 = self.relu(self.bn1(self.conv1(x))) + x1 = self.layer1(x0) # 1/2 + x2 = self.layer2(x1) # 1/4 + x3 = self.layer3(x2) # 1/8 + + with profiler.profile('ResNet FPN'): + # FPN + x3_out = self.layer3_outconv(x3) + + x3_out_2x = F.interpolate(x3_out, scale_factor=2., mode='bilinear', align_corners=True) + x2_out = self.layer2_outconv(x2) + x2_out = self.layer2_outconv2(x2_out+x3_out_2x) + + x2_out_2x = F.interpolate(x2_out, scale_factor=2., mode='bilinear', align_corners=True) + x1_out = self.layer1_outconv(x1) + x1_out = self.layer1_outconv2(x1_out+x2_out_2x) + + return {'feats_c': x3_out, 'feats_f': x1_out} + +class ResNetFPN_8_2_fix(nn.Module): + """ + ResNet+FPN, output resolution are 1/8 and 1/2. + Each block has 2 layers. + """ + + def __init__(self, config): + super().__init__() + # Config + block = BasicBlock + initial_dim = config['initial_dim'] + block_dims = config['block_dims'] + + # Class Variable + self.in_planes = initial_dim + self.skip_fine_feature = config['coarse_feat_only'] + self.inter_feat = config['inter_feat'] + + # Networks + self.conv1 = nn.Conv2d(1, initial_dim, kernel_size=7, stride=2, padding=3, bias=False) + self.bn1 = nn.BatchNorm2d(initial_dim) + self.relu = nn.ReLU(inplace=True) + + self.layer1 = self._make_layer(block, block_dims[0], stride=1) # 1/2 + self.layer2 = self._make_layer(block, block_dims[1], stride=2) # 1/4 + self.layer3 = self._make_layer(block, block_dims[2], stride=2) # 1/8 + + # 3. 
FPN upsample + self.layer3_outconv = conv1x1(block_dims[2], block_dims[2]) + self.layer2_outconv = conv1x1(block_dims[1], block_dims[2]) + self.layer2_outconv2 = nn.Sequential( + conv3x3(block_dims[2], block_dims[2]), + nn.BatchNorm2d(block_dims[2]), + nn.LeakyReLU(), + conv3x3(block_dims[2], block_dims[1]), + ) + self.layer1_outconv = conv1x1(block_dims[0], block_dims[1]) + self.layer1_outconv2 = nn.Sequential( + conv3x3(block_dims[1], block_dims[1]), + nn.BatchNorm2d(block_dims[1]), + nn.LeakyReLU(), + conv3x3(block_dims[1], block_dims[0]), + ) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + def _make_layer(self, block, dim, stride=1): + layer1 = block(self.in_planes, dim, stride=stride) + layer2 = block(dim, dim, stride=1) + layers = (layer1, layer2) + + self.in_planes = dim + return nn.Sequential(*layers) + + def forward(self, x): + # ResNet Backbone + x0 = self.relu(self.bn1(self.conv1(x))) + x1 = self.layer1(x0) # 1/2 + x2 = self.layer2(x1) # 1/4 + x3 = self.layer3(x2) # 1/8 + + # FPN + if self.skip_fine_feature: + if self.inter_feat: + return {'feats_c': x3, 'feats_f': None, 'feats_x2': x2, 'feats_x1': x1} + else: + return {'feats_c': x3, 'feats_f': None} + + + x3_out = self.layer3_outconv(x3) # n+1 + + x3_out_2x = F.interpolate(x3_out, size=((x3_out.size(-2)-1)*2+1, (x3_out.size(-1)-1)*2+1), mode='bilinear', align_corners=True) # 2n+1 + x2_out = self.layer2_outconv(x2) + x2_out = self.layer2_outconv2(x2_out+x3_out_2x) + + x2_out_2x = F.interpolate(x2_out, size=((x2_out.size(-2)-1)*2+1, (x2_out.size(-1)-1)*2+1), mode='bilinear', align_corners=True) # 4n+1 + x1_out = self.layer1_outconv(x1) + x1_out = self.layer1_outconv2(x1_out+x2_out_2x) + + return {'feats_c': x3_out, 'feats_f': x1_out} + + +class ResNetFPN_16_4(nn.Module): + """ + ResNet+FPN, output resolution are 1/16 and 1/4. + Each block has 2 layers. + """ + + def __init__(self, config): + super().__init__() + # Config + block = BasicBlock + initial_dim = config['initial_dim'] + block_dims = config['block_dims'] + + # Class Variable + self.in_planes = initial_dim + + # Networks + self.conv1 = nn.Conv2d(1, initial_dim, kernel_size=7, stride=2, padding=3, bias=False) + self.bn1 = nn.BatchNorm2d(initial_dim) + self.relu = nn.ReLU(inplace=True) + + self.layer1 = self._make_layer(block, block_dims[0], stride=1) # 1/2 + self.layer2 = self._make_layer(block, block_dims[1], stride=2) # 1/4 + self.layer3 = self._make_layer(block, block_dims[2], stride=2) # 1/8 + self.layer4 = self._make_layer(block, block_dims[3], stride=2) # 1/16 + + # 3. 
FPN upsample + self.layer4_outconv = conv1x1(block_dims[3], block_dims[3]) + self.layer3_outconv = conv1x1(block_dims[2], block_dims[3]) + self.layer3_outconv2 = nn.Sequential( + conv3x3(block_dims[3], block_dims[3]), + nn.BatchNorm2d(block_dims[3]), + nn.LeakyReLU(), + conv3x3(block_dims[3], block_dims[2]), + ) + + self.layer2_outconv = conv1x1(block_dims[1], block_dims[2]) + self.layer2_outconv2 = nn.Sequential( + conv3x3(block_dims[2], block_dims[2]), + nn.BatchNorm2d(block_dims[2]), + nn.LeakyReLU(), + conv3x3(block_dims[2], block_dims[1]), + ) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + def _make_layer(self, block, dim, stride=1): + layer1 = block(self.in_planes, dim, stride=stride) + layer2 = block(dim, dim, stride=1) + layers = (layer1, layer2) + + self.in_planes = dim + return nn.Sequential(*layers) + + def forward(self, x): + # ResNet Backbone + x0 = self.relu(self.bn1(self.conv1(x))) + x1 = self.layer1(x0) # 1/2 + x2 = self.layer2(x1) # 1/4 + x3 = self.layer3(x2) # 1/8 + x4 = self.layer4(x3) # 1/16 + + # FPN + x4_out = self.layer4_outconv(x4) + + x4_out_2x = F.interpolate(x4_out, scale_factor=2., mode='bilinear', align_corners=True) + x3_out = self.layer3_outconv(x3) + x3_out = self.layer3_outconv2(x3_out+x4_out_2x) + + x3_out_2x = F.interpolate(x3_out, scale_factor=2., mode='bilinear', align_corners=True) + x2_out = self.layer2_outconv(x2) + x2_out = self.layer2_outconv2(x2_out+x3_out_2x) + + return {'feats_c': x4_out, 'feats_f': x2_out} + + +class ResNetFPN_8_1(nn.Module): + """ + ResNet+FPN, output resolution are 1/8 and 1. + Each block has 2 layers. + """ + + def __init__(self, config): + super().__init__() + # Config + block = BasicBlock + initial_dim = config['initial_dim'] + block_dims = config['block_dims'] + + # Class Variable + self.in_planes = initial_dim + self.skip_fine_feature = config['coarse_feat_only'] + self.inter_feat = config['inter_feat'] + + # Networks + self.conv1 = nn.Conv2d(1, initial_dim, kernel_size=7, stride=2, padding=3, bias=False) + self.bn1 = nn.BatchNorm2d(initial_dim) + self.relu = nn.ReLU(inplace=True) + + self.layer1 = self._make_layer(block, block_dims[0], stride=1) # 1/2 + self.layer2 = self._make_layer(block, block_dims[1], stride=2) # 1/4 + self.layer3 = self._make_layer(block, block_dims[2], stride=2) # 1/8 + + # 3. 
FPN upsample + if not self.skip_fine_feature: + self.layer3_outconv = conv1x1(block_dims[2], block_dims[2]) + self.layer2_outconv = conv1x1(block_dims[1], block_dims[2]) + self.layer2_outconv2 = nn.Sequential( + conv3x3(block_dims[2], block_dims[2]), + nn.BatchNorm2d(block_dims[2]), + nn.LeakyReLU(), + conv3x3(block_dims[2], block_dims[1]), + ) + self.layer1_outconv = conv1x1(block_dims[0], block_dims[1]) + self.layer1_outconv2 = nn.Sequential( + conv3x3(block_dims[1], block_dims[1]), + nn.BatchNorm2d(block_dims[1]), + nn.LeakyReLU(), + conv3x3(block_dims[1], block_dims[0]), + ) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + def _make_layer(self, block, dim, stride=1): + layer1 = block(self.in_planes, dim, stride=stride) + layer2 = block(dim, dim, stride=1) + layers = (layer1, layer2) + + self.in_planes = dim + return nn.Sequential(*layers) + + def forward(self, x): + # ResNet Backbone + x0 = self.relu(self.bn1(self.conv1(x))) + x1 = self.layer1(x0) # 1/2 + x2 = self.layer2(x1) # 1/4 + x3 = self.layer3(x2) # 1/8 + + # FPN + if self.skip_fine_feature: + if self.inter_feat: + return {'feats_c': x3, 'feats_f': None, 'feats_x2': x2, 'feats_x1': x1} + else: + return {'feats_c': x3, 'feats_f': None} + + x3_out = self.layer3_outconv(x3) + + x3_out_2x = F.interpolate(x3_out, scale_factor=2., mode='bilinear', align_corners=True) + x2_out = self.layer2_outconv(x2) + x2_out = self.layer2_outconv2(x2_out+x3_out_2x) + + x2_out_2x = F.interpolate(x2_out, scale_factor=2., mode='bilinear', align_corners=True) + x1_out = self.layer1_outconv(x1) + x1_out = self.layer1_outconv2(x1_out+x2_out_2x) + + x0_out = F.interpolate(x1_out, scale_factor=2., mode='bilinear', align_corners=False) + + if not self.inter_feat: + return {'feats_c': x3, 'feats_f': x0_out} + else: + return {'feats_c': x3, 'feats_f': x0_out, 'feats_x2': x2, 'feats_x1': x1} + + +class ResNetFPN_8_1_align(nn.Module): + """ + ResNet+FPN, output resolution are 1/8 and 1. + Each block has 2 layers. + """ + + def __init__(self, config): + super().__init__() + # Config + block = BasicBlock + initial_dim = config['initial_dim'] + block_dims = config['block_dims'] + + # Class Variable + self.in_planes = initial_dim + self.skip_fine_feature = config['coarse_feat_only'] + self.inter_feat = config['inter_feat'] + # Networks + self.conv1 = nn.Conv2d(1, initial_dim, kernel_size=7, stride=2, padding=3, bias=False) + self.bn1 = nn.BatchNorm2d(initial_dim) + self.relu = nn.ReLU(inplace=True) + + self.layer1 = self._make_layer(block, block_dims[0], stride=1) # 1/2 + self.layer2 = self._make_layer(block, block_dims[1], stride=2) # 1/4 + self.layer3 = self._make_layer(block, block_dims[2], stride=2) # 1/8 + + # 3. 
FPN upsample
+        if not self.skip_fine_feature:
+            self.layer3_outconv = conv1x1(block_dims[2], block_dims[2])
+            self.layer2_outconv = conv1x1(block_dims[1], block_dims[2])
+            self.layer2_outconv2 = nn.Sequential(
+                conv3x3(block_dims[2], block_dims[2]),
+                nn.BatchNorm2d(block_dims[2]),
+                nn.LeakyReLU(),
+                conv3x3(block_dims[2], block_dims[1]),
+            )
+            self.layer1_outconv = conv1x1(block_dims[0], block_dims[1])
+            self.layer1_outconv2 = nn.Sequential(
+                conv3x3(block_dims[1], block_dims[1]),
+                nn.BatchNorm2d(block_dims[1]),
+                nn.LeakyReLU(),
+                conv3x3(block_dims[1], block_dims[0]),
+            )
+
+        for m in self.modules():
+            if isinstance(m, nn.Conv2d):
+                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
+            elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
+                nn.init.constant_(m.weight, 1)
+                nn.init.constant_(m.bias, 0)
+
+    def _make_layer(self, block, dim, stride=1):
+        layer1 = block(self.in_planes, dim, stride=stride)
+        layer2 = block(dim, dim, stride=1)
+        layers = (layer1, layer2)
+
+        self.in_planes = dim
+        return nn.Sequential(*layers)
+
+    def forward(self, x):
+        # ResNet Backbone
+        x0 = self.relu(self.bn1(self.conv1(x)))
+        x1 = self.layer1(x0)  # 1/2
+        x2 = self.layer2(x1)  # 1/4
+        x3 = self.layer3(x2)  # 1/8
+
+        # FPN
+
+        if self.skip_fine_feature:
+            if self.inter_feat:
+                return {'feats_c': x3, 'feats_f': None, 'feats_x2': x2, 'feats_x1': x1}
+            else:
+                return {'feats_c': x3, 'feats_f': None}
+
+        x3_out = self.layer3_outconv(x3)
+        x3_out_2x = F.interpolate(x3_out, scale_factor=2., mode='bilinear', align_corners=False)
+        x2_out = self.layer2_outconv(x2)
+        x2_out = self.layer2_outconv2(x2_out+x3_out_2x)
+
+        x2_out_2x = F.interpolate(x2_out, scale_factor=2., mode='bilinear', align_corners=False)
+        x1_out = self.layer1_outconv(x1)
+        x1_out = self.layer1_outconv2(x1_out+x2_out_2x)
+
+        x0_out = F.interpolate(x1_out, scale_factor=2., mode='bilinear', align_corners=False)
+
+        if not self.inter_feat:
+            return {'feats_c': x3, 'feats_f': x0_out}
+        else:
+            return {'feats_c': x3, 'feats_f': x0_out, 'feats_x2': x2, 'feats_x1': x1}
+
+    def pro(self, x, profiler):
+        with profiler.profile('ResNet Backbone'):
+            # ResNet Backbone
+            x0 = self.relu(self.bn1(self.conv1(x)))
+            x1 = self.layer1(x0)  # 1/2
+            x2 = self.layer2(x1)  # 1/4
+            x3 = self.layer3(x2)  # 1/8
+
+        with profiler.profile('FPN'):
+            # FPN
+            # The FPN layers are only built when fine features are requested, so
+            # skip before touching them; return the dict convention used by forward().
+            if self.skip_fine_feature:
+                return {'feats_c': x3, 'feats_f': None}
+            x3_out = self.layer3_outconv(x3)
+
+            x3_out_2x = F.interpolate(x3_out, scale_factor=2., mode='bilinear', align_corners=False)
+            x2_out = self.layer2_outconv(x2)
+            x2_out = self.layer2_outconv2(x2_out+x3_out_2x)
+
+            x2_out_2x = F.interpolate(x2_out, scale_factor=2., mode='bilinear', align_corners=False)
+            x1_out = self.layer1_outconv(x1)
+            x1_out = self.layer1_outconv2(x1_out+x2_out_2x)
+
+        with profiler.profile('upsample*1'):
+            x0_out = F.interpolate(x1_out, scale_factor=2., mode='bilinear', align_corners=False)
+
+        return {'feats_c': x3_out, 'feats_f': x0_out}
+
+
+class ResNetFPN_8_2_align(nn.Module):
+    """
+    ResNet+FPN, output resolutions are 1/8 and 1/2.
+    Each block has 2 layers.
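+    All bilinear upsampling in the FPN here uses align_corners=False (hence the `_align` suffix).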
+ """ + + def __init__(self, config): + super().__init__() + # Config + block = BasicBlock + initial_dim = config['initial_dim'] + block_dims = config['block_dims'] + + # Class Variable + self.in_planes = initial_dim + self.skip_fine_feature = config['coarse_feat_only'] + self.inter_feat = config['inter_feat'] + # Networks + self.conv1 = nn.Conv2d(1, initial_dim, kernel_size=7, stride=2, padding=3, bias=False) + self.bn1 = nn.BatchNorm2d(initial_dim) + self.relu = nn.ReLU(inplace=True) + + self.layer1 = self._make_layer(block, block_dims[0], stride=1) # 1/2 + self.layer2 = self._make_layer(block, block_dims[1], stride=2) # 1/4 + self.layer3 = self._make_layer(block, block_dims[2], stride=2) # 1/8 + + # 3. FPN upsample + if not self.skip_fine_feature: + self.layer3_outconv = conv1x1(block_dims[2], block_dims[2]) + self.layer2_outconv = conv1x1(block_dims[1], block_dims[2]) + self.layer2_outconv2 = nn.Sequential( + conv3x3(block_dims[2], block_dims[2]), + nn.BatchNorm2d(block_dims[2]), + nn.LeakyReLU(), + conv3x3(block_dims[2], block_dims[1]), + ) + self.layer1_outconv = conv1x1(block_dims[0], block_dims[1]) + self.layer1_outconv2 = nn.Sequential( + conv3x3(block_dims[1], block_dims[1]), + nn.BatchNorm2d(block_dims[1]), + nn.LeakyReLU(), + conv3x3(block_dims[1], block_dims[0]), + ) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + def _make_layer(self, block, dim, stride=1): + layer1 = block(self.in_planes, dim, stride=stride) + layer2 = block(dim, dim, stride=1) + layers = (layer1, layer2) + + self.in_planes = dim + return nn.Sequential(*layers) + + def forward(self, x): + # ResNet Backbone + x0 = self.relu(self.bn1(self.conv1(x))) + x1 = self.layer1(x0) # 1/2 + x2 = self.layer2(x1) # 1/4 + x3 = self.layer3(x2) # 1/8 + + if self.skip_fine_feature: + if self.inter_feat: + return {'feats_c': x3, 'feats_f': None, 'feats_x2': x2, 'feats_x1': x1} + else: + return {'feats_c': x3, 'feats_f': None} + + # FPN + x3_out = self.layer3_outconv(x3) + + x3_out_2x = F.interpolate(x3_out, scale_factor=2., mode='bilinear', align_corners=False) + x2_out = self.layer2_outconv(x2) + x2_out = self.layer2_outconv2(x2_out+x3_out_2x) + + x2_out_2x = F.interpolate(x2_out, scale_factor=2., mode='bilinear', align_corners=False) + x1_out = self.layer1_outconv(x1) + x1_out = self.layer1_outconv2(x1_out+x2_out_2x) + + if not self.inter_feat: + return {'feats_c': x3, 'feats_f': x1_out} + else: + return {'feats_c': x3, 'feats_f': x1_out, 'feats_x2': x2, 'feats_x1': x1} + + +class ResNet_8_1_align(nn.Module): + """ + ResNet, output resolution are 1/8 and 1. + Each block has 2 layers. + """ + + def __init__(self, config): + super().__init__() + # Config + block = BasicBlock + initial_dim = config['initial_dim'] + block_dims = config['block_dims'] + + # Class Variable + self.in_planes = initial_dim + + # Networks + self.conv1 = nn.Conv2d(1, initial_dim, kernel_size=7, stride=2, padding=3, bias=False) + self.bn1 = nn.BatchNorm2d(initial_dim) + self.relu = nn.ReLU(inplace=True) + + self.layer1 = self._make_layer(block, block_dims[0], stride=1) # 1/2 + self.layer2 = self._make_layer(block, block_dims[1], stride=2) # 1/4 + self.layer3 = self._make_layer(block, block_dims[2], stride=2) # 1/8 + + # 3. 
FPN upsample + # self.layer3_outconv = conv1x1(block_dims[2], block_dims[2]) + # self.layer2_outconv = conv1x1(block_dims[1], block_dims[2]) + # self.layer2_outconv2 = nn.Sequential( + # conv3x3(block_dims[2], block_dims[2]), + # nn.BatchNorm2d(block_dims[2]), + # nn.LeakyReLU(), + # conv3x3(block_dims[2], block_dims[1]), + # ) + # self.layer1_outconv = conv1x1(block_dims[0], block_dims[1]) + # self.layer1_outconv2 = nn.Sequential( + # conv3x3(block_dims[1], block_dims[1]), + # nn.BatchNorm2d(block_dims[1]), + # nn.LeakyReLU(), + # conv3x3(block_dims[1], block_dims[0]), + # ) + self.layer0_outconv = conv1x1(block_dims[2], block_dims[0]) + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + def _make_layer(self, block, dim, stride=1): + layer1 = block(self.in_planes, dim, stride=stride) + layer2 = block(dim, dim, stride=1) + layers = (layer1, layer2) + + self.in_planes = dim + return nn.Sequential(*layers) + + def forward(self, x): + # ResNet Backbone + x0 = self.relu(self.bn1(self.conv1(x))) + x1 = self.layer1(x0) # 1/2 + x2 = self.layer2(x1) # 1/4 + x3 = self.layer3(x2) # 1/8 + + # FPN + # x3_out = self.layer3_outconv(x3) + + # x3_out_2x = F.interpolate(x3_out, scale_factor=2., mode='bilinear', align_corners=False) + # x2_out = self.layer2_outconv(x2) + # x2_out = self.layer2_outconv2(x2_out+x3_out_2x) + + # x2_out_2x = F.interpolate(x2_out, scale_factor=2., mode='bilinear', align_corners=False) + # x1_out = self.layer1_outconv(x1) + # x1_out = self.layer1_outconv2(x1_out+x2_out_2x) + + x0_out = F.interpolate(x3, scale_factor=8., mode='bilinear', align_corners=False) + x0_out = self.layer0_outconv(x0_out) + + return {'feats_c': x3, 'feats_f': x0_out} + +class VGG_8_1_align(nn.Module): + """ + VGG-like backbone, output resolution are 1/8 and 1. + Each block has 2 layers. 
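+    The shared encoder follows a SuperPoint-like layout; fine features are the L2-normalized
+    coarse descriptors upsampled 8x bilinearly and projected by a 1x1 conv.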
+ """ + + def __init__(self, config): + super().__init__() + # Config + block = BasicBlock + initial_dim = config['initial_dim'] + block_dims = config['block_dims'] + + self.relu = nn.ReLU(inplace=True) + self.pool = nn.MaxPool2d(kernel_size=2, stride=2) + c1, c2, c3, c4, c5 = 64, 64, 128, 128, 256 + + self.conv1a = nn.Conv2d(1, c1, kernel_size=3, stride=1, padding=1) + self.conv1b = nn.Conv2d(c1, c1, kernel_size=3, stride=1, padding=1) + self.conv2a = nn.Conv2d(c1, c2, kernel_size=3, stride=1, padding=1) + self.conv2b = nn.Conv2d(c2, c2, kernel_size=3, stride=1, padding=1) + self.conv3a = nn.Conv2d(c2, c3, kernel_size=3, stride=1, padding=1) + self.conv3b = nn.Conv2d(c3, c3, kernel_size=3, stride=1, padding=1) + self.conv4a = nn.Conv2d(c3, c4, kernel_size=3, stride=1, padding=1) + self.conv4b = nn.Conv2d(c4, c4, kernel_size=3, stride=1, padding=1) + + # self.convPa = nn.Conv2d(c4, c5, kernel_size=3, stride=1, padding=1) + # self.convPb = nn.Conv2d(c5, 65, kernel_size=1, stride=1, padding=0) + + self.convDa = nn.Conv2d(c4, c5, kernel_size=3, stride=1, padding=1) + self.convDb = nn.Conv2d( + c5, 256, + kernel_size=1, stride=1, padding=0) + self.layer0_outconv = conv1x1(block_dims[2], block_dims[0]) + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + def _make_layer(self, block, dim, stride=1): + layer1 = block(self.in_planes, dim, stride=stride) + layer2 = block(dim, dim, stride=1) + layers = (layer1, layer2) + + self.in_planes = dim + return nn.Sequential(*layers) + + def forward(self, x): + # Shared Encoder + x = self.relu(self.conv1a(x)) + x = self.relu(self.conv1b(x)) + x = self.pool(x) + x = self.relu(self.conv2a(x)) + x = self.relu(self.conv2b(x)) + x = self.pool(x) + x = self.relu(self.conv3a(x)) + x = self.relu(self.conv3b(x)) + x = self.pool(x) + x = self.relu(self.conv4a(x)) + x = self.relu(self.conv4b(x)) + + cDa = self.relu(self.convDa(x)) + descriptors = self.convDb(cDa) + x3_out = nn.functional.normalize(descriptors, p=2, dim=1) + + x0_out = F.interpolate(x3_out, scale_factor=8., mode='bilinear', align_corners=False) + x0_out = self.layer0_outconv(x0_out) + # ResNet Backbone + # x0 = self.relu(self.bn1(self.conv1(x))) + # x1 = self.layer1(x0) # 1/2 + # x2 = self.layer2(x1) # 1/4 + # x3 = self.layer3(x2) # 1/8 + + # # FPN + # x3_out = self.layer3_outconv(x3) + + # x3_out_2x = F.interpolate(x3_out, scale_factor=2., mode='bilinear', align_corners=False) + # x2_out = self.layer2_outconv(x2) + # x2_out = self.layer2_outconv2(x2_out+x3_out_2x) + + # x2_out_2x = F.interpolate(x2_out, scale_factor=2., mode='bilinear', align_corners=False) + # x1_out = self.layer1_outconv(x1) + # x1_out = self.layer1_outconv2(x1_out+x2_out_2x) + + # x0_out = F.interpolate(x1_out, scale_factor=2., mode='bilinear', align_corners=False) + + return {'feats_c': x3_out, 'feats_f': x0_out} + +class RepVGG_8_1_align(nn.Module): + """ + RepVGG backbone, output resolution are 1/8 and 1. + Each block has 2 layers. 
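+    When fine features are enabled, they are recovered through the FPN and a final 2x
+    bilinear upsample (align_corners=False).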
+    """
+
+    def __init__(self, config):
+        super().__init__()
+        # Config
+        # block = BasicBlock
+        # initial_dim = config['initial_dim']
+        block_dims = config['block_dims']
+        self.skip_fine_feature = config['coarse_feat_only']
+        self.inter_feat = config['inter_feat']
+        self.leaky = config['leaky']
+
+        # backbone_name='RepVGG-B0'
+        if config.get('repvggmodel') is not None:
+            backbone_name = config['repvggmodel']
+        elif self.leaky:
+            backbone_name = 'RepVGG-A1_leaky'
+        else:
+            backbone_name = 'RepVGG-A1'
+        repvgg_fn = get_RepVGG_func_by_name(backbone_name)
+        backbone = repvgg_fn(False)
+        self.layer0, self.layer1, self.layer2, self.layer3 = backbone.stage0, backbone.stage1, backbone.stage2, backbone.stage3  # , backbone.stage4
+        # self.layer0, self.layer1, self.layer2, self.layer3, self.layer4 = backbone.stage0, backbone.stage1, backbone.stage2, backbone.stage3, backbone.stage4
+
+        # 3. FPN upsample
+        if not self.skip_fine_feature:
+            self.layer3_outconv = conv1x1(block_dims[2], block_dims[2])
+            self.layer2_outconv = conv1x1(block_dims[1], block_dims[2])
+            self.layer2_outconv2 = nn.Sequential(
+                conv3x3(block_dims[2], block_dims[2]),
+                nn.BatchNorm2d(block_dims[2]),
+                nn.LeakyReLU(),
+                conv3x3(block_dims[2], block_dims[1]),
+            )
+            self.layer1_outconv = conv1x1(block_dims[0], block_dims[1])
+            self.layer1_outconv2 = nn.Sequential(
+                conv3x3(block_dims[1], block_dims[1]),
+                nn.BatchNorm2d(block_dims[1]),
+                nn.LeakyReLU(),
+                conv3x3(block_dims[1], block_dims[0]),
+            )
+
+        # self.layer0_outconv = conv1x1(192, 48)
+
+        for layer in [self.layer0, self.layer1, self.layer2, self.layer3]:
+            for m in layer.modules():
+                if isinstance(m, nn.Conv2d):
+                    nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
+                elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
+                    nn.init.constant_(m.weight, 1)
+                    nn.init.constant_(m.bias, 0)
+        # for layer in [self.layer0, self.layer1, self.layer2, self.layer3, self.layer4]:
+        #     for m in layer.modules():
+        #         if isinstance(m, nn.Conv2d):
+        #             nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
+        #         elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
+        #             nn.init.constant_(m.weight, 1)
+        #             nn.init.constant_(m.bias, 0)
+
+    def forward(self, x):
+
+        out = self.layer0(x)  # 1/2
+        for module in self.layer1:
+            out = module(out)  # 1/2
+        x1 = out
+        for module in self.layer2:
+            out = module(out)  # 1/4
+        x2 = out
+        for module in self.layer3:
+            out = module(out)  # 1/8
+        x3 = out
+        # for module in self.layer4:
+        #     out = module(out)
+        #     x3 = out
+
+        if self.skip_fine_feature:
+            if self.inter_feat:
+                return {'feats_c': x3, 'feats_f': None, 'feats_x2': x2, 'feats_x1': x1}
+            else:
+                return {'feats_c': x3, 'feats_f': None}
+        x3_out = self.layer3_outconv(x3)
+        x3_out_2x = F.interpolate(x3_out, scale_factor=2., mode='bilinear', align_corners=False)
+        x2_out = self.layer2_outconv(x2)
+        x2_out = self.layer2_outconv2(x2_out+x3_out_2x)
+
+        x2_out_2x = F.interpolate(x2_out, scale_factor=2., mode='bilinear', align_corners=False)
+        x1_out = self.layer1_outconv(x1)
+        x1_out = self.layer1_outconv2(x1_out+x2_out_2x)
+
+        x0_out = F.interpolate(x1_out, scale_factor=2., mode='bilinear', align_corners=False)
+
+        # x_f = F.interpolate(x_c, scale_factor=8., mode='bilinear', align_corners=False)
+        # x_f = self.layer0_outconv(x_f)
+        return {'feats_c': x3_out, 'feats_f': x0_out}
+
+
+class RepVGG_8_2_fix(nn.Module):
+    """
+    RepVGG backbone, output resolutions are 1/8 and 1/2.
+    Each block has 2 layers.
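+    Upsampling targets exact odd sizes (2n+1) with align_corners=True, as in the other "fix" variants.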
+ """ + + def __init__(self, config): + super().__init__() + # Config + # block = BasicBlock + # initial_dim = config['initial_dim'] + block_dims = config['block_dims'] + self.skip_fine_feature = config['coarse_feat_only'] + self.inter_feat = config['inter_feat'] + + # backbone_name='RepVGG-B0' + backbone_name='RepVGG-A1' + repvgg_fn = get_RepVGG_func_by_name(backbone_name) + backbone = repvgg_fn(False) + self.layer0, self.layer1, self.layer2, self.layer3 = backbone.stage0, backbone.stage1, backbone.stage2, backbone.stage3 #, backbone.stage4 + + # 3. FPN upsample + if not self.skip_fine_feature: + self.layer3_outconv = conv1x1(block_dims[2], block_dims[2]) + self.layer2_outconv = conv1x1(block_dims[1], block_dims[2]) + self.layer2_outconv2 = nn.Sequential( + conv3x3(block_dims[2], block_dims[2]), + nn.BatchNorm2d(block_dims[2]), + nn.LeakyReLU(), + conv3x3(block_dims[2], block_dims[1]), + ) + self.layer1_outconv = conv1x1(block_dims[0], block_dims[1]) + self.layer1_outconv2 = nn.Sequential( + conv3x3(block_dims[1], block_dims[1]), + nn.BatchNorm2d(block_dims[1]), + nn.LeakyReLU(), + conv3x3(block_dims[1], block_dims[0]), + ) + + # self.layer0_outconv = conv1x1(192, 48) + + for layer in [self.layer0, self.layer1, self.layer2, self.layer3]: + for m in layer.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + def forward(self, x): + + x0 = self.layer0(x) # 1/2 + out = x0 + for module in self.layer1: + out = module(out) # 1/2 + x1 = out + for module in self.layer2: + out = module(out) # 1/4 + x2 = out + for module in self.layer3: + out = module(out) # 1/8 + x3 = out + # for module in self.layer4: + # out = module(out) + + if self.skip_fine_feature: + if self.inter_feat: + return {'feats_c': x3, 'feats_f': None, 'feats_x2': x2, 'feats_x1': x1} + else: + return {'feats_c': x3, 'feats_f': None} + x3_out = self.layer3_outconv(x3) + x3_out_2x = F.interpolate(x3_out, size=((x3_out.size(-2)-1)*2+1, (x3_out.size(-1)-1)*2+1), mode='bilinear', align_corners=True) + x2_out = self.layer2_outconv(x2) + x2_out = self.layer2_outconv2(x2_out+x3_out_2x) + + x2_out_2x = F.interpolate(x2_out, size=((x2_out.size(-2)-1)*2+1, (x2_out.size(-1)-1)*2+1), mode='bilinear', align_corners=True) + x1_out = self.layer1_outconv(x1) + x1_out = self.layer1_outconv2(x1_out+x2_out_2x) + + # x0_out = F.interpolate(x1_out, scale_factor=2., mode='bilinear', align_corners=False) + + # x_f = F.interpolate(x_c, scale_factor=8., mode='bilinear', align_corners=False) + # x_f = self.layer0_outconv(x_f) + return {'feats_c': x3_out, 'feats_f': x1_out} + + +class RepVGGnfpn_8_1_align(nn.Module): + """ + RepVGG backbone, output resolution are 1/8 and 1. + Each block has 2 layers. + """ + + def __init__(self, config): + super().__init__() + # Config + # block = BasicBlock + # initial_dim = config['initial_dim'] + block_dims = config['block_dims'] + self.skip_fine_feature = config['coarse_feat_only'] + self.inter_feat = config['inter_feat'] + + # backbone_name='RepVGG-B0' + backbone_name='RepVGG-A1' + repvgg_fn = get_RepVGG_func_by_name(backbone_name) + backbone = repvgg_fn(False) + self.layer0, self.layer1, self.layer2, self.layer3 = backbone.stage0, backbone.stage1, backbone.stage2, backbone.stage3 #, backbone.stage4 + + # 3. 
FPN upsample + if not self.skip_fine_feature: + self.layer0_outconv = conv1x1(block_dims[2], block_dims[0]) + # self.layer0_outconv = conv1x1(192, 48) + + for layer in [self.layer0, self.layer1, self.layer2, self.layer3]: + for m in layer.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + def forward(self, x): + + x0 = self.layer0(x) # 1/2 + out = x0 + for module in self.layer1: + out = module(out) # 1/2 + x1 = out + for module in self.layer2: + out = module(out) # 1/4 + x2 = out + for module in self.layer3: + out = module(out) # 1/8 + x3 = out + # for module in self.layer4: + # out = module(out) + + if self.skip_fine_feature: + if self.inter_feat: + return {'feats_c': x3, 'feats_f': None, 'feats_x2': x2, 'feats_x1': x1} + else: + return {'feats_c': x3, 'feats_f': None} + # x3_out = self.layer3_outconv(x3) + # x3_out_2x = F.interpolate(x3_out, scale_factor=2., mode='bilinear', align_corners=False) + # x2_out = self.layer2_outconv(x2) + # x2_out = self.layer2_outconv2(x2_out+x3_out_2x) + + # x2_out_2x = F.interpolate(x2_out, scale_factor=2., mode='bilinear', align_corners=False) + # x1_out = self.layer1_outconv(x1) + # x1_out = self.layer1_outconv2(x1_out+x2_out_2x) + + # x0_out = F.interpolate(x1_out, scale_factor=2., mode='bilinear', align_corners=False) + + x_f = F.interpolate(x3, scale_factor=8., mode='bilinear', align_corners=False) + x_f = self.layer0_outconv(x_f) + # x_f2 = F.interpolate(x3, scale_factor=8., mode='bilinear', align_corners=False) + # x_f2 = self.layer0_outconv(x_f2) + return {'feats_c': x3, 'feats_f': x_f} + + +class s2dnet_8_1_align(nn.Module): + """ + ResNet+FPN, output resolution are 1/8 and 1. + Each block has 2 layers. + """ + + def __init__(self, config): + super().__init__() + # Config + block = BasicBlock + initial_dim = config['initial_dim'] + block_dims = config['block_dims'] + + # Class Variable + self.in_planes = initial_dim + self.skip_fine_feature = config['coarse_feat_only'] + self.inter_feat = config['inter_feat'] + # Networks + self.backbone = S2DNet(checkpoint_path = '/cephfs-mvs/3dv-research/hexingyi/code_yf/loftrdev/weights/s2dnet/s2dnet_weights.pth') + # 3. 
FPN upsample + # self.layer3_outconv = conv1x1(block_dims[2], block_dims[2]) + # if not self.skip_fine_feature: + # self.layer2_outconv = conv1x1(block_dims[1], block_dims[2]) + # self.layer2_outconv2 = nn.Sequential( + # conv3x3(block_dims[2], block_dims[2]), + # nn.BatchNorm2d(block_dims[2]), + # nn.LeakyReLU(), + # conv3x3(block_dims[2], block_dims[1]), + # ) + # self.layer1_outconv = conv1x1(block_dims[0], block_dims[1]) + # self.layer1_outconv2 = nn.Sequential( + # conv3x3(block_dims[1], block_dims[1]), + # nn.BatchNorm2d(block_dims[1]), + # nn.LeakyReLU(), + # conv3x3(block_dims[1], block_dims[0]), + # ) + + # for m in self.modules(): + # if isinstance(m, nn.Conv2d): + # nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') + # elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)): + # nn.init.constant_(m.weight, 1) + # nn.init.constant_(m.bias, 0) + + def forward(self, x): + ret = self.backbone(x) + ret[2] = F.interpolate(ret[2], scale_factor=2., mode='bilinear', align_corners=False) + if self.skip_fine_feature: + if self.inter_feat: + return {'feats_c': ret[2], 'feats_f': None, 'feats_x2': ret[1], 'feats_x1': ret[0]} + else: + return {'feats_c': ret[2], 'feats_f': None,} + + def pro(self, x, profiler): + pass \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/src/loftr/backbone/s2dnet.py b/imcui/third_party/MatchAnything/src/loftr/backbone/s2dnet.py new file mode 100644 index 0000000000000000000000000000000000000000..2e4c2eb8a61a5193405ea86b7e67c11a19fa94f7 --- /dev/null +++ b/imcui/third_party/MatchAnything/src/loftr/backbone/s2dnet.py @@ -0,0 +1,131 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +# from torchvision import models +from typing import List, Dict + +# VGG-16 Layer Names and Channels +vgg16_layers = { + "conv1_1": 64, + "relu1_1": 64, + "conv1_2": 64, + "relu1_2": 64, + "pool1": 64, + "conv2_1": 128, + "relu2_1": 128, + "conv2_2": 128, + "relu2_2": 128, + "pool2": 128, + "conv3_1": 256, + "relu3_1": 256, + "conv3_2": 256, + "relu3_2": 256, + "conv3_3": 256, + "relu3_3": 256, + "pool3": 256, + "conv4_1": 512, + "relu4_1": 512, + "conv4_2": 512, + "relu4_2": 512, + "conv4_3": 512, + "relu4_3": 512, + "pool4": 512, + "conv5_1": 512, + "relu5_1": 512, + "conv5_2": 512, + "relu5_2": 512, + "conv5_3": 512, + "relu5_3": 512, + "pool5": 512, +} + +class AdapLayers(nn.Module): + """Small adaptation layers. + """ + + def __init__(self, hypercolumn_layers: List[str], output_dim: int = 128): + """Initialize one adaptation layer for every extraction point. + + Args: + hypercolumn_layers: The list of the hypercolumn layer names. + output_dim: The output channel dimension. + """ + super(AdapLayers, self).__init__() + self.layers = [] + channel_sizes = [vgg16_layers[name] for name in hypercolumn_layers] + for i, l in enumerate(channel_sizes): + layer = nn.Sequential( + nn.Conv2d(l, 64, kernel_size=1, stride=1, padding=0), + nn.ReLU(), + nn.Conv2d(64, output_dim, kernel_size=5, stride=1, padding=2), + nn.BatchNorm2d(output_dim), + ) + self.layers.append(layer) + self.add_module("adap_layer_{}".format(i), layer) + + def forward(self, features: List[torch.tensor]): + """Apply adaptation layers. 
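+        Each feature map in the list is transformed in place by its matching adap_layer_{i} module.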
+ """ + for i, _ in enumerate(features): + features[i] = getattr(self, "adap_layer_{}".format(i))(features[i]) + return features + +class S2DNet(nn.Module): + """The S2DNet model + """ + + def __init__( + self, + # hypercolumn_layers: List[str] = ["conv2_2", "conv3_3", "relu4_3"], + hypercolumn_layers: List[str] = ["conv1_2", "conv3_3", "conv5_3"], + checkpoint_path: str = None, + ): + """Initialize S2DNet. + + Args: + device: The torch device to put the model on + hypercolumn_layers: Names of the layers to extract features from + checkpoint_path: Path to the pre-trained model. + """ + super(S2DNet, self).__init__() + self._checkpoint_path = checkpoint_path + self.layer_to_index = dict((k, v) for v, k in enumerate(vgg16_layers.keys())) + self._hypercolumn_layers = hypercolumn_layers + + # Initialize architecture + vgg16 = models.vgg16(pretrained=False) + # layers = list(vgg16.features.children())[:-2] + layers = list(vgg16.features.children())[:-1] + # layers = list(vgg16.features.children())[:23] # relu4_3 + self.encoder = nn.Sequential(*layers) + self.adaptation_layers = AdapLayers(self._hypercolumn_layers) # .to(self._device) + self.eval() + + # Restore params from checkpoint + if checkpoint_path: + print(">> Loading weights from {}".format(checkpoint_path)) + self._checkpoint = torch.load(checkpoint_path) + self._hypercolumn_layers = self._checkpoint["hypercolumn_layers"] + self.load_state_dict(self._checkpoint["state_dict"]) + + def forward(self, image_tensor: torch.FloatTensor): + """Compute intermediate feature maps at the provided extraction levels. + + Args: + image_tensor: The [N x 3 x H x Ws] input image tensor. + Returns: + feature_maps: The list of output feature maps. + """ + feature_maps, j = [], 0 + feature_map = image_tensor.repeat(1,3,1,1) + layer_list = list(self.encoder.modules())[0] + for i, layer in enumerate(layer_list): + feature_map = layer(feature_map) + if j < len(self._hypercolumn_layers): + next_extraction_index = self.layer_to_index[self._hypercolumn_layers[j]] + if i == next_extraction_index: + feature_maps.append(feature_map) + j += 1 + feature_maps = self.adaptation_layers(feature_maps) + return feature_maps diff --git a/imcui/third_party/MatchAnything/src/loftr/loftr.py b/imcui/third_party/MatchAnything/src/loftr/loftr.py new file mode 100644 index 0000000000000000000000000000000000000000..53b7d4a86ec175b483ead096ff9db1ae5802fa63 --- /dev/null +++ b/imcui/third_party/MatchAnything/src/loftr/loftr.py @@ -0,0 +1,273 @@ +import torch +import torch.nn as nn +from einops.einops import rearrange + +from .backbone import build_backbone +# from third_party.matchformer.model.backbone import build_backbone as build_backbone_matchformer +from .utils.position_encoding import PositionEncodingSine +from .loftr_module import LocalFeatureTransformer, FinePreprocess +from .utils.coarse_matching import CoarseMatching +from .utils.fine_matching import FineMatching + +from loguru import logger + +class LoFTR(nn.Module): + def __init__(self, config, profiler=None): + super().__init__() + # Misc + self.config = config + self.profiler = profiler + + # Modules + self.backbone = build_backbone(config) + if not (self.config['coarse']['skip'] or self.config['coarse']['rope'] or self.config['coarse']['pan'] or self.config['coarse']['token_mixer'] is not None): + self.pos_encoding = PositionEncodingSine( + config['coarse']['d_model'], + temp_bug_fix=config['coarse']['temp_bug_fix'], + npe=config['coarse']['npe'], + ) + if self.config['coarse']['abspe']: + self.pos_encoding = 
PositionEncodingSine( + config['coarse']['d_model'], + temp_bug_fix=config['coarse']['temp_bug_fix'], + npe=config['coarse']['npe'], + ) + + if self.config['coarse']['skip'] is False: + self.loftr_coarse = LocalFeatureTransformer(config) + self.coarse_matching = CoarseMatching(config['match_coarse']) + # self.fine_preprocess = FinePreprocess(config).float() + self.fine_preprocess = FinePreprocess(config) + if self.config['fine']['skip'] is False: + self.loftr_fine = LocalFeatureTransformer(config["fine"]) + self.fine_matching = FineMatching(config) + + def forward(self, data): + """ + Update: + data (dict): { + 'image0': (torch.Tensor): (N, 1, H, W) + 'image1': (torch.Tensor): (N, 1, H, W) + 'mask0'(optional) : (torch.Tensor): (N, H, W) '0' indicates a padded position + 'mask1'(optional) : (torch.Tensor): (N, H, W) + } + """ + # 1. Local Feature CNN + data.update({ + 'bs': data['image0'].size(0), + 'hw0_i': data['image0'].shape[2:], 'hw1_i': data['image1'].shape[2:] + }) + + if data['hw0_i'] == data['hw1_i']: # faster & better BN convergence + # feats_c, feats_f = self.backbone(torch.cat([data['image0'], data['image1']], dim=0)) + ret_dict = self.backbone(torch.cat([data['image0'], data['image1']], dim=0)) + feats_c, feats_f = ret_dict['feats_c'], ret_dict['feats_f'] + if self.config['inter_feat']: + data.update({ + 'feats_x2': ret_dict['feats_x2'], + 'feats_x1': ret_dict['feats_x1'], + }) + if self.config['coarse_feat_only']: + (feat_c0, feat_c1) = feats_c.split(data['bs']) + feat_f0, feat_f1 = None, None + else: + (feat_c0, feat_c1), (feat_f0, feat_f1) = feats_c.split(data['bs']), feats_f.split(data['bs']) + else: # handle different input shapes + # (feat_c0, feat_f0), (feat_c1, feat_f1) = self.backbone(data['image0']), self.backbone(data['image1']) + ret_dict0, ret_dict1 = self.backbone(data['image0']), self.backbone(data['image1']) + feat_c0, feat_f0 = ret_dict0['feats_c'], ret_dict0['feats_f'] + feat_c1, feat_f1 = ret_dict1['feats_c'], ret_dict1['feats_f'] + if self.config['inter_feat']: + data.update({ + 'feats_x2_0': ret_dict0['feats_x2'], + 'feats_x1_0': ret_dict0['feats_x1'], + 'feats_x2_1': ret_dict1['feats_x2'], + 'feats_x1_1': ret_dict1['feats_x1'], + }) + if self.config['coarse_feat_only']: + feat_f0, feat_f1 = None, None + + + mul = self.config['resolution'][0] // self.config['resolution'][1] + # mul = 4 + if self.config['fix_bias']: + data.update({ + 'hw0_c': feat_c0.shape[2:], 'hw1_c': feat_c1.shape[2:], + 'hw0_f': feat_f0.shape[2:] if feat_f0 is not None else [(feat_c0.shape[2]-1) * mul+1, (feat_c0.shape[3]-1) * mul+1] , + 'hw1_f': feat_f1.shape[2:] if feat_f1 is not None else [(feat_c1.shape[2]-1) * mul+1, (feat_c1.shape[3]-1) * mul+1] + }) + else: + data.update({ + 'hw0_c': feat_c0.shape[2:], 'hw1_c': feat_c1.shape[2:], + 'hw0_f': feat_f0.shape[2:] if feat_f0 is not None else [feat_c0.shape[2] * mul, feat_c0.shape[3] * mul] , + 'hw1_f': feat_f1.shape[2:] if feat_f1 is not None else [feat_c1.shape[2] * mul, feat_c1.shape[3] * mul] + }) + + # 2. 
coarse-level loftr module + # add featmap with positional encoding, then flatten it to sequence [N, HW, C] + if self.config['coarse']['skip']: + mask_c0 = mask_c1 = None # mask is useful in training + if 'mask0' in data: + mask_c0, mask_c1 = data['mask0'], data['mask1'] + feat_c0 = rearrange(feat_c0, 'n c h w -> n (h w) c') + feat_c1 = rearrange(feat_c1, 'n c h w -> n (h w) c') + + elif self.config['coarse']['pan']: + # assert feat_c0.shape[0] == 1, 'batch size must be 1 when using mask Xformer now' + if self.config['coarse']['abspe']: + feat_c0 = self.pos_encoding(feat_c0) + feat_c1 = self.pos_encoding(feat_c1) + + mask_c0 = mask_c1 = None # mask is useful in training + if 'mask0' in data: + mask_c0, mask_c1 = data['mask0'], data['mask1'] + if self.config['matchability']: # else match in loftr_coarse + feat_c0, feat_c1 = self.loftr_coarse(feat_c0, feat_c1, mask_c0, mask_c1, data=data) + else: + feat_c0, feat_c1 = self.loftr_coarse(feat_c0, feat_c1, mask_c0, mask_c1) + + feat_c0 = rearrange(feat_c0, 'n c h w -> n (h w) c') + feat_c1 = rearrange(feat_c1, 'n c h w -> n (h w) c') + else: + if not (self.config['coarse']['rope'] or self.config['coarse']['token_mixer'] is not None): + feat_c0 = rearrange(self.pos_encoding(feat_c0), 'n c h w -> n (h w) c') + feat_c1 = rearrange(self.pos_encoding(feat_c1), 'n c h w -> n (h w) c') + + mask_c0 = mask_c1 = None # mask is useful in training + if self.config['coarse']['rope']: + if 'mask0' in data: + mask_c0, mask_c1 = data['mask0'], data['mask1'] + else: + if 'mask0' in data: + mask_c0, mask_c1 = data['mask0'].flatten(-2), data['mask1'].flatten(-2) + feat_c0, feat_c1 = self.loftr_coarse(feat_c0, feat_c1, mask_c0, mask_c1) + if self.config['coarse']['rope']: + feat_c0 = rearrange(feat_c0, 'n c h w -> n (h w) c') + feat_c1 = rearrange(feat_c1, 'n c h w -> n (h w) c') + + # detect nan + if self.config['replace_nan'] and (torch.any(torch.isnan(feat_c0)) or torch.any(torch.isnan(feat_c1))): + logger.info(f'replace nan in coarse attention') + logger.info(f"feat_c0_nan_num: {torch.isnan(feat_c0).int().sum()}, feat_c1_nan_num: {torch.isnan(feat_c1).int().sum()}") + logger.info(f"feat_c0: {feat_c0}, feat_c1: {feat_c1}") + logger.info(f"feat_c0_max: {feat_c0.abs().max()}, feat_c1_max: {feat_c1.abs().max()}") + feat_c0[torch.isnan(feat_c0)] = 0 + feat_c1[torch.isnan(feat_c1)] = 0 + logger.info(f"feat_c0_nanmax: {feat_c0.abs().max()}, feat_c1_nanmax: {feat_c1.abs().max()}") + + # 3. 
match coarse-level + if not self.config['matchability']: # else match in loftr_coarse + self.coarse_matching(feat_c0, feat_c1, data, + mask_c0=mask_c0.view(mask_c0.size(0), -1) if mask_c0 is not None else mask_c0, + mask_c1=mask_c1.view(mask_c1.size(0), -1) if mask_c1 is not None else mask_c1 + ) + + #return data['conf_matrix'],feat_c0,feat_c1,data['feats_x2'],data['feats_x1'] + + # norm FPNfeat + if self.config['norm_fpnfeat']: + feat_c0, feat_c1 = map(lambda feat: feat / feat.shape[-1]**.5, + [feat_c0, feat_c1]) + if self.config['norm_fpnfeat2']: + assert self.config['inter_feat'] + logger.info(f'before norm_fpnfeat2 max of feat_c0, feat_c1:{feat_c0.abs().max()}, {feat_c1.abs().max()}') + if data['hw0_i'] == data['hw1_i']: + logger.info(f'before norm_fpnfeat2 max of data[feats_x2], data[feats_x1]:{data["feats_x2"].abs().max()}, {data["feats_x1"].abs().max()}') + feat_c0, feat_c1, data['feats_x2'], data['feats_x1'] = map(lambda feat: feat / feat.shape[-1]**.5, + [feat_c0, feat_c1, data['feats_x2'], data['feats_x1']]) + else: + feat_c0, feat_c1, data['feats_x2_0'], data['feats_x2_1'], data['feats_x1_0'], data['feats_x1_1'] = map(lambda feat: feat / feat.shape[-1]**.5, + [feat_c0, feat_c1, data['feats_x2_0'], data['feats_x2_1'], data['feats_x1_0'], data['feats_x1_1']]) + + + # 4. fine-level refinement + with torch.autocast(enabled=False, device_type="cuda"): + feat_f0_unfold, feat_f1_unfold = self.fine_preprocess(feat_f0, feat_f1, feat_c0, feat_c1, data) + + # detect nan + if self.config['replace_nan'] and (torch.any(torch.isnan(feat_f0_unfold)) or torch.any(torch.isnan(feat_f1_unfold))): + logger.info(f'replace nan in fine_preprocess') + logger.info(f"feat_f0_unfold_nan_num: {torch.isnan(feat_f0_unfold).int().sum()}, feat_f1_unfold_nan_num: {torch.isnan(feat_f1_unfold).int().sum()}") + logger.info(f"feat_f0_unfold: {feat_f0_unfold}, feat_f1_unfold: {feat_f1_unfold}") + logger.info(f"feat_f0_unfold_max: {feat_f0_unfold}, feat_f1_unfold_max: {feat_f1_unfold}") + feat_f0_unfold[torch.isnan(feat_f0_unfold)] = 0 + feat_f1_unfold[torch.isnan(feat_f1_unfold)] = 0 + logger.info(f"feat_f0_unfold_nanmax: {feat_f0_unfold}, feat_f1_unfold_nanmax: {feat_f1_unfold}") + + if self.config['fp16log'] and feat_c0 is not None: + logger.info(f"c0: {feat_c0.abs().max()}, c1: {feat_c1.abs().max()}") + del feat_c0, feat_c1, mask_c0, mask_c1 + if feat_f0_unfold.size(0) != 0: # at least one coarse level predicted + if self.config['fine']['pan']: + m, ww, c = feat_f0_unfold.size() # [m, ww, c] + w = self.config['fine_window_size'] + feat_f0_unfold, feat_f1_unfold = self.loftr_fine(feat_f0_unfold.reshape(m, c, w, w), feat_f1_unfold.reshape(m, c, w, w)) + feat_f0_unfold = rearrange(feat_f0_unfold, 'm c w h -> m (w h) c') + feat_f1_unfold = rearrange(feat_f1_unfold, 'm c w h -> m (w h) c') + elif self.config['fine']['skip']: + pass + else: + feat_f0_unfold, feat_f1_unfold = self.loftr_fine(feat_f0_unfold, feat_f1_unfold) + # 5. 
match fine-level + # log forward nan + if self.config['fp16log']: + if feat_f0_unfold.size(0) != 0 and feat_f0 is not None: + logger.info(f"f0: {feat_f0.abs().max()}, f1: {feat_f1.abs().max()}, uf0: {feat_f0_unfold.abs().max()}, uf1: {feat_f1_unfold.abs().max()}") + elif feat_f0_unfold.size(0) != 0: + logger.info(f"uf0: {feat_f0_unfold.abs().max()}, uf1: {feat_f1_unfold.abs().max()}") + # elif feat_c0 is not None: + # logger.info(f"c0: {feat_c0.abs().max()}, c1: {feat_c1.abs().max()}") + + with torch.autocast(enabled=False, device_type="cuda"): + self.fine_matching(feat_f0_unfold, feat_f1_unfold, data) + + return data + + def load_state_dict(self, state_dict, *args, **kwargs): + for k in list(state_dict.keys()): + if k.startswith('matcher.'): + state_dict[k.replace('matcher.', '', 1)] = state_dict.pop(k) + return super().load_state_dict(state_dict, *args, **kwargs) + + def refine(self, data): + """ + Update: + data (dict): { + 'image0': (torch.Tensor): (N, 1, H, W) + 'image1': (torch.Tensor): (N, 1, H, W) + 'mask0'(optional) : (torch.Tensor): (N, H, W) '0' indicates a padded position + 'mask1'(optional) : (torch.Tensor): (N, H, W) + } + """ + # 1. Local Feature CNN + data.update({ + 'bs': data['image0'].size(0), + 'hw0_i': data['image0'].shape[2:], 'hw1_i': data['image1'].shape[2:] + }) + feat_f0, feat_f1 = None, None + feat_c0, feat_c1 = data['feat_c0'], data['feat_c1'] + # 4. fine-level refinement + feat_f0_unfold, feat_f1_unfold = self.fine_preprocess(feat_f0, feat_f1, feat_c0, feat_c1, data) + if feat_f0_unfold.size(0) != 0: # at least one coarse level predicted + if self.config['fine']['pan']: + m, ww, c = feat_f0_unfold.size() # [m, ww, c] + w = self.config['fine_window_size'] + feat_f0_unfold, feat_f1_unfold = self.loftr_fine(feat_f0_unfold.reshape(m, c, w, w), feat_f1_unfold.reshape(m, c, w, w)) + feat_f0_unfold = rearrange(feat_f0_unfold, 'm c w h -> m (w h) c') + feat_f1_unfold = rearrange(feat_f1_unfold, 'm c w h -> m (w h) c') + elif self.config['fine']['skip']: + pass + else: + feat_f0_unfold, feat_f1_unfold = self.loftr_fine(feat_f0_unfold, feat_f1_unfold) + # 5. 
match fine-level + # log forward nan + if self.config['fp16log']: + if feat_f0_unfold.size(0) != 0 and feat_f0 is not None and feat_c0 is not None: + logger.info(f"c0: {feat_c0.abs().max()}, c1: {feat_c1.abs().max()}, f0: {feat_f0.abs().max()}, f1: {feat_f1.abs().max()}, uf0: {feat_f0_unfold.abs().max()}, uf1: {feat_f1_unfold.abs().max()}") + elif feat_f0 is not None and feat_c0 is not None: + logger.info(f"c0: {feat_c0.abs().max()}, c1: {feat_c1.abs().max()}, f0: {feat_f0.abs().max()}, f1: {feat_f1.abs().max()}") + elif feat_c0 is not None: + logger.info(f"c0: {feat_c0.abs().max()}, c1: {feat_c1.abs().max()}") + + self.fine_matching(feat_f0_unfold, feat_f1_unfold, data) + return data \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/src/loftr/loftr_module/__init__.py b/imcui/third_party/MatchAnything/src/loftr/loftr_module/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ca51db4f50a0c4f3dcd795e74b83e633ab2e990a --- /dev/null +++ b/imcui/third_party/MatchAnything/src/loftr/loftr_module/__init__.py @@ -0,0 +1,2 @@ +from .transformer import LocalFeatureTransformer +from .fine_preprocess import FinePreprocess diff --git a/imcui/third_party/MatchAnything/src/loftr/loftr_module/fine_preprocess.py b/imcui/third_party/MatchAnything/src/loftr/loftr_module/fine_preprocess.py new file mode 100644 index 0000000000000000000000000000000000000000..019c0199e7be5c7fe65669420a98003c51c8bed2 --- /dev/null +++ b/imcui/third_party/MatchAnything/src/loftr/loftr_module/fine_preprocess.py @@ -0,0 +1,350 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from einops.einops import rearrange, repeat +from ..backbone.repvgg import RepVGGBlock + +from loguru import logger + +def conv1x1(in_planes, out_planes, stride=1): + """1x1 convolution without padding""" + return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, padding=0, bias=False) + + +def conv3x3(in_planes, out_planes, stride=1): + """3x3 convolution with padding""" + return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False) + +class FinePreprocess(nn.Module): + def __init__(self, config): + super().__init__() + + self.config = config + self.cat_c_feat = config['fine_concat_coarse_feat'] + self.sample_c_feat = config['fine_sample_coarse_feat'] + self.fpn_inter_feat = config['inter_feat'] + self.rep_fpn = config['rep_fpn'] + self.deploy = config['rep_deploy'] + self.multi_regress = config['match_fine']['multi_regress'] + self.local_regress = config['match_fine']['local_regress'] + self.local_regress_inner = config['match_fine']['local_regress_inner'] + block_dims = config['resnetfpn']['block_dims'] + + self.mtd_spvs = self.config['fine']['mtd_spvs'] + self.align_corner = self.config['align_corner'] + self.fix_bias = self.config['fix_bias'] + + if self.mtd_spvs: + self.W = self.config['fine_window_size'] + else: + # assert False, 'fine_window_matching_size to be revised' # good notification! 
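+            # NOTE: fine_window_matching_size is not wired in yet; fall back to fine_window_size below.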
+ # self.W = self.config['fine_window_matching_size'] + self.W = self.config['fine_window_size'] + + self.backbone_type = self.config['backbone_type'] + + d_model_c = self.config['coarse']['d_model'] + d_model_f = self.config['fine']['d_model'] + self.d_model_f = d_model_f + if self.fpn_inter_feat: + if self.rep_fpn: + self.layer3_outconv = conv1x1(block_dims[2], block_dims[2]) + self.layer2_outconv = conv1x1(block_dims[1], block_dims[2]) + self.layer2_outconv2 = [] + self.layer2_outconv2.append(RepVGGBlock(in_channels=block_dims[2], out_channels=block_dims[2], kernel_size=3, + stride=1, padding=1, groups=1, deploy=self.deploy, use_se=False, leaky=0.01)) + self.layer2_outconv2.append(RepVGGBlock(in_channels=block_dims[2], out_channels=block_dims[1], kernel_size=3, + stride=1, padding=1, groups=1, deploy=self.deploy, use_se=False, leaky=-2)) + self.layer2_outconv2 = nn.ModuleList(self.layer2_outconv2) + self.layer1_outconv = conv1x1(block_dims[0], block_dims[1]) + self.layer1_outconv2 = [] + self.layer1_outconv2.append(RepVGGBlock(in_channels=block_dims[1], out_channels=block_dims[1], kernel_size=3, + stride=1, padding=1, groups=1, deploy=self.deploy, use_se=False, leaky=0.01)) + self.layer1_outconv2.append(RepVGGBlock(in_channels=block_dims[1], out_channels=block_dims[0], kernel_size=3, + stride=1, padding=1, groups=1, deploy=self.deploy, use_se=False, leaky=-2)) + self.layer1_outconv2 = nn.ModuleList(self.layer1_outconv2) + + else: + self.layer3_outconv = conv1x1(block_dims[2], block_dims[2]) + self.layer2_outconv = conv1x1(block_dims[1], block_dims[2]) + self.layer2_outconv2 = nn.Sequential( + conv3x3(block_dims[2], block_dims[2]), + nn.BatchNorm2d(block_dims[2]), + nn.LeakyReLU(), + conv3x3(block_dims[2], block_dims[1]), + ) + self.layer1_outconv = conv1x1(block_dims[0], block_dims[1]) + self.layer1_outconv2 = nn.Sequential( + conv3x3(block_dims[1], block_dims[1]), + nn.BatchNorm2d(block_dims[1]), + nn.LeakyReLU(), + conv3x3(block_dims[1], block_dims[0]), + ) + elif self.cat_c_feat: + self.down_proj = nn.Linear(d_model_c, d_model_f, bias=True) + self.merge_feat = nn.Linear(2*d_model_f, d_model_f, bias=True) + if self.sample_c_feat: + self.down_proj = nn.Linear(d_model_c, d_model_f, bias=True) + + + self._reset_parameters() + + def _reset_parameters(self): + for p in self.parameters(): + if p.dim() > 1: + nn.init.kaiming_normal_(p, mode="fan_out", nonlinearity="relu") + + def inter_fpn(self, feat_c, x2, x1, stride): + feat_c = self.layer3_outconv(feat_c) + feat_c = F.interpolate(feat_c, scale_factor=2., mode='bilinear', align_corners=False) + x2 = self.layer2_outconv(x2) + if self.rep_fpn: + x2 = x2 + feat_c + for layer in self.layer2_outconv2: + x2 = layer(x2) + else: + x2 = self.layer2_outconv2(x2+feat_c) + + x2 = F.interpolate(x2, scale_factor=2., mode='bilinear', align_corners=False) + x1 = self.layer1_outconv(x1) + if self.rep_fpn: + x1 = x1 + x2 + for layer in self.layer1_outconv2: + x1 = layer(x1) + else: + x1 = self.layer1_outconv2(x1+x2) + + if stride == 4: + logger.info('stride == 4') + + elif stride == 8: + logger.info('stride == 8') + x1 = F.interpolate(x1, scale_factor=2., mode='bilinear', align_corners=False) + else: + logger.info('stride not in {4,8}') + assert False + return x1 + + def forward(self, feat_f0, feat_f1, feat_c0, feat_c1, data): + W = self.W + if self.fix_bias: + stride = 4 + else: + stride = data['hw0_f'][0] // data['hw0_c'][0] + + data.update({'W': W}) + if data['b_ids'].shape[0] == 0: + feat0 = torch.empty(0, self.W**2, self.d_model_f, 
device=feat_c0.device) + feat1 = torch.empty(0, self.W**2, self.d_model_f, device=feat_c0.device) + # return feat0, feat1 + return feat0.float(), feat1.float() + + if self.fpn_inter_feat: + if data['hw0_i'] != data['hw1_i']: + if self.align_corner is False: + assert self.backbone_type != 's2dnet' + + feat_c0 = rearrange(feat_c0, 'b (h w) c -> b c h w', h=data['hw0_c'][0]) + feat_c1 = rearrange(feat_c1, 'b (h w) c -> b c h w', h=data['hw1_c'][0]) + x2_0, x1_0 = data['feats_x2_0'], data['feats_x1_0'] + x2_1, x1_1 = data['feats_x2_1'], data['feats_x1_1'] + del data['feats_x2_0'], data['feats_x1_0'], data['feats_x2_1'], data['feats_x1_1'] + feat_f0, feat_f1 = self.inter_fpn(feat_c0, x2_0, x1_0, stride), self.inter_fpn(feat_c1, x2_1, x1_1, stride) + + if self.local_regress_inner: + assert W == 8 + feat_f0 = F.unfold(feat_f0, kernel_size=(W, W), stride=stride, padding=0) + feat_f0 = rearrange(feat_f0, 'n (c ww) l -> n l ww c', ww=W**2) + feat_f1 = F.unfold(feat_f1, kernel_size=(W+2, W+2), stride=stride, padding=1) + feat_f1 = rearrange(feat_f1, 'n (c ww) l -> n l ww c', ww=(W+2)**2) + elif W == 10 and self.multi_regress: + feat_f0 = F.unfold(feat_f0, kernel_size=(W, W), stride=stride, padding=1) + feat_f0 = rearrange(feat_f0, 'n (c ww) l -> n l ww c', ww=W**2) + feat_f1 = F.unfold(feat_f1, kernel_size=(W, W), stride=stride, padding=1) + feat_f1 = rearrange(feat_f1, 'n (c ww) l -> n l ww c', ww=W**2) + elif W == 10: + feat_f0 = F.unfold(feat_f0, kernel_size=(W, W), stride=stride, padding=1) + feat_f0 = rearrange(feat_f0, 'n (c ww) l -> n l ww c', ww=W**2) + feat_f1 = F.unfold(feat_f1, kernel_size=(W, W), stride=stride, padding=1) + feat_f1 = rearrange(feat_f1, 'n (c ww) l -> n l ww c', ww=W**2) + else: + assert not self.multi_regress + feat_f0 = F.unfold(feat_f0, kernel_size=(W, W), stride=stride, padding=0) + feat_f0 = rearrange(feat_f0, 'n (c ww) l -> n l ww c', ww=W**2) + feat_f1 = F.unfold(feat_f1, kernel_size=(W, W), stride=stride, padding=0) + feat_f1 = rearrange(feat_f1, 'n (c ww) l -> n l ww c', ww=W**2) + + # 2. select only the predicted matches + feat_f0 = feat_f0[data['b_ids'], data['i_ids']] # [n, ww, cf] + feat_f1 = feat_f1[data['b_ids'], data['j_ids']] + + return feat_f0, feat_f1 + + else: + if self.align_corner is False: + feat_c = torch.cat([feat_c0, feat_c1], 0) + feat_c = rearrange(feat_c, 'b (h w) c -> b c h w', h=data['hw0_c'][0]) # 1/8 256 + x2 = data['feats_x2'].float() # 1/4 128 + x1 = data['feats_x1'].float() # 1/2 64 + del data['feats_x2'], data['feats_x1'] + assert self.backbone_type != 's2dnet' + feat_c = self.layer3_outconv(feat_c) + feat_c = F.interpolate(feat_c, scale_factor=2., mode='bilinear', align_corners=False) + x2 = self.layer2_outconv(x2) + if self.rep_fpn: + x2 = x2 + feat_c + for layer in self.layer2_outconv2: + x2 = layer(x2) + else: + x2 = self.layer2_outconv2(x2+feat_c) + + x2 = F.interpolate(x2, scale_factor=2., mode='bilinear', align_corners=False) + x1 = self.layer1_outconv(x1) + if self.rep_fpn: + x1 = x1 + x2 + for layer in self.layer1_outconv2: + x1 = layer(x1) + else: + x1 = self.layer1_outconv2(x1+x2) + + if stride == 4: + # logger.info('stride == 4') + pass + elif stride == 8: + # logger.info('stride == 8') + x1 = F.interpolate(x1, scale_factor=2., mode='bilinear', align_corners=False) + else: + # logger.info('stride not in {4,8}') + assert False + + feat_f0, feat_f1 = torch.chunk(x1, 2, dim=0) + + # 1. 
unfold(crop) all local windows + if self.local_regress_inner: + assert W == 8 + feat_f0 = F.unfold(feat_f0, kernel_size=(W, W), stride=stride, padding=0) + feat_f0 = rearrange(feat_f0, 'n (c ww) l -> n l ww c', ww=W**2) + feat_f1 = F.unfold(feat_f1, kernel_size=(W+2, W+2), stride=stride, padding=1) + feat_f1 = rearrange(feat_f1, 'n (c ww) l -> n l ww c', ww=(W+2)**2) + elif self.multi_regress or (self.local_regress and W == 10): + feat_f0 = F.unfold(feat_f0, kernel_size=(W, W), stride=stride, padding=1) + feat_f0 = rearrange(feat_f0, 'n (c ww) l -> n l ww c', ww=W**2) + feat_f1 = F.unfold(feat_f1, kernel_size=(W, W), stride=stride, padding=1) + feat_f1 = rearrange(feat_f1, 'n (c ww) l -> n l ww c', ww=W**2) + elif W == 10: + feat_f0 = F.unfold(feat_f0, kernel_size=(W, W), stride=stride, padding=1) + feat_f0 = rearrange(feat_f0, 'n (c ww) l -> n l ww c', ww=W**2) + feat_f1 = F.unfold(feat_f1, kernel_size=(W, W), stride=stride, padding=1) + feat_f1 = rearrange(feat_f1, 'n (c ww) l -> n l ww c', ww=W**2) + + else: + feat_f0 = F.unfold(feat_f0, kernel_size=(W, W), stride=stride, padding=0) + feat_f0 = rearrange(feat_f0, 'n (c ww) l -> n l ww c', ww=W**2) + feat_f1 = F.unfold(feat_f1, kernel_size=(W, W), stride=stride, padding=0) + feat_f1 = rearrange(feat_f1, 'n (c ww) l -> n l ww c', ww=W**2) + + # 2. select only the predicted matches + feat_f0 = feat_f0[data['b_ids'], data['i_ids']] # [n, ww, cf] + feat_f1 = feat_f1[data['b_ids'], data['j_ids']] + + return feat_f0, feat_f1 + elif self.fix_bias: + feat_c = torch.cat([feat_c0, feat_c1], 0) + feat_c = rearrange(feat_c, 'b (h w) c -> b c h w', h=data['hw0_c'][0]) + x2 = data['feats_x2'].float() + x1 = data['feats_x1'].float() + assert self.backbone_type != 's2dnet' + x3_out = self.layer3_outconv(feat_c) + x3_out_2x = F.interpolate(x3_out, size=((x3_out.size(-2)-1)*2+1, (x3_out.size(-1)-1)*2+1), mode='bilinear', align_corners=False) + x2 = self.layer2_outconv(x2) + x2 = self.layer2_outconv2(x2+x3_out_2x) + + x2 = F.interpolate(x2, size=((x2.size(-2)-1)*2+1, (x2.size(-1)-1)*2+1), mode='bilinear', align_corners=False) + x1_out = self.layer1_outconv(x1) + x1_out = self.layer1_outconv2(x1_out+x2) + x0_out = x1_out + + feat_f0, feat_f1 = torch.chunk(x0_out, 2, dim=0) + + # 1. unfold(crop) all local windows + feat_f0_unfold = F.unfold(feat_f0, kernel_size=(W, W), stride=stride, padding=W//2) + feat_f0_unfold = rearrange(feat_f0_unfold, 'n (c ww) l -> n l ww c', ww=W**2) + feat_f1_unfold = F.unfold(feat_f1, kernel_size=(W, W), stride=stride, padding=W//2) + feat_f1_unfold = rearrange(feat_f1_unfold, 'n (c ww) l -> n l ww c', ww=W**2) + + # 2. 
select only the predicted matches + feat_f0_unfold = feat_f0_unfold[data['b_ids'], data['i_ids']] # [n, ww, cf] + feat_f1_unfold = feat_f1_unfold[data['b_ids'], data['j_ids']] + + return feat_f0_unfold, feat_f1_unfold + + + + elif self.sample_c_feat: + if self.align_corner is False: + # easy implemented but memory consuming + feat_c = self.down_proj(torch.cat([feat_c0, + feat_c1], 0)) # [n, (h w), c] -> [2n, (h w), cf] + feat_c = rearrange(feat_c, 'n (h w) c -> n c h w', h=data['hw0_c'][0], w=data['hw0_c'][1]) + feat_f = F.interpolate(feat_c, scale_factor=8., mode='bilinear', align_corners=False) # [2n, cf, hf, wf] + feat_f_unfold = F.unfold(feat_f, kernel_size=(W, W), stride=stride, padding=0) + feat_f_unfold = rearrange(feat_f_unfold, 'n (c ww) l -> n l ww c', ww=W**2) + feat_f0_unfold, feat_f1_unfold = torch.chunk(feat_f_unfold, 2, dim=0) + feat_f0_unfold = feat_f0_unfold[data['b_ids'], data['i_ids']] # [m, ww, cf] + feat_f1_unfold = feat_f1_unfold[data['b_ids'], data['j_ids']] # [m, ww, cf] + # return feat_f0_unfold, feat_f1_unfold + return feat_f0_unfold.float(), feat_f1_unfold.float() + else: + if self.align_corner is False: + # 1. unfold(crop) all local windows + assert False, 'maybe exist bugs' + feat_f0_unfold = F.unfold(feat_f0, kernel_size=(W, W), stride=stride, padding=0) + feat_f0_unfold = rearrange(feat_f0_unfold, 'n (c ww) l -> n l ww c', ww=W**2) + feat_f1_unfold = F.unfold(feat_f1, kernel_size=(W, W), stride=stride, padding=0) + feat_f1_unfold = rearrange(feat_f1_unfold, 'n (c ww) l -> n l ww c', ww=W**2) + + # 2. select only the predicted matches + feat_f0_unfold = feat_f0_unfold[data['b_ids'], data['i_ids']] # [n, ww, cf] + feat_f1_unfold = feat_f1_unfold[data['b_ids'], data['j_ids']] + + # option: use coarse-level loftr feature as context: concat and linear + if self.cat_c_feat: + feat_c_win = self.down_proj(torch.cat([feat_c0[data['b_ids'], data['i_ids']], + feat_c1[data['b_ids'], data['j_ids']]], 0)) # [2n, c] + feat_cf_win = self.merge_feat(torch.cat([ + torch.cat([feat_f0_unfold, feat_f1_unfold], 0), # [2n, ww, cf] + repeat(feat_c_win, 'n c -> n ww c', ww=W**2), # [2n, ww, cf] + ], -1)) + feat_f0_unfold, feat_f1_unfold = torch.chunk(feat_cf_win, 2, dim=0) + + return feat_f0_unfold, feat_f1_unfold + + else: + # 1. unfold(crop) all local windows + if self.fix_bias: + feat_f0_unfold = F.unfold(feat_f0, kernel_size=(W, W), stride=stride, padding=W//2) + feat_f0_unfold = rearrange(feat_f0_unfold, 'n (c ww) l -> n l ww c', ww=W**2) + feat_f1_unfold = F.unfold(feat_f1, kernel_size=(W, W), stride=stride, padding=W//2) + feat_f1_unfold = rearrange(feat_f1_unfold, 'n (c ww) l -> n l ww c', ww=W**2) + else: + feat_f0_unfold = F.unfold(feat_f0, kernel_size=(W, W), stride=stride, padding=W//2) + feat_f0_unfold = rearrange(feat_f0_unfold, 'n (c ww) l -> n l ww c', ww=W**2) + feat_f1_unfold = F.unfold(feat_f1, kernel_size=(W, W), stride=stride, padding=W//2) + feat_f1_unfold = rearrange(feat_f1_unfold, 'n (c ww) l -> n l ww c', ww=W**2) + + # 2. 
select only the predicted matches + feat_f0_unfold = feat_f0_unfold[data['b_ids'], data['i_ids']] # [n, ww, cf] + feat_f1_unfold = feat_f1_unfold[data['b_ids'], data['j_ids']] + + # option: use coarse-level loftr feature as context: concat and linear + if self.cat_c_feat: + feat_c_win = self.down_proj(torch.cat([feat_c0[data['b_ids'], data['i_ids']], + feat_c1[data['b_ids'], data['j_ids']]], 0)) # [2n, c] + feat_cf_win = self.merge_feat(torch.cat([ + torch.cat([feat_f0_unfold, feat_f1_unfold], 0), # [2n, ww, cf] + repeat(feat_c_win, 'n c -> n ww c', ww=W**2), # [2n, ww, cf] + ], -1)) + feat_f0_unfold, feat_f1_unfold = torch.chunk(feat_cf_win, 2, dim=0) + + # return feat_f0_unfold, feat_f1_unfold + return feat_f0_unfold.float(), feat_f1_unfold.float() \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/src/loftr/loftr_module/linear_attention.py b/imcui/third_party/MatchAnything/src/loftr/loftr_module/linear_attention.py new file mode 100644 index 0000000000000000000000000000000000000000..1d7f08fcd77195dab126a59d2e832e72bc31012a --- /dev/null +++ b/imcui/third_party/MatchAnything/src/loftr/loftr_module/linear_attention.py @@ -0,0 +1,217 @@ +""" +Linear Transformer proposed in "Transformers are RNNs: Fast Autoregressive Transformers with Linear Attention" +Modified from: https://github.com/idiap/fast-transformers/blob/master/fast_transformers/attention/linear_attention.py +""" + +import torch +from torch.nn import Module, Dropout +import torch.nn.functional as F + +# if hasattr(F, 'scaled_dot_product_attention'): +# FLASH_AVAILABLE = True +# else: # v100 +FLASH_AVAILABLE = False + # import xformers.ops +from ..utils.position_encoding import PositionEncodingSine, RoPEPositionEncodingSine +from einops.einops import rearrange +from loguru import logger + + +# flash_attn_func_ok = True +# try: +# from flash_attn import flash_attn_func +# except ModuleNotFoundError: +# flash_attn_func_ok = False + +def elu_feature_map(x): + return torch.nn.functional.elu(x) + 1 + + +class LinearAttention(Module): + def __init__(self, eps=1e-6): + super().__init__() + self.feature_map = elu_feature_map + self.eps = eps + + @torch.cuda.amp.custom_fwd(cast_inputs=torch.float32) + def forward(self, queries, keys, values, q_mask=None, kv_mask=None): + """ Multi-Head linear attention proposed in "Transformers are RNNs" + Args: + queries: [N, L, H, D] + keys: [N, S, H, D] + values: [N, S, H, D] + q_mask: [N, L] + kv_mask: [N, S] + Returns: + queried_values: (N, L, H, D) + """ + Q = self.feature_map(queries) + K = self.feature_map(keys) + + # set padded position to zero + if q_mask is not None: + Q = Q * q_mask[:, :, None, None] + if kv_mask is not None: + K = K * kv_mask[:, :, None, None] + values = values * kv_mask[:, :, None, None] + + v_length = values.size(1) + values = values / v_length # prevent fp16 overflow + KV = torch.einsum("nshd,nshv->nhdv", K, values) # (S,D)' @ S,V + Z = 1 / (torch.einsum("nlhd,nhd->nlh", Q, K.sum(dim=1)) + self.eps) + # queried_values = torch.einsum("nlhd,nhdv,nlh->nlhv", Q, KV, Z) * v_length + queried_values = torch.einsum("nlhd,nhdv,nlh->nlhv", Q, KV, Z) * v_length + + return queried_values.contiguous() + +class RoPELinearAttention(Module): + def __init__(self, eps=1e-6): + super().__init__() + self.feature_map = elu_feature_map + self.eps = eps + self.RoPE = RoPEPositionEncodingSine(256, max_shape=(256, 256)) + + @torch.cuda.amp.custom_fwd(cast_inputs=torch.float32) + def forward(self, queries, keys, values, q_mask=None, kv_mask=None, H=None, W=None): + """ 
Multi-Head linear attention proposed in "Transformers are RNNs" + Args: + queries: [N, L, H, D] + keys: [N, S, H, D] + values: [N, S, H, D] + q_mask: [N, L] + kv_mask: [N, S] + Returns: + queried_values: (N, L, H, D) + """ + Q = self.feature_map(queries) + K = self.feature_map(keys) + nhead, d = Q.size(2), Q.size(3) + # set padded position to zero + if q_mask is not None: + Q = Q * q_mask[:, :, None, None] + if kv_mask is not None: + K = K * kv_mask[:, :, None, None] + values = values * kv_mask[:, :, None, None] + + v_length = values.size(1) + values = values / v_length # prevent fp16 overflow + # Q = Q / Q.size(1) + # logger.info(f"Q: {Q.dtype}, K: {K.dtype}, values: {values.dtype}") + + Z = 1 / (torch.einsum("nlhd,nhd->nlh", Q, K.sum(dim=1)) + self.eps) + # logger.info(f"Z_max: {Z.abs().max()}") + Q = rearrange(Q, 'n (h w) nhead d -> n h w (nhead d)', h=H, w=W) + K = rearrange(K, 'n (h w) nhead d -> n h w (nhead d)', h=H, w=W) + Q, K = self.RoPE(Q), self.RoPE(K) + # logger.info(f"Q_rope: {Q.abs().max()}, K_rope: {K.abs().max()}") + Q = rearrange(Q, 'n h w (nhead d) -> n (h w) nhead d', nhead=nhead, d=d) + K = rearrange(K, 'n h w (nhead d) -> n (h w) nhead d', nhead=nhead, d=d) + KV = torch.einsum("nshd,nshv->nhdv", K, values) # (S,D)' @ S,V + del K, values + # logger.info(f"KV_max: {KV.abs().max()}") + # queried_values = torch.einsum("nlhd,nhdv,nlh->nlhv", Q, KV, Z) * v_length + # Q = torch.einsum("nlhd,nlh->nlhd", Q, Z) + # logger.info(f"QZ_max: {Q.abs().max()}") + # queried_values = torch.einsum("nlhd,nhdv->nlhv", Q, KV) * v_length + # logger.info(f"message_max: {queried_values.abs().max()}") + queried_values = torch.einsum("nlhd,nhdv,nlh->nlhv", Q, KV, Z) * v_length + + return queried_values.contiguous() + + +class FullAttention(Module): + def __init__(self, use_dropout=False, attention_dropout=0.1): + super().__init__() + self.use_dropout = use_dropout + self.dropout = Dropout(attention_dropout) + + # @torch.cuda.amp.custom_fwd(cast_inputs=torch.float32) + def forward(self, queries, keys, values, q_mask=None, kv_mask=None): + """ Multi-head scaled dot-product attention, a.k.a full attention. + Args: + queries: [N, L, H, D] + keys: [N, S, H, D] + values: [N, S, H, D] + q_mask: [N, L] + kv_mask: [N, S] + Returns: + queried_values: (N, L, H, D) + """ + # assert kv_mask is None + # mask = torch.zeros(queries.size(0)*queries.size(2), queries.size(1), keys.size(1), device=queries.device) + # mask.masked_fill(~(q_mask[:, :, None] * kv_mask[:, None, :]), float('-inf')) + # if keys.size(1) % 8 != 0: + # mask = torch.cat([mask, torch.zeros(queries.size(0)*queries.size(2), queries.size(1), 8-keys.size(1)%8, device=queries.device)], dim=-1) + # out = xformers.ops.memory_efficient_attention(queries, keys, values, attn_bias=mask[...,:keys.size(1)]) + # return out + + # N = queries.size(0) + # list_q = [queries[i, :q_mask[i].sum, ...] for i in N] + # list_k = [keys[i, :kv_mask[i].sum, ...] for i in N] + # list_v = [values[i, :kv_mask[i].sum, ...] 
for i in N] + # assert N == 1 + # out = xformers.ops.memory_efficient_attention(queries[:,:q_mask.sum(),...], keys[:,:kv_mask.sum(),...], values[:,:kv_mask.sum(),...]) + # out = torch.cat([out, torch.zeros(out.size(0), queries.size(1)-q_mask.sum(), queries.size(2), queries.size(3), device=queries.device)], dim=1) + # return out + # Compute the unnormalized attention and apply the masks + QK = torch.einsum("nlhd,nshd->nlsh", queries, keys) + if kv_mask is not None: + QK.masked_fill_(~(q_mask[:, :, None, None] * kv_mask[:, None, :, None]), -1e5) # float('-inf') + + # Compute the attention and the weighted average + softmax_temp = 1. / queries.size(3)**.5 # sqrt(D) + A = torch.softmax(softmax_temp * QK, dim=2) + if self.use_dropout: + A = self.dropout(A) + + queried_values = torch.einsum("nlsh,nshd->nlhd", A, values) + + return queried_values.contiguous() + + +class XAttention(Module): + def __init__(self, use_dropout=False, attention_dropout=0.1): + super().__init__() + self.use_dropout = use_dropout + if use_dropout: + self.dropout = Dropout(attention_dropout) + + def forward(self, queries, keys, values, q_mask=None, kv_mask=None): + """ Multi-head scaled dot-product attention, a.k.a full attention. + Args: + if FLASH_AVAILABLE: # pytorch scaled_dot_product_attention + queries: [N, H, L, D] + keys: [N, H, S, D] + values: [N, H, S, D] + else: + queries: [N, L, H, D] + keys: [N, S, H, D] + values: [N, S, H, D] + q_mask: [N, L] + kv_mask: [N, S] + Returns: + queried_values: (N, L, H, D) + """ + + assert q_mask is None and kv_mask is None, "already been sliced" + if FLASH_AVAILABLE: + # args = [x.half().contiguous() for x in [queries, keys, values]] + # out = F.scaled_dot_product_attention(*args, attn_mask=mask).to(queries.dtype) + args = [x.contiguous() for x in [queries, keys, values]] + out = F.scaled_dot_product_attention(*args) + else: + # if flash_attn_func_ok: + # out = flash_attn_func(queries, keys, values) + # else: + QK = torch.einsum("nlhd,nshd->nlsh", queries, keys) + + # Compute the attention and the weighted average + softmax_temp = 1. 
/ queries.size(3)**.5 # sqrt(D) + A = torch.softmax(softmax_temp * QK, dim=2) + + out = torch.einsum("nlsh,nshd->nlhd", A, values) + + # out = xformers.ops.memory_efficient_attention(queries, keys, values) + # out = xformers.ops.memory_efficient_attention(queries[:,:q_mask.sum(),...], keys[:,:kv_mask.sum(),...], values[:,:kv_mask.sum(),...]) + # out = torch.cat([out, torch.zeros(out.size(0), queries.size(1)-q_mask.sum(), queries.size(2), queries.size(3), device=queries.device)], dim=1) + return out diff --git a/imcui/third_party/MatchAnything/src/loftr/loftr_module/transformer.py b/imcui/third_party/MatchAnything/src/loftr/loftr_module/transformer.py new file mode 100644 index 0000000000000000000000000000000000000000..1ee8ce85912ad44539c27836ddd20f676912df5b --- /dev/null +++ b/imcui/third_party/MatchAnything/src/loftr/loftr_module/transformer.py @@ -0,0 +1,1768 @@ +import copy +import torch +import torch.nn as nn +import torch.nn.functional as F +from .linear_attention import LinearAttention, RoPELinearAttention, FullAttention, XAttention +from einops.einops import rearrange +from collections import OrderedDict +from .transformer_utils import TokenConfidence, MatchAssignment, filter_matches +from ..utils.coarse_matching import CoarseMatching +from ..utils.position_encoding import RoPEPositionEncodingSine +import numpy as np +from loguru import logger + +PFLASH_AVAILABLE = False + +class PANEncoderLayer(nn.Module): + def __init__(self, + d_model, + nhead, + attention='linear', + pool_size=4, + bn=True, + xformer=False, + leaky=-1.0, + dw_conv=False, + scatter=False, + ): + super(PANEncoderLayer, self).__init__() + + self.pool_size = pool_size + self.dw_conv = dw_conv + self.scatter = scatter + if self.dw_conv: + self.aggregate = nn.Conv2d(d_model, d_model, kernel_size=pool_size, padding=0, stride=pool_size, bias=False, groups=d_model) + + assert not self.scatter, 'buggy implemented here' + self.dim = d_model // nhead + self.nhead = nhead + + self.max_pool = torch.nn.MaxPool2d(kernel_size=self.pool_size, stride=self.pool_size) + # multi-head attention + if bn: + method = 'dw_bn' + else: + method = 'dw' + self.q_proj_conv = self._build_projection(d_model, d_model, method=method) + self.k_proj_conv = self._build_projection(d_model, d_model, method=method) + self.v_proj_conv = self._build_projection(d_model, d_model, method=method) + + # self.q_proj = nn.Linear(d_mosdel, d_model, bias=False) + # self.k_proj = nn.Linear(d_model, d_model, bias=False) + # self.v_proj = nn.Linear(d_model, d_model, bias=False) + if xformer: + self.attention = XAttention() + else: + self.attention = LinearAttention() if attention == 'linear' else FullAttention() + self.merge = nn.Linear(d_model, d_model, bias=False) + + # feed-forward network + if leaky > 0: + self.mlp = nn.Sequential( + nn.Linear(d_model*2, d_model*2, bias=False), + nn.LeakyReLU(leaky, True), + nn.Linear(d_model*2, d_model, bias=False), + ) + + else: + self.mlp = nn.Sequential( + nn.Linear(d_model*2, d_model*2, bias=False), + nn.ReLU(True), + nn.Linear(d_model*2, d_model, bias=False), + ) + + # norm and dropout + self.norm1 = nn.LayerNorm(d_model) + self.norm2 = nn.LayerNorm(d_model) + + # self.norm1 = nn.BatchNorm2d(d_model) + + def forward(self, x, source, x_mask=None, source_mask=None): + """ + Args: + x (torch.Tensor): [N, C, H1, W1] + source (torch.Tensor): [N, C, H2, W2] + x_mask (torch.Tensor): [N, H1, W1] (optional) (L = H1*W1) + source_mask (torch.Tensor): [N, H2, W2] (optional) (S = H2*W2) + """ + bs = x.size(0) + H1, W1 = x.size(-2), 
x.size(-1) + H2, W2 = source.size(-2), source.size(-1) + + query, key, value = x, source, source + + if self.dw_conv: + query = self.norm1(self.aggregate(query).permute(0,2,3,1)).permute(0,3,1,2) + else: + query = self.norm1(self.max_pool(query).permute(0,2,3,1)).permute(0,3,1,2) + # only need to cal key or value... + key = self.norm1(self.max_pool(key).permute(0,2,3,1)).permute(0,3,1,2) + value = self.norm1(self.max_pool(value).permute(0,2,3,1)).permute(0,3,1,2) + + # After 0617 bnorm to prevent permute*6 + # query = self.norm1(self.max_pool(query)) + # key = self.norm1(self.max_pool(key)) + # value = self.norm1(self.max_pool(value)) + # multi-head attention + query = self.q_proj_conv(query) # [N, C, H1//pool, W1//pool] + key = self.k_proj_conv(key) + value = self.v_proj_conv(value) + + C = query.shape[-3] + + ismask = x_mask is not None and source_mask is not None + if bs == 1 or not ismask: + if ismask: + x_mask = self.max_pool(x_mask.float()).bool() # [N, H1//pool, W1//pool] + source_mask = self.max_pool(source_mask.float()).bool() + + mask_h0, mask_w0 = x_mask[0].sum(-2)[0], x_mask[0].sum(-1)[0] + mask_h1, mask_w1 = source_mask[0].sum(-2)[0], source_mask[0].sum(-1)[0] + + query = query[:, :, :mask_h0, :mask_w0] + key = key[:, :, :mask_h1, :mask_w1] + value = value[:, :, :mask_h1, :mask_w1] + + else: + assert x_mask is None and source_mask is None + + # query = query.reshape(bs, -1, self.nhead, self.dim) # [N, L, H, D] + # key = key.reshape(bs, -1, self.nhead, self.dim) # [N, S, H, D] + # value = value.reshape(bs, -1, self.nhead, self.dim) # [N, S, H, D] + if PFLASH_AVAILABLE: # N H L D + query = rearrange(query, 'n (nhead d) h w -> n nhead (h w) d', nhead=self.nhead, d=self.dim) + key = rearrange(key, 'n (nhead d) h w -> n nhead (h w) d', nhead=self.nhead, d=self.dim) + value = rearrange(value, 'n (nhead d) h w -> n nhead (h w) d', nhead=self.nhead, d=self.dim) + + else: # N L H D + query = rearrange(query, 'n (nhead d) h w -> n (h w) nhead d', nhead=self.nhead, d=self.dim) # [N, L, H, D] + key = rearrange(key, 'n (nhead d) h w -> n (h w) nhead d', nhead=self.nhead, d=self.dim) # [N, S, H, D] + value = rearrange(value, 'n (nhead d) h w -> n (h w) nhead d', nhead=self.nhead, d=self.dim) # [N, S, H, D] + + message = self.attention(query, key, value, q_mask=None, kv_mask=None) # [N, L, H, D] or [N, H, L, D] + + if PFLASH_AVAILABLE: # N H L D + message = rearrange(message, 'n nhead L d -> n L nhead d', nhead=self.nhead, d=self.dim) + + if ismask: + message = message.view(bs, mask_h0, mask_w0, self.nhead, self.dim) + if mask_h0 != x_mask.size(-2): + message = torch.cat([message, torch.zeros(message.size(0), x_mask.size(-2)-mask_h0, x_mask.size(-1), self.nhead, self.dim, device=message.device, dtype=message.dtype)], dim=1) + elif mask_w0 != x_mask.size(-1): + message = torch.cat([message, torch.zeros(message.size(0), x_mask.size(-2), x_mask.size(-1)-mask_w0, self.nhead, self.dim, device=message.device, dtype=message.dtype)], dim=2) + # message = message.view(bs, -1, self.nhead*self.dim) # [N, L, C] + + else: + assert x_mask is None and source_mask is None + + + message = self.merge(message.reshape(bs, -1, self.nhead*self.dim)) # [N, L, C] + # message = message.reshape(bs, C, H1//self.pool_size, W1//self.pool_size) # [N, C, H, W] bug??? 
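+        # NOTE: reshaping [N, L, C] directly to [N, C, H, W] (the commented line above) scrambles the
+        # channel/spatial layout; rearrange makes the required permutation explicit.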
+ message = rearrange(message, 'b (h w) c -> b c h w', h=H1//self.pool_size, w=W1//self.pool_size) # [N, C, H, W] + + if self.scatter: + message = torch.repeat_interleave(message, self.pool_size, dim=-2) + message = torch.repeat_interleave(message, self.pool_size, dim=-1) + # message = self.aggregate(message) + message = message * self.aggregate.weight.data.reshape(1, C, self.pool_size, self.pool_size).repeat(1,1,message.shape[-2]//self.pool_size,message.shape[-1]//self.pool_size) + else: + message = torch.nn.functional.interpolate(message, scale_factor=self.pool_size, mode='bilinear', align_corners=False) # [N, C, H1, W1] + + # message = self.norm1(message) + + # feed-forward network + message = self.mlp(torch.cat([x, message], dim=1).permute(0, 2, 3, 1)) # [N, H1, W1, C] + message = self.norm2(message).permute(0, 3, 1, 2) # [N, C, H1, W1] + + return x + message + else: + x_mask = self.max_pool(x_mask.float()).bool() + source_mask = self.max_pool(source_mask.float()).bool() + m_list = [] + for i in range(bs): + mask_h0, mask_w0 = x_mask[i].sum(-2)[0], x_mask[i].sum(-1)[0] + mask_h1, mask_w1 = source_mask[i].sum(-2)[0], source_mask[i].sum(-1)[0] + + q = query[i:i+1, :, :mask_h0, :mask_w0] + k = key[i:i+1, :, :mask_h1, :mask_w1] + v = value[i:i+1, :, :mask_h1, :mask_w1] + + if PFLASH_AVAILABLE: # N H L D + q = rearrange(q, 'n (nhead d) h w -> n nhead (h w) d', nhead=self.nhead, d=self.dim) + k = rearrange(k, 'n (nhead d) h w -> n nhead (h w) d', nhead=self.nhead, d=self.dim) + v = rearrange(v, 'n (nhead d) h w -> n nhead (h w) d', nhead=self.nhead, d=self.dim) + + else: # N L H D + + q = rearrange(q, 'n (nhead d) h w -> n (h w) nhead d', nhead=self.nhead, d=self.dim) # [N, L, H, D] + k = rearrange(k, 'n (nhead d) h w -> n (h w) nhead d', nhead=self.nhead, d=self.dim) # [N, S, H, D] + v = rearrange(v, 'n (nhead d) h w -> n (h w) nhead d', nhead=self.nhead, d=self.dim) # [N, S, H, D] + + m = self.attention(q, k, v, q_mask=None, kv_mask=None) # [N, L, H, D] + + if PFLASH_AVAILABLE: # N H L D + m = rearrange(m, 'n nhead L d -> n L nhead d', nhead=self.nhead, d=self.dim) + + m = m.view(1, mask_h0, mask_w0, self.nhead, self.dim) + if mask_h0 != x_mask.size(-2): + m = torch.cat([m, torch.zeros(1, x_mask.size(-2)-mask_h0, x_mask.size(-1), self.nhead, self.dim, device=m.device, dtype=m.dtype)], dim=1) + elif mask_w0 != x_mask.size(-1): + m = torch.cat([m, torch.zeros(1, x_mask.size(-2), x_mask.size(-1)-mask_w0, self.nhead, self.dim, device=m.device, dtype=m.dtype)], dim=2) + m_list.append(m) + message = torch.cat(m_list, dim=0) + + + message = self.merge(message.view(bs, -1, self.nhead*self.dim)) # [N, L, C] + # message = message.reshape(bs, C, H1//self.pool_size, W1//self.pool_size) # [N, C, H, W] bug??? 
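+        # same layout caveat as the unmasked branch: use rearrange, not reshape.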
+ message = rearrange(message, 'b (h w) c -> b c h w', h=H1//self.pool_size, w=W1//self.pool_size) # [N, C, H, W] + + if self.scatter: + message = torch.repeat_interleave(message, self.pool_size, dim=-2) + message = torch.repeat_interleave(message, self.pool_size, dim=-1) + # message = self.aggregate(message) + # assert False + else: + message = torch.nn.functional.interpolate(message, scale_factor=self.pool_size, mode='bilinear', align_corners=False) # [N, C, H1, W1] + + # message = self.norm1(message) + + # feed-forward network + message = self.mlp(torch.cat([x, message], dim=1).permute(0, 2, 3, 1)) # [N, H1, W1, C] + message = self.norm2(message).permute(0, 3, 1, 2) # [N, C, H1, W1] + + return x + message + + + def pro(self, x, source, x_mask=None, source_mask=None, profiler=None): + """ + Args: + x (torch.Tensor): [N, C, H1, W1] + source (torch.Tensor): [N, C, H2, W2] + x_mask (torch.Tensor): [N, H1, W1] (optional) (L = H1*W1) + source_mask (torch.Tensor): [N, H2, W2] (optional) (S = H2*W2) + """ + bs = x.size(0) + H1, W1 = x.size(-2), x.size(-1) + H2, W2 = source.size(-2), source.size(-1) + + query, key, value = x, source, source + + with profiler.profile("permute*6+norm1*3+max_pool*3"): + if self.dw_conv: + query = self.norm1(self.aggregate(query).permute(0,2,3,1)).permute(0,3,1,2) + else: + query = self.norm1(self.max_pool(query).permute(0,2,3,1)).permute(0,3,1,2) + # only need to cal key or value... + key = self.norm1(self.max_pool(key).permute(0,2,3,1)).permute(0,3,1,2) + value = self.norm1(self.max_pool(value).permute(0,2,3,1)).permute(0,3,1,2) + + with profiler.profile("permute*6"): + query = query.permute(0, 2, 3, 1) + key = key.permute(0, 2, 3, 1) + value = value.permute(0, 2, 3, 1) + + query = query.permute(0,3,1,2) + key = key.permute(0,3,1,2) + value = value.permute(0,3,1,2) + + # query = self.bnorm1(self.max_pool(query)) + # key = self.bnorm1(self.max_pool(key)) + # value = self.bnorm1(self.max_pool(value)) + # multi-head attention + + with profiler.profile("q_conv+k_conv+v_conv"): + query = self.q_proj_conv(query) # [N, C, H1//pool, W1//pool] + key = self.k_proj_conv(key) + value = self.v_proj_conv(value) + + C = query.shape[-3] + # TODO: Need to be consistent with bs=1 (where mask region do not in attention at all) + if x_mask is not None and source_mask is not None: + x_mask = self.max_pool(x_mask.float()).bool() # [N, H1//pool, W1//pool] + source_mask = self.max_pool(source_mask.float()).bool() + + mask_h0, mask_w0 = x_mask[0].sum(-2)[0], x_mask[0].sum(-1)[0] + mask_h1, mask_w1 = source_mask[0].sum(-2)[0], source_mask[0].sum(-1)[0] + + query = query[:, :, :mask_h0, :mask_w0] + key = key[:, :, :mask_h1, :mask_w1] + value = value[:, :, :mask_h1, :mask_w1] + + # mask_h0, mask_w0 = data['mask0'][0].sum(-2)[0], data['mask0'][0].sum(-1)[0] + # mask_h1, mask_w1 = data['mask1'][0].sum(-2)[0], data['mask1'][0].sum(-1)[0] + # C = feat_c0.shape[-3] + # feat_c0 = feat_c0[:, :, :mask_h0, :mask_w0] + # feat_c1 = feat_c1[:, :, :mask_h1, :mask_w1] + + + # feat_c0 = feat_c0.reshape(-1, mask_h0, mask_w0, C) + # feat_c1 = feat_c1.reshape(-1, mask_h1, mask_w1, C) + # if mask_h0 != data['mask0'].size(-2): + # feat_c0 = torch.cat([feat_c0, torch.zeros(feat_c0.size(0), data['hw0_c'][0]-mask_h0, data['hw0_c'][1], C, device=feat_c0.device)], dim=1) + # elif mask_w0 != data['mask0'].size(-1): + # feat_c0 = torch.cat([feat_c0, torch.zeros(feat_c0.size(0), data['hw0_c'][0], data['hw0_c'][1]-mask_w0, C, device=feat_c0.device)], dim=2) + + # if mask_h1 != data['mask1'].size(-2): + # feat_c1 = 
torch.cat([feat_c1, torch.zeros(feat_c1.size(0), data['hw1_c'][0]-mask_h1, data['hw1_c'][1], C, device=feat_c1.device)], dim=1) + # elif mask_w1 != data['mask1'].size(-1): + # feat_c1 = torch.cat([feat_c1, torch.zeros(feat_c1.size(0), data['hw1_c'][0], data['hw1_c'][1]-mask_w1, C, device=feat_c1.device)], dim=2) + + + else: + assert x_mask is None and source_mask is None + + + + # query = query.reshape(bs, -1, self.nhead, self.dim) # [N, L, H, D] + # key = key.reshape(bs, -1, self.nhead, self.dim) # [N, S, H, D] + # value = value.reshape(bs, -1, self.nhead, self.dim) # [N, S, H, D] + + with profiler.profile("rearrange*3"): + query = rearrange(query, 'n (nhead d) h w -> n (h w) nhead d', nhead=self.nhead, d=self.dim) # [N, L, H, D] + key = rearrange(key, 'n (nhead d) h w -> n (h w) nhead d', nhead=self.nhead, d=self.dim) # [N, S, H, D] + value = rearrange(value, 'n (nhead d) h w -> n (h w) nhead d', nhead=self.nhead, d=self.dim) # [N, S, H, D] + + with profiler.profile("attention"): + message = self.attention(query, key, value, q_mask=None, kv_mask=None) # [N, L, H, D] + + if x_mask is not None and source_mask is not None: + message = message.view(bs, mask_h0, mask_w0, self.nhead, self.dim) + if mask_h0 != x_mask.size(-2): + message = torch.cat([message, torch.zeros(message.size(0), x_mask.size(-2)-mask_h0, x_mask.size(-1), self.nhead, self.dim, device=message.device, dtype=message.dtype)], dim=1) + elif mask_w0 != x_mask.size(-1): + message = torch.cat([message, torch.zeros(message.size(0), x_mask.size(-2), x_mask.size(-1)-mask_w0, self.nhead, self.dim, device=message.device, dtype=message.dtype)], dim=2) + # message = message.view(bs, -1, self.nhead*self.dim) # [N, L, C] + + else: + assert x_mask is None and source_mask is None + + with profiler.profile("merge"): + message = self.merge(message.view(bs, -1, self.nhead*self.dim)) # [N, L, C] + # message = message.reshape(bs, C, H1//self.pool_size, W1//self.pool_size) # [N, C, H, W] bug??? 
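+        # Two upsampling strategies follow: `scatter` uses repeat_interleave,
+        # a nearest-neighbour broadcast in which each coarse token fills a whole
+        # pool_size x pool_size block, while the default path blends
+        # neighbouring coarse tokens with bilinear interpolation.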
+ + with profiler.profile("rearrange*1"): + message = rearrange(message, 'b (h w) c -> b c h w', h=H1//self.pool_size, w=W1//self.pool_size) # [N, C, H, W] + + with profiler.profile("upsample"): + if self.scatter: + message = torch.repeat_interleave(message, self.pool_size, dim=-2) + message = torch.repeat_interleave(message, self.pool_size, dim=-1) + # message = self.aggregate(message) + # assert False + else: + message = torch.nn.functional.interpolate(message, scale_factor=self.pool_size, mode='bilinear', align_corners=False) # [N, C, H1, W1] + + # message = self.norm1(message) + + # feed-forward network + with profiler.profile("feed-forward_mlp+permute*2+norm2"): + message = self.mlp(torch.cat([x, message], dim=1).permute(0, 2, 3, 1)) # [N, H1, W1, C] + message = self.norm2(message).permute(0, 3, 1, 2) # [N, C, H1, W1] + + return x + message + + + def _build_projection(self, + dim_in, + dim_out, + kernel_size=3, + padding=1, + stride=1, + method='dw_bn', + ): + if method == 'dw_bn': + proj = nn.Sequential(OrderedDict([ + ('conv', nn.Conv2d( + dim_in, + dim_in, + kernel_size=kernel_size, + padding=padding, + stride=stride, + bias=False, + groups=dim_in + )), + ('bn', nn.BatchNorm2d(dim_in)), + # ('rearrage', Rearrange('b c h w -> b (h w) c')), + ])) + elif method == 'avg': + proj = nn.Sequential(OrderedDict([ + ('avg', nn.AvgPool2d( + kernel_size=kernel_size, + padding=padding, + stride=stride, + ceil_mode=True + )), + # ('rearrage', Rearrange('b c h w -> b (h w) c')), + ])) + elif method == 'linear': + proj = None + elif method == 'dw': + proj = nn.Sequential(OrderedDict([ + ('conv', nn.Conv2d( + dim_in, + dim_in, + kernel_size=kernel_size, + padding=padding, + stride=stride, + bias=False, + groups=dim_in + )), + # ('rearrage', Rearrange('b c h w -> b (h w) c')), + ])) + else: + raise ValueError('Unknown method ({})'.format(method)) + + return proj + +class AG_RoPE_EncoderLayer(nn.Module): + def __init__(self, + d_model, + nhead, + attention='linear', + pool_size=4, + pool_size2=4, + xformer=False, + leaky=-1.0, + dw_conv=False, + dw_conv2=False, + scatter=False, + norm_before=True, + rope=False, + npe=None, + vit_norm=False, + dw_proj=False, + ): + super(AG_RoPE_EncoderLayer, self).__init__() + + self.pool_size = pool_size + self.pool_size2 = pool_size2 + self.dw_conv = dw_conv + self.dw_conv2 = dw_conv2 + self.scatter = scatter + self.norm_before = norm_before + self.vit_norm = vit_norm + self.dw_proj = dw_proj + self.rope = rope + if self.dw_conv and self.pool_size != 1: + self.aggregate = nn.Conv2d(d_model, d_model, kernel_size=pool_size, padding=0, stride=pool_size, bias=False, groups=d_model) + if self.dw_conv2 and self.pool_size2 != 1: + self.aggregate2 = nn.Conv2d(d_model, d_model, kernel_size=pool_size2, padding=0, stride=pool_size2, bias=False, groups=d_model) + + self.dim = d_model // nhead + self.nhead = nhead + + self.max_pool = torch.nn.MaxPool2d(kernel_size=self.pool_size2, stride=self.pool_size2) + + # multi-head attention + if self.dw_proj: + self.q_proj = nn.Conv2d(d_model, d_model, kernel_size=3, padding=1, stride=1, bias=False, groups=d_model) + self.k_proj = nn.Conv2d(d_model, d_model, kernel_size=3, padding=1, stride=1, bias=False, groups=d_model) + self.v_proj = nn.Conv2d(d_model, d_model, kernel_size=3, padding=1, stride=1, bias=False, groups=d_model) + else: + self.q_proj = nn.Linear(d_model, d_model, bias=False) + self.k_proj = nn.Linear(d_model, d_model, bias=False) + self.v_proj = nn.Linear(d_model, d_model, bias=False) + + if self.rope: + self.rope_pos_enc 
= RoPEPositionEncodingSine(d_model, max_shape=(256, 256), npe=npe, ropefp16=True) + + if xformer: + self.attention = XAttention() + else: + self.attention = LinearAttention() if attention == 'linear' else FullAttention() + self.merge = nn.Linear(d_model, d_model, bias=False) + + # feed-forward network + if leaky > 0: + if self.vit_norm: + self.mlp = nn.Sequential( + nn.Linear(d_model, d_model*2, bias=False), + nn.LeakyReLU(leaky, True), + nn.Linear(d_model*2, d_model, bias=False), + ) + else: + self.mlp = nn.Sequential( + nn.Linear(d_model*2, d_model*2, bias=False), + nn.LeakyReLU(leaky, True), + nn.Linear(d_model*2, d_model, bias=False), + ) + + else: + if self.vit_norm: + self.mlp = nn.Sequential( + nn.Linear(d_model, d_model*2, bias=False), + nn.ReLU(True), + nn.Linear(d_model*2, d_model, bias=False), + ) + else: + self.mlp = nn.Sequential( + nn.Linear(d_model*2, d_model*2, bias=False), + nn.ReLU(True), + nn.Linear(d_model*2, d_model, bias=False), + ) + + # norm and dropout + self.norm1 = nn.LayerNorm(d_model) + self.norm2 = nn.LayerNorm(d_model) + + # self.norm1 = nn.BatchNorm2d(d_model) + + def forward(self, x, source, x_mask=None, source_mask=None): + """ + Args: + x (torch.Tensor): [N, C, H1, W1] + source (torch.Tensor): [N, C, H2, W2] + x_mask (torch.Tensor): [N, H1, W1] (optional) (L = H1*W1) + source_mask (torch.Tensor): [N, H2, W2] (optional) (S = H2*W2) + """ + bs, C, H1, W1 = x.size() + H2, W2 = source.size(-2), source.size(-1) + + + if self.norm_before and not self.vit_norm: + if self.pool_size == 1: + query = self.norm1(x.permute(0,2,3,1)) # [N, H, W, C] + elif self.dw_conv: + query = self.norm1(self.aggregate(x).permute(0,2,3,1)) # [N, H, W, C] + else: + query = self.norm1(self.max_pool(x).permute(0,2,3,1)) # [N, H, W, C] + if self.pool_size2 == 1: + source = self.norm1(source.permute(0,2,3,1)) # [N, H, W, C] + elif self.dw_conv2: + source = self.norm1(self.aggregate2(source).permute(0,2,3,1)) # [N, H, W, C] + else: + source = self.norm1(self.max_pool(source).permute(0,2,3,1)) # [N, H, W, C] + elif self.vit_norm: + if self.pool_size == 1: + query = self.norm1(x.permute(0,2,3,1)) # [N, H, W, C] + elif self.dw_conv: + query = self.aggregate(self.norm1(x.permute(0,2,3,1)).permute(0,3,1,2)).permute(0,2,3,1) # [N, H, W, C] + else: + query = self.max_pool(self.norm1(x.permute(0,2,3,1)).permute(0,3,1,2)).permute(0,2,3,1) # [N, H, W, C] + if self.pool_size2 == 1: + source = self.norm1(source.permute(0,2,3,1)) # [N, H, W, C] + elif self.dw_conv2: + source = self.aggregate2(self.norm1(source.permute(0,2,3,1)).permute(0,3,1,2)).permute(0,2,3,1) # [N, H, W, C] + else: + source = self.max_pool(self.norm1(source.permute(0,2,3,1)).permute(0,3,1,2)).permute(0,2,3,1) # [N, H, W, C] + else: + if self.pool_size == 1: + query = x.permute(0,2,3,1) # [N, H, W, C] + elif self.dw_conv: + query = self.aggregate(x).permute(0,2,3,1) # [N, H, W, C] + else: + query = self.max_pool(x).permute(0,2,3,1) # [N, H, W, C] + if self.pool_size2 == 1: + source = source.permute(0,2,3,1) # [N, H, W, C] + elif self.dw_conv2: + source = self.aggregate2(source).permute(0,2,3,1) # [N, H, W, C] + else: + source = self.max_pool(source).permute(0,2,3,1) # [N, H, W, C] + + # projection + if self.dw_proj: + query = self.q_proj(query.permute(0,3,1,2)).permute(0,2,3,1) + key = self.k_proj(source.permute(0,3,1,2)).permute(0,2,3,1) + value = self.v_proj(source.permute(0,3,1,2)).permute(0,2,3,1) + else: + query, key, value = self.q_proj(query), self.k_proj(source), self.v_proj(source) + + # RoPE + if self.rope: + query = 
self.rope_pos_enc(query) + if self.pool_size == 1 and self.pool_size2 == 4: + key = self.rope_pos_enc(key, 4) + else: + key = self.rope_pos_enc(key) + + use_mask = x_mask is not None and source_mask is not None + if bs == 1 or not use_mask: + if use_mask: + # downsample mask + if self.pool_size ==1: + pass + else: + x_mask = self.max_pool(x_mask.float()).bool() # [N, H1//pool, W1//pool] + + if self.pool_size2 ==1: + pass + else: + source_mask = self.max_pool(source_mask.float()).bool() + + mask_h0, mask_w0 = x_mask[0].sum(-2)[0], x_mask[0].sum(-1)[0] + mask_h1, mask_w1 = source_mask[0].sum(-2)[0], source_mask[0].sum(-1)[0] + + query = query[:, :mask_h0, :mask_w0, :] + key = key[:, :mask_h1, :mask_w1, :] + value = value[:, :mask_h1, :mask_w1, :] + else: + assert x_mask is None and source_mask is None + + if PFLASH_AVAILABLE: # [N, H, W, C] -> [N, h, L, D] + query = rearrange(query, 'n h w (nhead d) -> n nhead (h w) d', nhead=self.nhead, d=self.dim) + key = rearrange(key, 'n h w (nhead d) -> n nhead (h w) d', nhead=self.nhead, d=self.dim) + value = rearrange(value, 'n h w (nhead d) -> n nhead (h w) d', nhead=self.nhead, d=self.dim) + else: # N L H D + query = rearrange(query, 'n h w (nhead d) -> n (h w) nhead d', nhead=self.nhead, d=self.dim) + key = rearrange(key, 'n h w (nhead d) -> n (h w) nhead d', nhead=self.nhead, d=self.dim) + value = rearrange(value, 'n h w (nhead d) -> n (h w) nhead d', nhead=self.nhead, d=self.dim) + + message = self.attention(query, key, value, q_mask=None, kv_mask=None) # [N, L, h, D] or [N, h, L, D] + + if PFLASH_AVAILABLE: # [N, h, L, D] -> [N, L, h, D] + message = rearrange(message, 'n nhead L d -> n L nhead d', nhead=self.nhead, d=self.dim) + + if use_mask: # padding zero + message = message.view(bs, mask_h0, mask_w0, self.nhead, self.dim) # [N L h D] + if mask_h0 != x_mask.size(-2): + message = torch.cat([message, torch.zeros(message.size(0), x_mask.size(-2)-mask_h0, x_mask.size(-1), self.nhead, self.dim, device=message.device, dtype=message.dtype)], dim=1) + elif mask_w0 != x_mask.size(-1): + message = torch.cat([message, torch.zeros(message.size(0), x_mask.size(-2), x_mask.size(-1)-mask_w0, self.nhead, self.dim, device=message.device, dtype=message.dtype)], dim=2) + else: + assert x_mask is None and source_mask is None + + message = self.merge(message.reshape(bs, -1, self.nhead*self.dim)) # [N, L, C] + message = rearrange(message, 'b (h w) c -> b c h w', h=H1//self.pool_size, w=W1//self.pool_size) # [N, C, H, W] + + if self.pool_size == 1: + pass + else: + if self.scatter: + message = torch.repeat_interleave(message, self.pool_size, dim=-2) + message = torch.repeat_interleave(message, self.pool_size, dim=-1) + message = message * self.aggregate.weight.data.reshape(1, C, self.pool_size, self.pool_size).repeat(1,1,message.shape[-2]//self.pool_size,message.shape[-1]//self.pool_size) + else: + message = torch.nn.functional.interpolate(message, scale_factor=self.pool_size, mode='bilinear', align_corners=False) # [N, C, H1, W1] + + if not self.norm_before and not self.vit_norm: + message = self.norm1(message.permute(0,2,3,1)).permute(0,3,1,2) # [N, C, H, W] + + # feed-forward network + if self.vit_norm: + message_inter = (x + message) + del x + message = self.norm2(message_inter.permute(0, 2, 3, 1)) + message = self.mlp(message).permute(0, 3, 1, 2) # [N, C, H1, W1] + return message_inter + message + else: + message = self.mlp(torch.cat([x, message], dim=1).permute(0, 2, 3, 1)) # [N, H1, W1, C] + message = self.norm2(message).permute(0, 3, 1, 2) # [N, C, H1, W1] 
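+            # Two FFN arrangements: with vit_norm, a pre-norm (ViT-style) block
+            # is applied to the residual sum and returned with a second residual;
+            # otherwise the original LoFTR post-norm form above feeds
+            # cat([x, message]) through the MLP and normalises afterwards.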
+
+            return x + message
+        else:  # mask with bs > 1
+            if self.pool_size == 1:
+                pass
+            else:
+                x_mask = self.max_pool(x_mask.float()).bool()
+
+            if self.pool_size2 == 1:
+                pass
+            else:
+                source_mask = self.max_pool(source_mask.float()).bool()
+            m_list = []
+            for i in range(bs):
+                mask_h0, mask_w0 = x_mask[i].sum(-2)[0], x_mask[i].sum(-1)[0]
+                mask_h1, mask_w1 = source_mask[i].sum(-2)[0], source_mask[i].sum(-1)[0]
+
+                q = query[i:i+1, :mask_h0, :mask_w0, :]
+                k = key[i:i+1, :mask_h1, :mask_w1, :]
+                v = value[i:i+1, :mask_h1, :mask_w1, :]
+
+                if PFLASH_AVAILABLE:  # [N, H, W, C] -> [N, h, L, D]
+                    q = rearrange(q, 'n h w (nhead d) -> n nhead (h w) d', nhead=self.nhead, d=self.dim)
+                    k = rearrange(k, 'n h w (nhead d) -> n nhead (h w) d', nhead=self.nhead, d=self.dim)
+                    v = rearrange(v, 'n h w (nhead d) -> n nhead (h w) d', nhead=self.nhead, d=self.dim)
+                else:  # N L H D
+                    q = rearrange(q, 'n h w (nhead d) -> n (h w) nhead d', nhead=self.nhead, d=self.dim)
+                    k = rearrange(k, 'n h w (nhead d) -> n (h w) nhead d', nhead=self.nhead, d=self.dim)
+                    v = rearrange(v, 'n h w (nhead d) -> n (h w) nhead d', nhead=self.nhead, d=self.dim)
+
+                m = self.attention(q, k, v, q_mask=None, kv_mask=None)  # [N, L, h, D] or [N, h, L, D]
+
+                if PFLASH_AVAILABLE:  # [N, h, L, D] -> [N, L, h, D]
+                    m = rearrange(m, 'n nhead L d -> n L nhead d', nhead=self.nhead, d=self.dim)
+
+                m = m.view(1, mask_h0, mask_w0, self.nhead, self.dim)
+                if mask_h0 != x_mask.size(-2):
+                    m = torch.cat([m, torch.zeros(1, x_mask.size(-2)-mask_h0, x_mask.size(-1), self.nhead, self.dim, device=m.device, dtype=m.dtype)], dim=1)
+                elif mask_w0 != x_mask.size(-1):
+                    m = torch.cat([m, torch.zeros(1, x_mask.size(-2), x_mask.size(-1)-mask_w0, self.nhead, self.dim, device=m.device, dtype=m.dtype)], dim=2)
+                m_list.append(m)
+            m = torch.cat(m_list, dim=0)
+
+            m = self.merge(m.reshape(bs, -1, self.nhead*self.dim))  # [N, L, C]
+            # m = m.reshape(bs, C, H1//self.pool_size, W1//self.pool_size) was a bug:
+            # m is [N, L, C], so a plain reshape scrambles axes; rearrange is correct.
+            m = rearrange(m, 'b (h w) c -> b c h w', h=H1//self.pool_size, w=W1//self.pool_size)  # [N, C, H, W]
+
+            if self.pool_size == 1:
+                pass
+            else:
+                if self.scatter:
+                    m = torch.repeat_interleave(m, self.pool_size, dim=-2)
+                    m = torch.repeat_interleave(m, self.pool_size, dim=-1)
+                    m = m * self.aggregate.weight.data.reshape(1, C, self.pool_size, self.pool_size).repeat(1,1,m.shape[-2]//self.pool_size,m.shape[-1]//self.pool_size)
+                else:
+                    m = torch.nn.functional.interpolate(m, scale_factor=self.pool_size, mode='bilinear', align_corners=False)  # [N, C, H1, W1]
+
+            if not self.norm_before and not self.vit_norm:
+                m = self.norm1(m.permute(0,2,3,1)).permute(0,3,1,2)  # [N, C, H, W]
+
+            # feed-forward network
+            if self.vit_norm:
+                m_inter = (x + m)
+                del x
+                m = self.norm2(m_inter.permute(0, 2, 3, 1))
+                m = self.mlp(m).permute(0, 3, 1, 2)  # [N, C, H1, W1]
+                return m_inter + m
+            else:
+                m = self.mlp(torch.cat([x, m], dim=1).permute(0, 2, 3, 1))  # [N, H1, W1, C]
+                m = self.norm2(m).permute(0, 3, 1, 2)  # [N, C, H1, W1]
+
+                return x + m
+
+class AG_Conv_EncoderLayer(nn.Module):
+    def __init__(self,
+                 d_model,
+                 nhead,
+                 attention='linear',
+                 pool_size=4,
+                 bn=True,
+                 xformer=False,
+                 leaky=-1.0,
+                 dw_conv=False,
+                 dw_conv2=False,
+                 scatter=False,
+                 norm_before=True,
+                 ):
+        super(AG_Conv_EncoderLayer, self).__init__()
+
+        self.pool_size = pool_size
+        self.dw_conv = dw_conv
+        self.dw_conv2 = dw_conv2
+        self.scatter = scatter
+        self.norm_before = norm_before
+        if self.dw_conv:
+            self.aggregate = nn.Conv2d(d_model, d_model,
kernel_size=pool_size, padding=0, stride=pool_size, bias=False, groups=d_model) + if self.dw_conv2: + self.aggregate2 = nn.Conv2d(d_model, d_model, kernel_size=pool_size, padding=0, stride=pool_size, bias=False, groups=d_model) + self.dim = d_model // nhead + self.nhead = nhead + + self.max_pool = torch.nn.MaxPool2d(kernel_size=self.pool_size, stride=self.pool_size) + + # multi-head attention + if bn: + method = 'dw_bn' + else: + method = 'dw' + self.q_proj_conv = self._build_projection(d_model, d_model, method=method) + self.k_proj_conv = self._build_projection(d_model, d_model, method=method) + self.v_proj_conv = self._build_projection(d_model, d_model, method=method) + + if xformer: + self.attention = XAttention() + else: + self.attention = LinearAttention() if attention == 'linear' else FullAttention() + self.merge = nn.Linear(d_model, d_model, bias=False) + + # feed-forward network + if leaky > 0: + self.mlp = nn.Sequential( + nn.Linear(d_model*2, d_model*2, bias=False), + nn.LeakyReLU(leaky, True), + nn.Linear(d_model*2, d_model, bias=False), + ) + + else: + self.mlp = nn.Sequential( + nn.Linear(d_model*2, d_model*2, bias=False), + nn.ReLU(True), + nn.Linear(d_model*2, d_model, bias=False), + ) + + # norm and dropout + self.norm1 = nn.LayerNorm(d_model) + self.norm2 = nn.LayerNorm(d_model) + + def forward(self, x, source, x_mask=None, source_mask=None): + """ + Args: + x (torch.Tensor): [N, C, H1, W1] + source (torch.Tensor): [N, C, H2, W2] + x_mask (torch.Tensor): [N, H1, W1] (optional) (L = H1*W1) + source_mask (torch.Tensor): [N, H2, W2] (optional) (S = H2*W2) + """ + bs = x.size(0) + H1, W1 = x.size(-2), x.size(-1) + H2, W2 = source.size(-2), source.size(-1) + C = x.shape[-3] + + if self.norm_before: + if self.dw_conv: + query = self.norm1(self.aggregate(x).permute(0,2,3,1)).permute(0,3,1,2) + else: + query = self.norm1(self.max_pool(x).permute(0,2,3,1)).permute(0,3,1,2) + if self.dw_conv2: + source = self.norm1(self.aggregate2(source).permute(0,2,3,1)).permute(0,3,1,2) + else: + source = self.norm1(self.max_pool(source).permute(0,2,3,1)).permute(0,3,1,2) + else: + if self.dw_conv: + query = self.aggregate(x) + else: + query = self.max_pool(x) + if self.dw_conv2: + source = self.aggregate2(source) + else: + source = self.max_pool(source) + + key, value = source, source + + query = self.q_proj_conv(query) # [N, C, H1//pool, W1//pool] + key = self.k_proj_conv(key) + value = self.v_proj_conv(value) + + use_mask = x_mask is not None and source_mask is not None + if bs == 1 or not use_mask: + if use_mask: + x_mask = self.max_pool(x_mask.float()).bool() # [N, H1//pool, W1//pool] + source_mask = self.max_pool(source_mask.float()).bool() + + mask_h0, mask_w0 = x_mask[0].sum(-2)[0], x_mask[0].sum(-1)[0] + mask_h1, mask_w1 = source_mask[0].sum(-2)[0], source_mask[0].sum(-1)[0] + + query = query[:, :, :mask_h0, :mask_w0] + key = key[:, :, :mask_h1, :mask_w1] + value = value[:, :, :mask_h1, :mask_w1] + + else: + assert x_mask is None and source_mask is None + + if PFLASH_AVAILABLE: # N H L D + query = rearrange(query, 'n (nhead d) h w -> n nhead (h w) d', nhead=self.nhead, d=self.dim) + key = rearrange(key, 'n (nhead d) h w -> n nhead (h w) d', nhead=self.nhead, d=self.dim) + value = rearrange(value, 'n (nhead d) h w -> n nhead (h w) d', nhead=self.nhead, d=self.dim) + + else: # N L H D + query = rearrange(query, 'n (nhead d) h w -> n (h w) nhead d', nhead=self.nhead, d=self.dim) # [N, L, H, D] + key = rearrange(key, 'n (nhead d) h w -> n (h w) nhead d', nhead=self.nhead, d=self.dim) # [N, 
S, H, D] + value = rearrange(value, 'n (nhead d) h w -> n (h w) nhead d', nhead=self.nhead, d=self.dim) # [N, S, H, D] + + message = self.attention(query, key, value, q_mask=None, kv_mask=None) # [N, L, H, D] or [N, H, L, D] + + if PFLASH_AVAILABLE: # N H L D + message = rearrange(message, 'n nhead L d -> n L nhead d', nhead=self.nhead, d=self.dim) + + if use_mask: # padding zero + message = message.view(bs, mask_h0, mask_w0, self.nhead, self.dim) # [N L H D] + if mask_h0 != x_mask.size(-2): + message = torch.cat([message, torch.zeros(message.size(0), x_mask.size(-2)-mask_h0, x_mask.size(-1), self.nhead, self.dim, device=message.device, dtype=message.dtype)], dim=1) + elif mask_w0 != x_mask.size(-1): + message = torch.cat([message, torch.zeros(message.size(0), x_mask.size(-2), x_mask.size(-1)-mask_w0, self.nhead, self.dim, device=message.device, dtype=message.dtype)], dim=2) + else: + assert x_mask is None and source_mask is None + + message = self.merge(message.view(bs, -1, self.nhead*self.dim)) # [N, L, C] + message = rearrange(message, 'b (h w) c -> b c h w', h=H1//self.pool_size, w=W1//self.pool_size) # [N, C, H, W] + + if self.scatter: + message = torch.repeat_interleave(message, self.pool_size, dim=-2) + message = torch.repeat_interleave(message, self.pool_size, dim=-1) + message = message * self.aggregate.weight.data.reshape(1, C, self.pool_size, self.pool_size).repeat(1,1,message.shape[-2]//self.pool_size,message.shape[-1]//self.pool_size) + else: + message = torch.nn.functional.interpolate(message, scale_factor=self.pool_size, mode='bilinear', align_corners=False) # [N, C, H1, W1] + + if not self.norm_before: + message = self.norm1(message.permute(0,2,3,1)).permute(0,3,1,2) # [N, C, H, W] + + # feed-forward network + message = self.mlp(torch.cat([x, message], dim=1).permute(0, 2, 3, 1)) # [N, H1, W1, C] + message = self.norm2(message).permute(0, 3, 1, 2) # [N, C, H1, W1] + + return x + message + else: # mask with bs > 1 + x_mask = self.max_pool(x_mask.float()).bool() + source_mask = self.max_pool(source_mask.float()).bool() + m_list = [] + for i in range(bs): + mask_h0, mask_w0 = x_mask[i].sum(-2)[0], x_mask[i].sum(-1)[0] + mask_h1, mask_w1 = source_mask[i].sum(-2)[0], source_mask[i].sum(-1)[0] + + q = query[i:i+1, :, :mask_h0, :mask_w0] + k = key[i:i+1, :, :mask_h1, :mask_w1] + v = value[i:i+1, :, :mask_h1, :mask_w1] + + if PFLASH_AVAILABLE: # N H L D + q = rearrange(q, 'n (nhead d) h w -> n nhead (h w) d', nhead=self.nhead, d=self.dim) + k = rearrange(k, 'n (nhead d) h w -> n nhead (h w) d', nhead=self.nhead, d=self.dim) + v = rearrange(v, 'n (nhead d) h w -> n nhead (h w) d', nhead=self.nhead, d=self.dim) + + else: # N L H D + q = rearrange(q, 'n (nhead d) h w -> n (h w) nhead d', nhead=self.nhead, d=self.dim) # [N, L, H, D] + k = rearrange(k, 'n (nhead d) h w -> n (h w) nhead d', nhead=self.nhead, d=self.dim) # [N, S, H, D] + v = rearrange(v, 'n (nhead d) h w -> n (h w) nhead d', nhead=self.nhead, d=self.dim) # [N, S, H, D] + + m = self.attention(q, k, v, q_mask=None, kv_mask=None) # [N, L, H, D] + + if PFLASH_AVAILABLE: # N H L D + m = rearrange(m, 'n nhead L d -> n L nhead d', nhead=self.nhead, d=self.dim) + + m = m.view(1, mask_h0, mask_w0, self.nhead, self.dim) + if mask_h0 != x_mask.size(-2): + m = torch.cat([m, torch.zeros(1, x_mask.size(-2)-mask_h0, x_mask.size(-1), self.nhead, self.dim, device=m.device, dtype=m.dtype)], dim=1) + elif mask_w0 != x_mask.size(-1): + m = torch.cat([m, torch.zeros(1, x_mask.size(-2), x_mask.size(-1)-mask_w0, self.nhead, self.dim, 
device=m.device, dtype=m.dtype)], dim=2) + m_list.append(m) + m = torch.cat(m_list, dim=0) + + m = self.merge(m.view(bs, -1, self.nhead*self.dim)) # [N, L, C] + + # m = m.reshape(bs, C, H1//self.pool_size, W1//self.pool_size) # [N, C, H, W] why this bug worked + m = rearrange(m, 'b (h w) c -> b c h w', h=H1//self.pool_size, w=W1//self.pool_size) # [N, C, H, W] + + if self.scatter: + m = torch.repeat_interleave(m, self.pool_size, dim=-2) + m = torch.repeat_interleave(m, self.pool_size, dim=-1) + m = m * self.aggregate.weight.data.reshape(1, C, self.pool_size, self.pool_size).repeat(1,1,m.shape[-2]//self.pool_size,m.shape[-1]//self.pool_size) + else: + m = torch.nn.functional.interpolate(m, scale_factor=self.pool_size, mode='bilinear', align_corners=False) # [N, C, H1, W1] + + if not self.norm_before: + m = self.norm1(m.permute(0,2,3,1)).permute(0,3,1,2) # [N, C, H, W] + + # feed-forward network + m = self.mlp(torch.cat([x, m], dim=1).permute(0, 2, 3, 1)) # [N, H1, W1, C] + m = self.norm2(m).permute(0, 3, 1, 2) # [N, C, H1, W1] + + return x + m + + def _build_projection(self, + dim_in, + dim_out, + kernel_size=3, + padding=1, + stride=1, + method='dw_bn', + ): + if method == 'dw_bn': + proj = nn.Sequential(OrderedDict([ + ('conv', nn.Conv2d( + dim_in, + dim_in, + kernel_size=kernel_size, + padding=padding, + stride=stride, + bias=False, + groups=dim_in + )), + ('bn', nn.BatchNorm2d(dim_in)), + # ('rearrage', Rearrange('b c h w -> b (h w) c')), + ])) + elif method == 'avg': + proj = nn.Sequential(OrderedDict([ + ('avg', nn.AvgPool2d( + kernel_size=kernel_size, + padding=padding, + stride=stride, + ceil_mode=True + )), + # ('rearrage', Rearrange('b c h w -> b (h w) c')), + ])) + elif method == 'linear': + proj = None + elif method == 'dw': + proj = nn.Sequential(OrderedDict([ + ('conv', nn.Conv2d( + dim_in, + dim_in, + kernel_size=kernel_size, + padding=padding, + stride=stride, + bias=False, + groups=dim_in + )), + # ('rearrage', Rearrange('b c h w -> b (h w) c')), + ])) + else: + raise ValueError('Unknown method ({})'.format(method)) + + return proj + + +class RoPELoFTREncoderLayer(nn.Module): + def __init__(self, + d_model, + nhead, + attention='linear', + rope=False, + token_mixer=None, + ): + super(RoPELoFTREncoderLayer, self).__init__() + + self.dim = d_model // nhead + self.nhead = nhead + + # multi-head attention + if token_mixer is None: + self.q_proj = nn.Linear(d_model, d_model, bias=False) + self.k_proj = nn.Linear(d_model, d_model, bias=False) + self.v_proj = nn.Linear(d_model, d_model, bias=False) + + self.rope = rope + self.token_mixer = None + if token_mixer is not None: + self.token_mixer = token_mixer + if token_mixer == 'dwcn': + self.attention = nn.Sequential(OrderedDict([ + ('conv', nn.Conv2d( + d_model, + d_model, + kernel_size=3, + padding=1, + stride=1, + bias=False, + groups=d_model + )), + ])) + elif self.rope: + assert attention == 'linear' + self.attention = RoPELinearAttention() + + if token_mixer is None: + self.merge = nn.Linear(d_model, d_model, bias=False) + + # feed-forward network + if token_mixer is None: + self.mlp = nn.Sequential( + nn.Linear(d_model*2, d_model*2, bias=False), + nn.ReLU(True), + nn.Linear(d_model*2, d_model, bias=False), + ) + else: + self.mlp = nn.Sequential( + nn.Linear(d_model, d_model, bias=False), + nn.ReLU(True), + nn.Linear(d_model, d_model, bias=False), + ) + # norm and dropout + self.norm1 = nn.LayerNorm(d_model) + self.norm2 = nn.LayerNorm(d_model) + + def forward(self, x, source, x_mask=None, source_mask=None, H=None, W=None): + 
""" + Args: + x (torch.Tensor): [N, L, C] + source (torch.Tensor): [N, L, C] + x_mask (torch.Tensor): [N, L] (optional) + source_mask (torch.Tensor): [N, S] (optional) + """ + bs = x.size(0) + assert H*W == x.size(-2) + + # x = rearrange(x, 'n c h w -> n (h w) c') + # source = rearrange(source, 'n c h w -> n (h w) c') + query, key, value = x, source, source + + if self.token_mixer is not None: + # multi-head attention + m = self.norm1(x) + m = rearrange(m, 'n (h w) c -> n c h w', h=H, w=W) + m = self.attention(m) + m = rearrange(m, 'n c h w -> n (h w) c') + + x = x + m + x = x + self.mlp(self.norm2(x)) + return x + else: + # multi-head attention + query = self.q_proj(query).view(bs, -1, self.nhead, self.dim) # [N, L, (H, D)] + key = self.k_proj(key).view(bs, -1, self.nhead, self.dim) # [N, S, (H, D)] + value = self.v_proj(value).view(bs, -1, self.nhead, self.dim) + message = self.attention(query, key, value, q_mask=x_mask, kv_mask=source_mask, H=H, W=W) # [N, L, (H, D)] + message = self.merge(message.view(bs, -1, self.nhead*self.dim)) # [N, L, C] + message = self.norm1(message) + + # feed-forward network + message = self.mlp(torch.cat([x, message], dim=2)) + message = self.norm2(message) + + return x + message + +class LoFTREncoderLayer(nn.Module): + def __init__(self, + d_model, + nhead, + attention='linear', + xformer=False, + ): + super(LoFTREncoderLayer, self).__init__() + + self.dim = d_model // nhead + self.nhead = nhead + + # multi-head attention + self.q_proj = nn.Linear(d_model, d_model, bias=False) + self.k_proj = nn.Linear(d_model, d_model, bias=False) + self.v_proj = nn.Linear(d_model, d_model, bias=False) + + if xformer: + self.attention = XAttention() + else: + self.attention = LinearAttention() if attention == 'linear' else FullAttention() + self.merge = nn.Linear(d_model, d_model, bias=False) + + # feed-forward network + self.mlp = nn.Sequential( + nn.Linear(d_model*2, d_model*2, bias=False), + nn.ReLU(True), + nn.Linear(d_model*2, d_model, bias=False), + ) + + # norm and dropout + self.norm1 = nn.LayerNorm(d_model) + self.norm2 = nn.LayerNorm(d_model) + + def forward(self, x, source, x_mask=None, source_mask=None): + """ + Args: + x (torch.Tensor): [N, L, C] + source (torch.Tensor): [N, S, C] + x_mask (torch.Tensor): [N, L] (optional) + source_mask (torch.Tensor): [N, S] (optional) + """ + bs = x.size(0) + query, key, value = x, source, source + + # multi-head attention + query = self.q_proj(query).view(bs, -1, self.nhead, self.dim) # [N, L, (H, D)] + key = self.k_proj(key).view(bs, -1, self.nhead, self.dim) # [N, S, (H, D)] + value = self.v_proj(value).view(bs, -1, self.nhead, self.dim) + message = self.attention(query, key, value, q_mask=x_mask, kv_mask=source_mask) # [N, L, (H, D)] + message = self.merge(message.view(bs, -1, self.nhead*self.dim)) # [N, L, C] + message = self.norm1(message) + + # feed-forward network + message = self.mlp(torch.cat([x, message], dim=2)) + message = self.norm2(message) + + return x + message + + def pro(self, x, source, x_mask=None, source_mask=None, profiler=None): + """ + Args: + x (torch.Tensor): [N, L, C] + source (torch.Tensor): [N, S, C] + x_mask (torch.Tensor): [N, L] (optional) + source_mask (torch.Tensor): [N, S] (optional) + """ + bs = x.size(0) + query, key, value = x, source, source + + # multi-head attention + with profiler.profile("proj*3"): + query = self.q_proj(query).view(bs, -1, self.nhead, self.dim) # [N, L, (H, D)] + key = self.k_proj(key).view(bs, -1, self.nhead, self.dim) # [N, S, (H, D)] + value = 
self.v_proj(value).view(bs, -1, self.nhead, self.dim)
+        with profiler.profile("attention"):
+            message = self.attention(query, key, value, q_mask=x_mask, kv_mask=source_mask)  # [N, L, (H, D)]
+        with profiler.profile("merge"):
+            message = self.merge(message.view(bs, -1, self.nhead*self.dim))  # [N, L, C]
+        with profiler.profile("norm1"):
+            message = self.norm1(message)
+
+        # feed-forward network
+        with profiler.profile("mlp"):
+            message = self.mlp(torch.cat([x, message], dim=2))
+        with profiler.profile("norm2"):
+            message = self.norm2(message)
+
+        return x + message
+
+class PANEncoderLayer_cross(nn.Module):
+    def __init__(self,
+                 d_model,
+                 nhead,
+                 attention='linear',
+                 pool_size=4,
+                 bn=True,
+                 ):
+        super(PANEncoderLayer_cross, self).__init__()
+
+        self.pool_size = pool_size
+
+        self.dim = d_model // nhead
+        self.nhead = nhead
+
+        self.max_pool = torch.nn.MaxPool2d(kernel_size=self.pool_size, stride=self.pool_size)
+        # multi-head attention: queries and keys share one projection (symmetric cross attention)
+        if bn:
+            method = 'dw_bn'
+        else:
+            method = 'dw'
+        self.qk_proj_conv = self._build_projection(d_model, d_model, method=method)
+        self.v_proj_conv = self._build_projection(d_model, d_model, method=method)
+
+        # self.q_proj = nn.Linear(d_model, d_model, bias=False)
+        # self.k_proj = nn.Linear(d_model, d_model, bias=False)
+        # self.v_proj = nn.Linear(d_model, d_model, bias=False)
+        self.attention = FullAttention()
+        self.merge = nn.Linear(d_model, d_model, bias=False)
+
+        # feed-forward network
+        self.mlp = nn.Sequential(
+            nn.Linear(d_model*2, d_model*2, bias=False),
+            nn.ReLU(True),
+            nn.Linear(d_model*2, d_model, bias=False),
+        )
+
+        # norm and dropout
+        self.norm1 = nn.LayerNorm(d_model)
+        self.norm2 = nn.LayerNorm(d_model)
+
+        # self.norm1 = nn.BatchNorm2d(d_model)
+
+    def forward(self, x1, x2, x1_mask=None, x2_mask=None):
+        """
+        Args:
+            x1 (torch.Tensor): [N, C, H1, W1]
+            x2 (torch.Tensor): [N, C, H2, W2]
+            x1_mask (torch.Tensor): [N, H1, W1] (optional) (L = H1*W1)
+            x2_mask (torch.Tensor): [N, H2, W2] (optional) (S = H2*W2)
+        """
+        bs = x1.size(0)
+        # note: H1/W1 and H2/W2 below are the pooled (coarse) grid sizes
+        H1, W1 = x1.size(-2) // self.pool_size, x1.size(-1) // self.pool_size
+        H2, W2 = x2.size(-2) // self.pool_size, x2.size(-1) // self.pool_size
+
+        query = self.norm1(self.max_pool(x1).permute(0,2,3,1)).permute(0,3,1,2)
+        key = self.norm1(self.max_pool(x2).permute(0,2,3,1)).permute(0,3,1,2)
+        v2 = self.norm1(self.max_pool(x2).permute(0,2,3,1)).permute(0,3,1,2)
+        v1 = self.norm1(self.max_pool(x1).permute(0,2,3,1)).permute(0,3,1,2)
+
+        # multi-head attention
+        query = self.qk_proj_conv(query)  # [N, C, H1//pool, W1//pool]
+        key = self.qk_proj_conv(key)
+        v2 = self.v_proj_conv(v2)
+        v1 = self.v_proj_conv(v1)
+
+        C = query.shape[-3]
+        if x1_mask is not None and x2_mask is not None:
+            x1_mask = self.max_pool(x1_mask.float()).bool()  # [N, H1//pool, W1//pool]
+            x2_mask = self.max_pool(x2_mask.float()).bool()
+
+            mask_h1, mask_w1 = x1_mask[0].sum(-2)[0], x1_mask[0].sum(-1)[0]
+            mask_h2, mask_w2 = x2_mask[0].sum(-2)[0], x2_mask[0].sum(-1)[0]
+
+            query = query[:, :, :mask_h1, :mask_w1]
+            key = key[:, :, :mask_h2, :mask_w2]
+            v1 = v1[:, :, :mask_h1, :mask_w1]
+            v2 = v2[:, :, :mask_h2, :mask_w2]
+            x1_mask = x1_mask[:, :mask_h1, :mask_w1]
+            x2_mask = x2_mask[:, :mask_h2, :mask_w2]
+
+        else:
+            assert x1_mask is None and x2_mask is None
+
+        query = rearrange(query, 'n (nhead d) h w -> n (h w) nhead d', nhead=self.nhead, d=self.dim)  # [N, L, H, D]
+        key = rearrange(key, 'n (nhead d) h w -> n (h w) nhead d', nhead=self.nhead, d=self.dim)  # [N, S, H, D]
+        v2 = rearrange(v2, 'n (nhead d) h w -> n (h w) nhead d',
nhead=self.nhead, d=self.dim) # [N, S, H, D] + v1 = rearrange(v1, 'n (nhead d) h w -> n (h w) nhead d', nhead=self.nhead, d=self.dim) # [N, S, H, D] + if x2_mask is not None or x1_mask is not None: + x1_mask = x1_mask.flatten(-2) + x2_mask = x2_mask.flatten(-2) + + + QK = torch.einsum("nlhd,nshd->nlsh", query, key) + with torch.autocast(enabled=False, device_type='cuda'): + if x2_mask is not None or x1_mask is not None: + # S1 = S2.transpose(-2,-3).masked_fill(~(x_mask[:, None, :, None] * source_mask[:, :, None, None]), -1e9) # float('-inf') + QK = QK.float().masked_fill_(~(x1_mask[:, :, None, None] * x2_mask[:, None, :, None]), -1e9) # float('-inf') + + + # Compute the attention and the weighted average + softmax_temp = 1. / query.size(3)**.5 # sqrt(D) + S1 = torch.softmax(softmax_temp * QK, dim=2) + S2 = torch.softmax(softmax_temp * QK, dim=3) + + m1 = torch.einsum("nlsh,nshd->nlhd", S1, v2) + m2 = torch.einsum("nlsh,nlhd->nshd", S2, v1) + + if x1_mask is not None and x2_mask is not None: + m1 = m1.view(bs, mask_h1, mask_w1, self.nhead, self.dim) + if mask_h1 != H1: + m1 = torch.cat([m1, torch.zeros(m1.size(0), H1-mask_h1, W1, self.nhead, self.dim, device=m1.device, dtype=m1.dtype)], dim=1) + elif mask_w1 != W1: + m1 = torch.cat([m1, torch.zeros(m1.size(0), H1, W1-mask_w1, self.nhead, self.dim, device=m1.device, dtype=m1.dtype)], dim=2) + else: + assert x1_mask is None and x2_mask is None + + m1 = self.merge(m1.reshape(bs, -1, self.nhead*self.dim)) # [N, L, C] + m1 = rearrange(m1, 'b (h w) c -> b c h w', h=H1, w=W1) # [N, C, H, W] + m1 = torch.nn.functional.interpolate(m1, scale_factor=self.pool_size, mode='bilinear', align_corners=False) # [N, C, H1, W1] + # feed-forward network + m1 = self.mlp(torch.cat([x1, m1], dim=1).permute(0, 2, 3, 1)) # [N, H1, W1, C] + m1 = self.norm2(m1).permute(0, 3, 1, 2) # [N, C, H1, W1] + + if x1_mask is not None and x2_mask is not None: + m2 = m2.view(bs, mask_h2, mask_w2, self.nhead, self.dim) + if mask_h2 != H2: + m2 = torch.cat([m2, torch.zeros(m2.size(0), H2-mask_h2, W2, self.nhead, self.dim, device=m2.device, dtype=m2.dtype)], dim=1) + elif mask_w2 != W2: + m2 = torch.cat([m2, torch.zeros(m2.size(0), H2, W2-mask_w2, self.nhead, self.dim, device=m2.device, dtype=m2.dtype)], dim=2) + else: + assert x1_mask is None and x2_mask is None + + m2 = self.merge(m2.reshape(bs, -1, self.nhead*self.dim)) # [N, L, C] + m2 = rearrange(m2, 'b (h w) c -> b c h w', h=H2, w=W2) # [N, C, H, W] + m2 = torch.nn.functional.interpolate(m2, scale_factor=self.pool_size, mode='bilinear', align_corners=False) # [N, C, H1, W1] + # feed-forward network + m2 = self.mlp(torch.cat([x2, m2], dim=1).permute(0, 2, 3, 1)) # [N, H1, W1, C] + m2 = self.norm2(m2).permute(0, 3, 1, 2) # [N, C, H1, W1] + + return x1 + m1, x2 + m2 + + def _build_projection(self, + dim_in, + dim_out, + kernel_size=3, + padding=1, + stride=1, + method='dw_bn', + ): + if method == 'dw_bn': + proj = nn.Sequential(OrderedDict([ + ('conv', nn.Conv2d( + dim_in, + dim_in, + kernel_size=kernel_size, + padding=padding, + stride=stride, + bias=False, + groups=dim_in + )), + ('bn', nn.BatchNorm2d(dim_in)), + # ('rearrage', Rearrange('b c h w -> b (h w) c')), + ])) + elif method == 'avg': + proj = nn.Sequential(OrderedDict([ + ('avg', nn.AvgPool2d( + kernel_size=kernel_size, + padding=padding, + stride=stride, + ceil_mode=True + )), + # ('rearrage', Rearrange('b c h w -> b (h w) c')), + ])) + elif method == 'linear': + proj = None + elif method == 'dw': + proj = nn.Sequential(OrderedDict([ + ('conv', nn.Conv2d( + dim_in, + 
dim_in, + kernel_size=kernel_size, + padding=padding, + stride=stride, + bias=False, + groups=dim_in + )), + # ('rearrage', Rearrange('b c h w -> b (h w) c')), + ])) + else: + raise ValueError('Unknown method ({})'.format(method)) + + return proj + +class LocalFeatureTransformer(nn.Module): + """A Local Feature Transformer (LoFTR) module.""" + + def __init__(self, config): + super(LocalFeatureTransformer, self).__init__() + + self.full_config = config + self.fine = False + if 'coarse' not in config: + self.fine = True # fine attention + else: + config = config['coarse'] + self.d_model = config['d_model'] + self.nhead = config['nhead'] + self.layer_names = config['layer_names'] + self.pan = config['pan'] + self.bidirect = config['bidirection'] + # prune + self.pool_size = config['pool_size'] + self.matchability = False + self.depth_confidence = -1.0 + self.width_confidence = -1.0 + # self.depth_confidence = config['depth_confidence'] + # self.width_confidence = config['width_confidence'] + # self.matchability = self.depth_confidence > 0 or self.width_confidence > 0 + # self.thr = self.full_config['match_coarse']['thr'] + if not self.fine: + # asy + self.asymmetric = config['asymmetric'] + self.asymmetric_self = config['asymmetric_self'] + # aggregate + self.aggregate = config['dwconv'] + # RoPE + self.rope = config['rope'] + # absPE + self.abspe = config['abspe'] + + else: + self.rope, self.asymmetric, self.asymmetric_self, self.aggregate = False, False, False, False + if self.matchability: + self.n_layers = len(self.layer_names) // 2 + assert self.n_layers == 4 + self.log_assignment = nn.ModuleList( + [MatchAssignment(self.d_model) for _ in range(self.n_layers)]) + self.token_confidence = nn.ModuleList([ + TokenConfidence(self.d_model) for _ in range(self.n_layers-1)]) + + self.CoarseMatching = CoarseMatching(self.full_config['match_coarse']) + + # self only + # if self.rope: + # self_layer = RoPELoFTREncoderLayer(config['d_model'], config['nhead'], config['attention'], config['rope'], config['token_mixer']) + # self.layers = nn.ModuleList([copy.deepcopy(self_layer) for _ in range(len(self.layer_names))]) + + if self.bidirect: + assert config['xformer'] is False and config['pan'] is True + self_layer = PANEncoderLayer(config['d_model'], config['nhead'], config['attention'], config['pool_size'], config['bn'], config['xformer']) + cross_layer = PANEncoderLayer_cross(config['d_model'], config['nhead'], config['attention'], config['pool_size'], config['bn']) + self.layers = nn.ModuleList([copy.deepcopy(self_layer) if _ == 'self' else copy.deepcopy(cross_layer) for _ in self.layer_names]) + else: + if self.aggregate: + if self.rope: + # assert config['npe'][0] == 832 and config['npe'][1] == 832 and config['npe'][2] == 832 and config['npe'][3] == 832 + logger.info(f'npe trainH,trainW,testH,testW: {config["npe"][0]}, {config["npe"][1]}, {config["npe"][2]}, {config["npe"][3]}') + self_layer = AG_RoPE_EncoderLayer(config['d_model'], config['nhead'], config['attention'], config['pool_size'], config['pool_size2'], + config['xformer'], config['leaky'], config['dwconv'], config['dwconv2'], config['scatter'], + config['norm_before'], config['rope'], config['npe'], config['vit_norm'], config['rope_dwproj']) + cross_layer = AG_RoPE_EncoderLayer(config['d_model'], config['nhead'], config['attention'], config['pool_size'], config['pool_size2'], + config['xformer'], config['leaky'], config['dwconv'], config['dwconv2'], config['scatter'], + config['norm_before'], False, config['npe'], config['vit_norm'], 
config['rope_dwproj']) + self.layers = nn.ModuleList([copy.deepcopy(self_layer) if _ == 'self' else copy.deepcopy(cross_layer) for _ in self.layer_names]) + elif self.abspe: + logger.info(f'npe trainH,trainW,testH,testW: {config["npe"][0]}, {config["npe"][1]}, {config["npe"][2]}, {config["npe"][3]}') + self_layer = AG_RoPE_EncoderLayer(config['d_model'], config['nhead'], config['attention'], config['pool_size'], config['pool_size2'], + config['xformer'], config['leaky'], config['dwconv'], config['dwconv2'], config['scatter'], + config['norm_before'], False, config['npe'], config['vit_norm'], config['rope_dwproj']) + cross_layer = AG_RoPE_EncoderLayer(config['d_model'], config['nhead'], config['attention'], config['pool_size'], config['pool_size2'], + config['xformer'], config['leaky'], config['dwconv'], config['dwconv2'], config['scatter'], + config['norm_before'], False, config['npe'], config['vit_norm'], config['rope_dwproj']) + self.layers = nn.ModuleList([copy.deepcopy(self_layer) if _ == 'self' else copy.deepcopy(cross_layer) for _ in self.layer_names]) + + else: + encoder_layer = AG_Conv_EncoderLayer(config['d_model'], config['nhead'], config['attention'], config['pool_size'], config['bn'], + config['xformer'], config['leaky'], config['dwconv'], config['scatter'], + config['norm_before']) + self.layers = nn.ModuleList([copy.deepcopy(encoder_layer) for _ in range(len(self.layer_names))]) + else: + encoder_layer = PANEncoderLayer(config['d_model'], config['nhead'], config['attention'], config['pool_size'], + config['bn'], config['xformer'], config['leaky'], config['dwconv'], config['scatter']) \ + if config['pan'] else LoFTREncoderLayer(config['d_model'], config['nhead'], + config['attention'], config['xformer']) + self.layers = nn.ModuleList([copy.deepcopy(encoder_layer) for _ in range(len(self.layer_names))]) + self._reset_parameters() + + def _reset_parameters(self): + for p in self.parameters(): + if p.dim() > 1: + nn.init.xavier_uniform_(p) + + def forward(self, feat0, feat1, mask0=None, mask1=None, data=None): + """ + Args: + feat0 (torch.Tensor): [N, C, H, W] + feat1 (torch.Tensor): [N, C, H, W] + mask0 (torch.Tensor): [N, L] (optional) + mask1 (torch.Tensor): [N, S] (optional) + """ + # nchw for pan and n(hw)c for loftr + assert self.d_model == feat0.size(1) or self.d_model == feat0.size(-1), "the feature number of src and transformer must be equal" + H0, W0, H1, W1 = feat0.size(-2), feat0.size(-1), feat1.size(-2), feat1.size(-1) + bs = feat0.shape[0] + padding = False + if bs == 1 and mask0 is not None and mask1 is not None and self.pan: # NCHW for pan + mask_H0, mask_W0 = mask0.size(-2), mask0.size(-1) + mask_H1, mask_W1 = mask1.size(-2), mask1.size(-1) + mask_h0, mask_w0 = mask0[0].sum(-2)[0], mask0[0].sum(-1)[0] + mask_h1, mask_w1 = mask1[0].sum(-2)[0], mask1[0].sum(-1)[0] + + #round to self.pool_size + if self.pan: + mask_h0, mask_w0, mask_h1, mask_w1 = mask_h0//self.pool_size*self.pool_size, mask_w0//self.pool_size*self.pool_size, mask_h1//self.pool_size*self.pool_size, mask_w1//self.pool_size*self.pool_size + + feat0 = feat0[:, :, :mask_h0, :mask_w0] + feat1 = feat1[:, :, :mask_h1, :mask_w1] + + padding = True + + # rope self only + # if self.rope: + # feat0, feat1 = rearrange(feat0, 'b c h w -> b (h w) c'), rearrange(feat1, 'b c h w -> b (h w) c') + # prune + if padding: + l0, l1 = mask_h0 * mask_w0, mask_h1 * mask_w1 + else: + l0, l1 = H0 * W0, H1 * W1 + do_early_stop = self.depth_confidence > 0 + do_point_pruning = self.width_confidence > 0 + if do_point_pruning: + 
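+            # Width pruning bookkeeping (LightGlue-style): ind0/ind1 hold the
+            # flattened grid indices of tokens that survive pruning, so sparse
+            # matches can later be scattered back to the full-resolution grids.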
ind0 = torch.arange(0, l0, device=feat0.device)[None]
+            ind1 = torch.arange(0, l1, device=feat0.device)[None]
+            # We store the index of the layer at which pruning is detected.
+            prune0 = torch.ones_like(ind0)
+            prune1 = torch.ones_like(ind1)
+        if do_early_stop:
+            token0, token1 = None, None
+
+        for i, (layer, name) in enumerate(zip(self.layers, self.layer_names)):
+            if padding:
+                mask0, mask1 = None, None
+            if name == 'self':
+                # if self.rope:
+                #     feat0 = layer(feat0, feat0, mask0, mask1, H0, W0)
+                #     feat1 = layer(feat1, feat1, mask0, mask1, H1, W1)
+                if self.asymmetric:
+                    assert False, 'not worked'
+                    # feat0 = layer(feat0, feat0, mask0, mask1)
+                    feat1 = layer(feat1, feat1, mask1, mask1)
+                else:
+                    feat0 = layer(feat0, feat0, mask0, mask0)
+                    feat1 = layer(feat1, feat1, mask1, mask1)
+            elif name == 'cross':
+                if self.bidirect:
+                    feat0, feat1 = layer(feat0, feat1, mask0, mask1)
+                else:
+                    if self.asymmetric or self.asymmetric_self:
+                        assert False, 'not worked'
+                        feat0 = layer(feat0, feat1, mask0, mask1)
+                    else:
+                        feat0 = layer(feat0, feat1, mask0, mask1)
+                        feat1 = layer(feat1, feat0, mask1, mask0)
+
+                if i == len(self.layer_names) - 1 and not self.training:
+                    continue
+                if self.matchability:
+                    desc0, desc1 = rearrange(feat0, 'b c h w -> b (h w) c'), rearrange(feat1, 'b c h w -> b (h w) c')
+                    if do_early_stop:
+                        token0, token1 = self.token_confidence[i//2](desc0, desc1)
+                        if self.check_if_stop(token0, token1, i, l0+l1) and not self.training:
+                            break
+                    if do_point_pruning:
+                        scores0, scores1 = self.log_assignment[i//2].scores(desc0, desc1)
+                        mask0 = self.get_pruning_mask(token0, scores0, i)
+                        mask1 = self.get_pruning_mask(token1, scores1, i)
+                        ind0, ind1 = ind0[mask0][None], ind1[mask1][None]
+                        feat0, feat1 = desc0[mask0][None], desc1[mask1][None]
+                        if feat0.shape[-2] == 0 or feat1.shape[-2] == 0:  # stop once either side runs out of tokens
+                            break
+                        prune0[:, ind0] += 1
+                        prune1[:, ind1] += 1
+                    if self.training and self.matchability:
+                        scores, _, matchability0, matchability1 = self.log_assignment[i//2](desc0, desc1)
+                        m0_full = torch.zeros((bs, mask_h0 * mask_w0), device=matchability0.device, dtype=matchability0.dtype)
+                        # scatter_ (in-place): a plain Tensor.scatter returns a new tensor and would be a no-op here
+                        m0_full.scatter_(1, ind0, matchability0.squeeze(-1))
+                        if padding and self.d_model == feat0.size(1):
+                            m0_full = m0_full.reshape(bs, mask_h0, mask_w0)
+                            bs, c, mask_h0, mask_w0 = feat0.size()
+                            if mask_h0 != mask_H0:
+                                m0_full = torch.cat([m0_full, torch.zeros(bs, mask_H0-mask_h0, mask_w0, device=m0_full.device, dtype=m0_full.dtype)], dim=1)
+                            elif mask_w0 != mask_W0:
+                                m0_full = torch.cat([m0_full, torch.zeros(bs, mask_h0, mask_W0-mask_w0, device=m0_full.device, dtype=m0_full.dtype)], dim=2)
+                            m0_full = m0_full.reshape(bs, mask_H0*mask_W0)
+                        m1_full = torch.zeros((bs, mask_h1 * mask_w1), device=matchability0.device, dtype=matchability0.dtype)
+                        m1_full.scatter_(1, ind1, matchability1.squeeze(-1))
+                        if padding and self.d_model == feat1.size(1):
+                            m1_full = m1_full.reshape(bs, mask_h1, mask_w1)
+                            bs, c, mask_h1, mask_w1 = feat1.size()
+                            if mask_h1 != mask_H1:
+                                m1_full = torch.cat([m1_full, torch.zeros(bs, mask_H1-mask_h1, mask_w1, device=m1_full.device, dtype=m1_full.dtype)], dim=1)
+                            elif mask_w1 != mask_W1:
+                                m1_full = torch.cat([m1_full, torch.zeros(bs, mask_h1, mask_W1-mask_w1, device=m1_full.device, dtype=m1_full.dtype)], dim=2)
+                            m1_full = m1_full.reshape(bs, mask_H1*mask_W1)
+                        data.update({'matchability0_'+str(i//2): m0_full, 'matchability1_'+str(i//2): m1_full})
+                        m0, m1, mscores0, mscores1 = filter_matches(
+                            scores, self.thr)
+                        if do_point_pruning:
+                            m0_ = torch.full((bs, l0), -1, device=m0.device,
dtype=m0.dtype)
+                            m1_ = torch.full((bs, l1), -1, device=m1.device, dtype=m1.dtype)
+                            m0_[:, ind0] = torch.where(
+                                m0 == -1, -1, ind1.gather(1, m0.clamp(min=0)))
+                            m1_[:, ind1] = torch.where(
+                                m1 == -1, -1, ind0.gather(1, m1.clamp(min=0)))
+                            mscores0_ = torch.zeros((bs, l0), device=mscores0.device)
+                            mscores1_ = torch.zeros((bs, l1), device=mscores1.device)
+                            mscores0_[:, ind0] = mscores0
+                            mscores1_[:, ind1] = mscores1
+                            m0, m1, mscores0, mscores1 = m0_, m1_, mscores0_, mscores1_
+                        if padding and self.d_model == feat0.size(1):
+                            m0 = m0.reshape(bs, mask_h0, mask_w0)
+                            bs, c, mask_h0, mask_w0 = feat0.size()
+                            if mask_h0 != mask_H0:
+                                m0 = torch.cat([m0, -torch.ones(bs, mask_H0-mask_h0, mask_w0, device=m0.device, dtype=m0.dtype)], dim=1)
+                            elif mask_w0 != mask_W0:
+                                m0 = torch.cat([m0, -torch.ones(bs, mask_h0, mask_W0-mask_w0, device=m0.device, dtype=m0.dtype)], dim=2)
+                            m0 = m0.reshape(bs, mask_H0*mask_W0)
+                        if padding and self.d_model == feat1.size(1):
+                            m1 = m1.reshape(bs, mask_h1, mask_w1)
+                            bs, c, mask_h1, mask_w1 = feat1.size()
+                            if mask_h1 != mask_H1:
+                                m1 = torch.cat([m1, -torch.ones(bs, mask_H1-mask_h1, mask_w1, device=m1.device, dtype=m1.dtype)], dim=1)
+                            elif mask_w1 != mask_W1:
+                                m1 = torch.cat([m1, -torch.ones(bs, mask_h1, mask_W1-mask_w1, device=m1.device, dtype=m1.dtype)], dim=2)
+                            m1 = m1.reshape(bs, mask_H1*mask_W1)
+                        data.update({'matches0_'+str(i//2): m0, 'matches1_'+str(i//2): m1})
+                        conf = torch.zeros((bs, l0 * l1), device=scores.device, dtype=scores.dtype)
+                        ind = ind0[...,None] * l1 + ind1[:,None,:]
+                        # conf[ind.reshape(bs, -1)] = scores.reshape(bs, -1).exp()
+                        # scatter_ (in-place): the out-of-place scatter used before discarded its result
+                        conf.scatter_(1, ind.reshape(bs, -1), scores.reshape(bs, -1).exp())
+                        if padding and self.d_model == feat0.size(1):
+                            conf = conf.reshape(bs, mask_h0, mask_w0, mask_h1, mask_w1)
+                            bs, c, mask_h0, mask_w0 = feat0.size()
+                            if mask_h0 != mask_H0:
+                                conf = torch.cat([conf, torch.zeros(bs, mask_H0-mask_h0, mask_w0, mask_h1, mask_w1, device=conf.device, dtype=conf.dtype)], dim=1)
+                            elif mask_w0 != mask_W0:
+                                conf = torch.cat([conf, torch.zeros(bs, mask_h0, mask_W0-mask_w0, mask_h1, mask_w1, device=conf.device, dtype=conf.dtype)], dim=2)
+                            bs, c, mask_h1, mask_w1 = feat1.size()
+                            if mask_h1 != mask_H1:
+                                conf = torch.cat([conf, torch.zeros(bs, mask_H0, mask_W0, mask_H1-mask_h1, mask_W1, device=conf.device, dtype=conf.dtype)], dim=3)
+                            elif mask_w1 != mask_W1:
+                                conf = torch.cat([conf, torch.zeros(bs, mask_H0, mask_W0, mask_H1, mask_W1-mask_w1, device=conf.device, dtype=conf.dtype)], dim=4)
+                            conf = conf.reshape(bs, mask_H0*mask_W0, mask_H1*mask_W1)
+                        data.update({'conf_matrix_'+str(i//2): conf})
+
+            else:
+                raise KeyError
+
+        if self.matchability and not self.training:
+            scores, _, matchability0, matchability1 = self.log_assignment[i//2](desc0, desc1)
+            conf = torch.zeros((bs, l0 * l1), device=scores.device, dtype=scores.dtype)
+            ind = ind0[...,None] * l1 + ind1[:,None,:]
+            # conf[ind.reshape(bs, -1)] = scores.reshape(bs, -1).exp()
+            conf.scatter_(1, ind.reshape(bs, -1), scores.reshape(bs, -1).exp())
+            if padding and self.d_model == feat0.size(1):
+                conf = conf.reshape(bs, mask_h0, mask_w0, mask_h1, mask_w1)
+                bs, c, mask_h0, mask_w0 = feat0.size()
+                if mask_h0 != mask_H0:
+                    conf = torch.cat([conf, torch.zeros(bs, mask_H0-mask_h0, mask_w0, mask_h1, mask_w1, device=conf.device, dtype=conf.dtype)], dim=1)
+                elif mask_w0 != mask_W0:
+                    conf = torch.cat([conf, torch.zeros(bs, mask_h0, mask_W0-mask_w0, mask_h1, mask_w1, device=conf.device, dtype=conf.dtype)], dim=2)
+                bs, c, mask_h1, mask_w1 = feat1.size()
+                if mask_h1
!= mask_H1: + conf = torch.cat([conf, torch.zeros(bs, mask_H0, mask_W0, mask_H1-mask_h1, mask_W1, device=conf.device, dtype=conf.dtype)], dim=3) + elif mask_w1 != mask_W1: + conf = torch.cat([conf, torch.zeros(bs, mask_H0, mask_W0, mask_H1, mask_W1-mask_w1, device=conf.device, dtype=conf.dtype)], dim=4) + conf = conf.reshape(bs, mask_H0*mask_W0, mask_H1*mask_W1) + data.update({'conf_matrix': conf}) + data.update(**self.CoarseMatching.get_coarse_match(conf, data)) + # m0, m1, mscores0, mscores1 = filter_matches( + # scores, self.conf.filter_threshold) + + # matches, mscores = [], [] + # for k in range(b): + # valid = m0[k] > -1 + # m_indices_0 = torch.where(valid)[0] + # m_indices_1 = m0[k][valid] + # if do_point_pruning: + # m_indices_0 = ind0[k, m_indices_0] + # m_indices_1 = ind1[k, m_indices_1] + # matches.append(torch.stack([m_indices_0, m_indices_1], -1)) + # mscores.append(mscores0[k][valid]) + + # # TODO: Remove when hloc switches to the compact format. + # if do_point_pruning: + # m0_ = torch.full((b, m), -1, device=m0.device, dtype=m0.dtype) + # m1_ = torch.full((b, n), -1, device=m1.device, dtype=m1.dtype) + # m0_[:, ind0] = torch.where( + # m0 == -1, -1, ind1.gather(1, m0.clamp(min=0))) + # m1_[:, ind1] = torch.where( + # m1 == -1, -1, ind0.gather(1, m1.clamp(min=0))) + # mscores0_ = torch.zeros((b, m), device=mscores0.device) + # mscores1_ = torch.zeros((b, n), device=mscores1.device) + # mscores0_[:, ind0] = mscores0 + # mscores1_[:, ind1] = mscores1 + # m0, m1, mscores0, mscores1 = m0_, m1_, mscores0_, mscores1_ + + # pred = { + # 'matches0': m0, + # 'matches1': m1, + # 'matching_scores0': mscores0, + # 'matching_scores1': mscores1, + # 'stop': i+1, + # 'matches': matches, + # 'scores': mscores, + # } + + # if do_point_pruning: + # pred.update(dict(prune0=prune0, prune1=prune1)) + # return pred + + + if padding and self.d_model == feat0.size(1): + bs, c, mask_h0, mask_w0 = feat0.size() + if mask_h0 != mask_H0: + feat0 = torch.cat([feat0, torch.zeros(bs, c, mask_H0-mask_h0, mask_W0, device=feat0.device, dtype=feat0.dtype)], dim=-2) + elif mask_w0 != mask_W0: + feat0 = torch.cat([feat0, torch.zeros(bs, c, mask_H0, mask_W0-mask_w0, device=feat0.device, dtype=feat0.dtype)], dim=-1) + bs, c, mask_h1, mask_w1 = feat1.size() + if mask_h1 != mask_H1: + feat1 = torch.cat([feat1, torch.zeros(bs, c, mask_H1-mask_h1, mask_W1, device=feat1.device, dtype=feat1.dtype)], dim=-2) + elif mask_w1 != mask_W1: + feat1 = torch.cat([feat1, torch.zeros(bs, c, mask_H1, mask_W1-mask_w1, device=feat1.device, dtype=feat1.dtype)], dim=-1) + + return feat0, feat1 + + def pro(self, feat0, feat1, mask0=None, mask1=None, profiler=None): + """ + Args: + feat0 (torch.Tensor): [N, C, H, W] + feat1 (torch.Tensor): [N, C, H, W] + mask0 (torch.Tensor): [N, L] (optional) + mask1 (torch.Tensor): [N, S] (optional) + """ + + assert self.d_model == feat0.size(1) or self.d_model == feat0.size(-1), "the feature number of src and transformer must be equal" + with profiler.profile("LoFTR_transformer_attention"): + for layer, name in zip(self.layers, self.layer_names): + if name == 'self': + feat0 = layer.pro(feat0, feat0, mask0, mask0, profiler=profiler) + feat1 = layer.pro(feat1, feat1, mask1, mask1, profiler=profiler) + elif name == 'cross': + feat0 = layer.pro(feat0, feat1, mask0, mask1, profiler=profiler) + feat1 = layer.pro(feat1, feat0, mask1, mask0, profiler=profiler) + else: + raise KeyError + + return feat0, feat1 + + def confidence_threshold(self, layer_index: int) -> float: + """ scaled confidence threshold """ 
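+        # Decays from 0.9 at the first layer towards 0.8 + 0.1*exp(-4) ~ 0.802
+        # at the last, i.e. early layers demand higher confidence before tokens
+        # are pruned or iteration stops (the schedule used by LightGlue).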
+ threshold = 0.8 + 0.1 * np.exp(-4.0 * layer_index / self.n_layers) + return np.clip(threshold, 0, 1) + + def get_pruning_mask(self, confidences: torch.Tensor, scores: torch.Tensor, + layer_index: int) -> torch.Tensor: + """ mask points which should be removed """ + threshold = self.confidence_threshold(layer_index) + if confidences is not None: + scores = torch.where( + confidences > threshold, scores, scores.new_tensor(1.0)) + return scores > (1 - self.width_confidence) + + def check_if_stop(self, + confidences0: torch.Tensor, + confidences1: torch.Tensor, + layer_index: int, num_points: int) -> torch.Tensor: + """ evaluate stopping condition""" + confidences = torch.cat([confidences0, confidences1], -1) + threshold = self.confidence_threshold(layer_index) + pos = 1.0 - (confidences < threshold).float().sum() / num_points + return pos > self.depth_confidence diff --git a/imcui/third_party/MatchAnything/src/loftr/loftr_module/transformer_utils.py b/imcui/third_party/MatchAnything/src/loftr/loftr_module/transformer_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..25c261c973e8eeb6803ba6d21b5eb86992c3d857 --- /dev/null +++ b/imcui/third_party/MatchAnything/src/loftr/loftr_module/transformer_utils.py @@ -0,0 +1,76 @@ +import torch +from torch import nn +from torch.nn import functional as F + +class TokenConfidence(nn.Module): + def __init__(self, dim: int) -> None: + super().__init__() + self.token = nn.Sequential( + nn.Linear(dim, 1), + nn.Sigmoid() + ) + + def forward(self, desc0: torch.Tensor, desc1: torch.Tensor): + """ get confidence tokens """ + return ( + self.token(desc0.detach().float()).squeeze(-1), + self.token(desc1.detach().float()).squeeze(-1)) + +def sigmoid_log_double_softmax( + sim: torch.Tensor, z0: torch.Tensor, z1: torch.Tensor) -> torch.Tensor: + """ create the log assignment matrix from logits and similarity""" + b, m, n = sim.shape + m0, m1 = torch.sigmoid(z0), torch.sigmoid(z1) + certainties = torch.log(m0) + torch.log(m1).transpose(1, 2) + scores0 = F.log_softmax(sim, 2) + scores1 = F.log_softmax( + sim.transpose(-1, -2).contiguous(), 2).transpose(-1, -2) + scores = scores0 + scores1 + certainties + # scores[:, :-1, -1] = F.logsigmoid(-z0.squeeze(-1)) + # scores[:, -1, :-1] = F.logsigmoid(-z1.squeeze(-1)) + return scores, m0, m1 + +class MatchAssignment(nn.Module): + def __init__(self, dim: int) -> None: + super().__init__() + self.dim = dim + self.matchability = nn.Linear(dim, 1, bias=True) + self.final_proj = nn.Linear(dim, dim, bias=True) + + @torch.cuda.amp.custom_fwd(cast_inputs=torch.float32) + def forward(self, desc0: torch.Tensor, desc1: torch.Tensor): + """ build assignment matrix from descriptors """ + mdesc0, mdesc1 = self.final_proj(desc0), self.final_proj(desc1) + _, _, d = mdesc0.shape + mdesc0, mdesc1 = mdesc0 / d**.25, mdesc1 / d**.25 + sim = torch.einsum('bmd,bnd->bmn', mdesc0, mdesc1) + z0 = self.matchability(desc0) + z1 = self.matchability(desc1) + scores, m0, m1 = sigmoid_log_double_softmax(sim, z0, z1) + return scores, sim, m0, m1 + + def scores(self, desc0: torch.Tensor, desc1: torch.Tensor): + m0 = torch.sigmoid(self.matchability(desc0)).squeeze(-1) + m1 = torch.sigmoid(self.matchability(desc1)).squeeze(-1) + return m0, m1 + +def filter_matches(scores: torch.Tensor, th: float): + """ obtain matches from a log assignment matrix [Bx M+1 x N+1]""" + max0, max1 = scores.max(2), scores.max(1) + m0, m1 = max0.indices, max1.indices + indices0 = torch.arange(m0.shape[1], device=m0.device)[None] + indices1 = 
torch.arange(m1.shape[1], device=m1.device)[None] + mutual0 = indices0 == m1.gather(1, m0) + mutual1 = indices1 == m0.gather(1, m1) + max0_exp = max0.values.exp() + zero = max0_exp.new_tensor(0) + mscores0 = torch.where(mutual0, max0_exp, zero) + mscores1 = torch.where(mutual1, mscores0.gather(1, m1), zero) + if th is not None: + valid0 = mutual0 & (mscores0 > th) + else: + valid0 = mutual0 + valid1 = mutual1 & valid0.gather(1, m1) + m0 = torch.where(valid0, m0, -1) + m1 = torch.where(valid1, m1, -1) + return m0, m1, mscores0, mscores1 diff --git a/imcui/third_party/MatchAnything/src/loftr/utils/coarse_matching.py b/imcui/third_party/MatchAnything/src/loftr/utils/coarse_matching.py new file mode 100644 index 0000000000000000000000000000000000000000..cd8dfca8227423ed699ea736e23b516bed68c19d --- /dev/null +++ b/imcui/third_party/MatchAnything/src/loftr/utils/coarse_matching.py @@ -0,0 +1,266 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from einops.einops import rearrange, repeat + +from loguru import logger + +INF = 1e9 + +def mask_border(m, b: int, v): + """ Mask borders with value + Args: + m (torch.Tensor): [N, H0, W0, H1, W1] + b (int) + v (m.dtype) + """ + if b <= 0: + return + + m[:, :b] = v + m[:, :, :b] = v + m[:, :, :, :b] = v + m[:, :, :, :, :b] = v + m[:, -b:] = v + m[:, :, -b:] = v + m[:, :, :, -b:] = v + m[:, :, :, :, -b:] = v + + +def mask_border_with_padding(m, bd, v, p_m0, p_m1): + if bd <= 0: + return + + m[:, :bd] = v + m[:, :, :bd] = v + m[:, :, :, :bd] = v + m[:, :, :, :, :bd] = v + + h0s, w0s = p_m0.sum(1).max(-1)[0].int(), p_m0.sum(-1).max(-1)[0].int() + h1s, w1s = p_m1.sum(1).max(-1)[0].int(), p_m1.sum(-1).max(-1)[0].int() + for b_idx, (h0, w0, h1, w1) in enumerate(zip(h0s, w0s, h1s, w1s)): + m[b_idx, h0 - bd:] = v + m[b_idx, :, w0 - bd:] = v + m[b_idx, :, :, h1 - bd:] = v + m[b_idx, :, :, :, w1 - bd:] = v + + +def compute_max_candidates(p_m0, p_m1): + """Compute the max candidates of all pairs within a batch + + Args: + p_m0, p_m1 (torch.Tensor): padded masks + """ + h0s, w0s = p_m0.sum(1).max(-1)[0], p_m0.sum(-1).max(-1)[0] + h1s, w1s = p_m1.sum(1).max(-1)[0], p_m1.sum(-1).max(-1)[0] + max_cand = torch.sum( + torch.min(torch.stack([h0s * w0s, h1s * w1s], -1), -1)[0]) + return max_cand + + +class CoarseMatching(nn.Module): + def __init__(self, config): + super().__init__() + self.config = config + # general config + self.thr = config['thr'] + self.border_rm = config['border_rm'] + # -- # for trainig fine-level LoFTR + self.train_coarse_percent = config['train_coarse_percent'] + self.train_pad_num_gt_min = config['train_pad_num_gt_min'] + + # we provide 2 options for differentiable matching + self.match_type = config['match_type'] + if self.match_type == 'dual_softmax': + self.temperature = config['dsmax_temperature'] + elif self.match_type == 'sinkhorn': + try: + from .superglue import log_optimal_transport + except ImportError: + raise ImportError("download superglue.py first!") + self.log_optimal_transport = log_optimal_transport + self.bin_score = nn.Parameter( + torch.tensor(config['skh_init_bin_score'], requires_grad=True)) + self.skh_iters = config['skh_iters'] + self.skh_prefilter = config['skh_prefilter'] + else: + raise NotImplementedError() + + self.mtd = config['mtd_spvs'] + self.fix_bias = config['fix_bias'] + + def forward(self, feat_c0, feat_c1, data, mask_c0=None, mask_c1=None): + """ + Args: + feat0 (torch.Tensor): [N, L, C] + feat1 (torch.Tensor): [N, S, C] + data (dict) + mask_c0 (torch.Tensor): [N, L] (optional) + 
mask_c1 (torch.Tensor): [N, S] (optional) + Update: + data (dict): { + 'b_ids' (torch.Tensor): [M'], + 'i_ids' (torch.Tensor): [M'], + 'j_ids' (torch.Tensor): [M'], + 'gt_mask' (torch.Tensor): [M'], + 'mkpts0_c' (torch.Tensor): [M, 2], + 'mkpts1_c' (torch.Tensor): [M, 2], + 'mconf' (torch.Tensor): [M]} + NOTE: M' != M during training. + """ + N, L, S, C = feat_c0.size(0), feat_c0.size(1), feat_c1.size(1), feat_c0.size(2) + + # normalize + feat_c0, feat_c1 = map(lambda feat: feat / feat.shape[-1]**.5, + [feat_c0, feat_c1]) + + if self.match_type == 'dual_softmax': + with torch.autocast(enabled=False, device_type='cuda'): + sim_matrix = torch.einsum("nlc,nsc->nls", feat_c0, + feat_c1) / self.temperature + if mask_c0 is not None: + sim_matrix = sim_matrix.float().masked_fill_( + ~(mask_c0[..., None] * mask_c1[:, None]).bool(), + -INF + # float("-inf") if sim_matrix.dtype == torch.float16 else -INF + ) + if self.config['fp16log']: + t1 = F.softmax(sim_matrix, 1) + t2 = F.softmax(sim_matrix, 2) + conf_matrix = t1*t2 + logger.info(f'feat_c0absmax: {feat_c0.abs().max()}') + logger.info(f'feat_c1absmax: {feat_c1.abs().max()}') + logger.info(f'sim_matrix: {sim_matrix.dtype}') + logger.info(f'sim_matrixabsmax: {sim_matrix.abs().max()}') + logger.info(f't1: {t1.dtype}, t2: {t2.dtype}, conf_matrix: {conf_matrix.dtype}') + logger.info(f't1absmax: {t1.abs().max()}, t2absmax: {t2.abs().max()}, conf_matrixabsmax: {conf_matrix.abs().max()}') + else: + conf_matrix = F.softmax(sim_matrix, 1) * F.softmax(sim_matrix, 2) + + data.update({'conf_matrix': conf_matrix}) + + # predict coarse matches from conf_matrix + data.update(**self.get_coarse_match(conf_matrix, data)) + + @torch.no_grad() + def get_coarse_match(self, conf_matrix, data): + """ + Args: + conf_matrix (torch.Tensor): [N, L, S] + data (dict): with keys ['hw0_i', 'hw1_i', 'hw0_c', 'hw1_c'] + Returns: + coarse_matches (dict): { + 'b_ids' (torch.Tensor): [M'], + 'i_ids' (torch.Tensor): [M'], + 'j_ids' (torch.Tensor): [M'], + 'gt_mask' (torch.Tensor): [M'], + 'm_bids' (torch.Tensor): [M], + 'mkpts0_c' (torch.Tensor): [M, 2], + 'mkpts1_c' (torch.Tensor): [M, 2], + 'mconf' (torch.Tensor): [M]} + """ + axes_lengths = { + 'h0c': data['hw0_c'][0], + 'w0c': data['hw0_c'][1], + 'h1c': data['hw1_c'][0], + 'w1c': data['hw1_c'][1] + } + _device = conf_matrix.device + # 1. confidence thresholding + mask = conf_matrix > self.thr + mask = rearrange(mask, 'b (h0c w0c) (h1c w1c) -> b h0c w0c h1c w1c', + **axes_lengths) + if 'mask0' not in data: + mask_border(mask, self.border_rm, False) + else: + mask_border_with_padding(mask, self.border_rm, False, + data['mask0'], data['mask1']) + mask = rearrange(mask, 'b h0c w0c h1c w1c -> b (h0c w0c) (h1c w1c)', + **axes_lengths) + + # 2. mutual nearest + if self.mtd: + b_ids, i_ids, j_ids = torch.where(mask) + mconf = conf_matrix[b_ids, i_ids, j_ids] + else: + mask = mask \ + * (conf_matrix == conf_matrix.max(dim=2, keepdim=True)[0]) \ + * (conf_matrix == conf_matrix.max(dim=1, keepdim=True)[0]) + + # 3. find all valid coarse matches + # this only works when at most one `True` in each row + mask_v, all_j_ids = mask.max(dim=2) + b_ids, i_ids = torch.where(mask_v) + j_ids = all_j_ids[b_ids, i_ids] + mconf = conf_matrix[b_ids, i_ids, j_ids] + + # 4. Random sampling of training samples for fine-level LoFTR + # (optional) pad samples with gt coarse-level matches + if self.training: + # NOTE: + # The sampling is performed across all pairs in a batch without manually balancing + # #samples for fine-level increases w.r.t. 
batch_size + if 'mask0' not in data: + num_candidates_max = mask.size(0) * max( + mask.size(1), mask.size(2)) + else: + num_candidates_max = compute_max_candidates( + data['mask0'], data['mask1']) + num_matches_train = int(num_candidates_max * + self.train_coarse_percent) + num_matches_pred = len(b_ids) + assert self.train_pad_num_gt_min < num_matches_train, "min-num-gt-pad should be less than num-train-matches" + + # pred_indices is to select from prediction + if num_matches_pred <= num_matches_train - self.train_pad_num_gt_min: + pred_indices = torch.arange(num_matches_pred, device=_device) + else: + pred_indices = torch.randint( + num_matches_pred, + (num_matches_train - self.train_pad_num_gt_min, ), + device=_device) + + # gt_pad_indices is to select from gt padding. e.g. max(3787-4800, 200) + gt_pad_indices = torch.randint( + len(data['spv_b_ids']), + (max(num_matches_train - num_matches_pred, + self.train_pad_num_gt_min), ), + device=_device) + mconf_gt = torch.zeros(len(data['spv_b_ids']), device=_device) # set conf of gt paddings to all zero + + b_ids, i_ids, j_ids, mconf = map( + lambda x, y: torch.cat([x[pred_indices], y[gt_pad_indices]], + dim=0), + *zip([b_ids, data['spv_b_ids']], [i_ids, data['spv_i_ids']], + [j_ids, data['spv_j_ids']], [mconf, mconf_gt])) + + # These matches select patches that feed into fine-level network + coarse_matches = {'b_ids': b_ids, 'i_ids': i_ids, 'j_ids': j_ids} + + # 4. Update with matches in original image resolution + if self.fix_bias: + scale = 8 + else: + scale = data['hw0_i'][0] / data['hw0_c'][0] + scale0 = scale * data['scale0'][b_ids] if 'scale0' in data else scale + scale1 = scale * data['scale1'][b_ids] if 'scale1' in data else scale + mkpts0_c = torch.stack( + [i_ids % data['hw0_c'][1], i_ids // data['hw0_c'][1]], + dim=1) * scale0 + mkpts1_c = torch.stack( + [j_ids % data['hw1_c'][1], j_ids // data['hw1_c'][1]], + dim=1) * scale1 + + m_bids = b_ids[mconf != 0] + + m_bids_f = repeat(m_bids, 'b -> b k', k = 3).reshape(-1) + coarse_matches.update({ + 'gt_mask': mconf == 0, + 'm_bids': m_bids, # mconf == 0 => gt matches + 'm_bids_f': m_bids_f, + 'mkpts0_c': mkpts0_c[mconf != 0], + 'mkpts1_c': mkpts1_c[mconf != 0], + 'mconf': mconf[mconf != 0] + }) + + return coarse_matches \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/src/loftr/utils/fine_matching.py b/imcui/third_party/MatchAnything/src/loftr/utils/fine_matching.py new file mode 100644 index 0000000000000000000000000000000000000000..7be172a9bf9d45e3cbcf33a4926abddfca877629 --- /dev/null +++ b/imcui/third_party/MatchAnything/src/loftr/utils/fine_matching.py @@ -0,0 +1,493 @@ +import math +import torch +import torch.nn as nn +import torch.nn.functional as F + +from kornia.geometry.subpix import dsnt +from kornia.utils.grid import create_meshgrid + +from loguru import logger + +class FineMatching(nn.Module): + """FineMatching with s2d paradigm""" + + def __init__(self, config): + super().__init__() + self.config = config + self.topk = config['match_fine']['topk'] + self.mtd_spvs = config['fine']['mtd_spvs'] + self.align_corner = config['align_corner'] + self.fix_bias = config['fix_bias'] + self.normfinem = config['match_fine']['normfinem'] + self.fix_fine_matching = config['match_fine']['fix_fine_matching'] + self.mutual_nearest = config['match_fine']['force_nearest'] + self.skip_fine_softmax = config['match_fine']['skip_fine_softmax'] + self.normfeat = config['match_fine']['normfeat'] + self.use_sigmoid = config['match_fine']['use_sigmoid'] + self.local_regress = 
config['match_fine']['local_regress'] + self.local_regress_rmborder = config['match_fine']['local_regress_rmborder'] + self.local_regress_nomask = config['match_fine']['local_regress_nomask'] + self.local_regress_temperature = config['match_fine']['local_regress_temperature'] + self.local_regress_padone = config['match_fine']['local_regress_padone'] + self.local_regress_slice = config['match_fine']['local_regress_slice'] + self.local_regress_slicedim = config['match_fine']['local_regress_slicedim'] + self.local_regress_inner = config['match_fine']['local_regress_inner'] + self.multi_regress = config['match_fine']['multi_regress'] + def forward(self, feat_0, feat_1, data): + """ + Args: + feat0 (torch.Tensor): [M, WW, C] + feat1 (torch.Tensor): [M, WW, C] + data (dict) + Update: + data (dict):{ + 'expec_f' (torch.Tensor): [M, 3], + 'mkpts0_f' (torch.Tensor): [M, 2], + 'mkpts1_f' (torch.Tensor): [M, 2]} + """ + M, WW, C = feat_0.shape + W = int(math.sqrt(WW)) + if self.fix_bias: + scale = 2 + else: + scale = data['hw0_i'][0] / data['hw0_f'][0] + self.M, self.W, self.WW, self.C, self.scale = M, W, WW, C, scale + + # corner case: if no coarse matches found + if M == 0: + assert self.training == False, "M is always >0, when training, see coarse_matching.py" + # logger.warning('No matches found in coarse-level.') + if self.mtd_spvs: + data.update({ + 'conf_matrix_f': torch.empty(0, WW, WW, device=feat_0.device), + 'mkpts0_f': data['mkpts0_c'], + 'mkpts1_f': data['mkpts1_c'], + }) + # if self.local_regress: + # data.update({ + # 'sim_matrix_f': torch.empty(0, WW, WW, device=feat_0.device), + # }) + return + else: + data.update({ + 'expec_f': torch.empty(0, 3, device=feat_0.device), + 'mkpts0_f': data['mkpts0_c'], + 'mkpts1_f': data['mkpts1_c'], + }) + return + + if self.mtd_spvs: + with torch.autocast(enabled=False, device_type='cuda'): + # feat_0 = feat_0 / feat_0.size(-2) + if self.local_regress_slice: + feat_ff0, feat_ff1 = feat_0[...,-self.local_regress_slicedim:], feat_1[...,-self.local_regress_slicedim:] + feat_f0, feat_f1 = feat_0[...,:-self.local_regress_slicedim], feat_1[...,:-self.local_regress_slicedim] + conf_matrix_ff = torch.einsum('mlc,mrc->mlr', feat_ff0, feat_ff1 / (self.local_regress_slicedim)**.5) + else: + feat_f0, feat_f1 = feat_0, feat_1 + if self.normfinem: + feat_f0 = feat_f0 / C**.5 + feat_f1 = feat_f1 / C**.5 + conf_matrix_f = torch.einsum('mlc,mrc->mlr', feat_f0, feat_f1) + else: + if self.local_regress_slice: + conf_matrix_f = torch.einsum('mlc,mrc->mlr', feat_f0, feat_f1 / (C - self.local_regress_slicedim)**.5) + else: + conf_matrix_f = torch.einsum('mlc,mrc->mlr', feat_f0, feat_f1 / C**.5) + + if self.normfeat: + feat_f0, feat_f1 = torch.nn.functional.normalize(feat_f0.float(), p=2, dim=-1), torch.nn.functional.normalize(feat_f1.float(), p=2, dim=-1) + + if self.config['fp16log']: + logger.info(f'sim_matrix: {conf_matrix_f.abs().max()}') + # sim_matrix *= 1. 
/ C**.5 # normalize + + if self.multi_regress: + assert not self.local_regress + assert not self.normfinem and not self.normfeat + heatmap = F.softmax(conf_matrix_f, 2).view(M, WW, W, W) # [M, WW, W, W] + + assert (W - 2) == (self.config['resolution'][0] // self.config['resolution'][1]) # c8 + windows_scale = (W - 1) / (self.config['resolution'][0] // self.config['resolution'][1]) + + coords_normalized = dsnt.spatial_expectation2d(heatmap, True) * windows_scale # [M, WW, 2] + grid_normalized = create_meshgrid(W, W, True, heatmap.device).reshape(1, -1, 2)[:,None,:,:] * windows_scale # [1, 1, WW, 2] + + # compute std over + var = torch.sum(grid_normalized**2 * heatmap.view(M, WW, WW, 1), dim=-2) - coords_normalized**2 # ([1,1,WW,2] * [M,WW,WW,1])->[M,WW,2] + std = torch.sum(torch.sqrt(torch.clamp(var, min=1e-10)), -1) # [M,WW] clamp needed for numerical stability + + # for fine-level supervision + data.update({'expec_f': torch.cat([coords_normalized, std.unsqueeze(-1)], -1)}) # [M, WW, 2] + + # get the least uncertain matches + val, idx = torch.topk(std, self.topk, dim=-1, largest=False) # [M,topk] + coords_normalized = coords_normalized[torch.arange(M, device=conf_matrix_f.device, dtype=torch.long)[:,None], idx] # [M,topk] + + grid = create_meshgrid(W, W, False, idx.device) - W // 2 + 0.5 # [1, W, W, 2] + grid = grid.reshape(1, -1, 2).expand(M, -1, -1) # [M, WW, 2] + delta_l = torch.gather(grid, 1, idx.unsqueeze(-1).expand(-1, -1, 2)) # [M, topk, 2] in (x, y) + + # compute absolute kpt coords + self.get_multi_fine_match_align(delta_l, coords_normalized, data) + + + else: + + if self.skip_fine_softmax: + pass + elif self.use_sigmoid: + conf_matrix_f = torch.sigmoid(conf_matrix_f) + else: + if self.local_regress: + del feat_f0, feat_f1 + softmax_matrix_f = F.softmax(conf_matrix_f, 1) * F.softmax(conf_matrix_f, 2) + # softmax_matrix_f = conf_matrix_f + if self.local_regress_inner: + softmax_matrix_f = softmax_matrix_f.reshape(M, self.WW, self.W+2, self.W+2) + softmax_matrix_f = softmax_matrix_f[...,1:-1,1:-1].reshape(M, self.WW, self.WW) + # if self.training: + # for fine-level supervision + data.update({'conf_matrix_f': softmax_matrix_f}) + if self.local_regress_slice: + data.update({'sim_matrix_ff': conf_matrix_ff}) + else: + data.update({'sim_matrix_f': conf_matrix_f}) + + else: + conf_matrix_f = F.softmax(conf_matrix_f, 1) * F.softmax(conf_matrix_f, 2) + + # for fine-level supervision + data.update({'conf_matrix_f': conf_matrix_f}) + + # compute absolute kpt coords + if self.local_regress: + self.get_fine_ds_match(softmax_matrix_f, data) + del softmax_matrix_f + idx_l, idx_r = data['idx_l'], data['idx_r'] + del data['idx_l'], data['idx_r'] + m_ids = torch.arange(M, device=idx_l.device, dtype=torch.long).unsqueeze(-1).expand(-1, self.topk) + # if self.training: + m_ids = m_ids[:len(data['mconf']) // self.topk] + idx_r_iids, idx_r_jids = idx_r // W, idx_r % W + + # remove boarder + if self.local_regress_nomask: + # log for inner precent + # mask = (idx_r_iids >= 1) & (idx_r_iids <= W-2) & (idx_r_jids >= 1) & (idx_r_jids <= W-2) + # mask_sum = mask.sum() + # logger.info(f'total fine match: {mask.numel()}; regressed fine match: {mask_sum}, per: {mask_sum / mask.numel()}') + mask = None + m_ids, idx_l, idx_r_iids, idx_r_jids = m_ids.reshape(-1), idx_l.reshape(-1), idx_r_iids.reshape(-1), idx_r_jids.reshape(-1) + if self.local_regress_inner: # been sliced before + delta = create_meshgrid(3, 3, True, conf_matrix_f.device).to(torch.long) # [1, 3, 3, 2] + else: + # no mask + 1 for padding + delta = 
create_meshgrid(3, 3, True, conf_matrix_f.device).to(torch.long) + torch.tensor([1], dtype=torch.long, device=conf_matrix_f.device) # [1, 3, 3, 2] + + m_ids = m_ids[...,None,None].expand(-1, 3, 3) + idx_l = idx_l[...,None,None].expand(-1, 3, 3) # [m, k, 3, 3] + + idx_r_iids = idx_r_iids[...,None,None].expand(-1, 3, 3) + delta[None, ..., 1] + idx_r_jids = idx_r_jids[...,None,None].expand(-1, 3, 3) + delta[None, ..., 0] + + if idx_l.numel() == 0: + data.update({ + 'mkpts0_f': data['mkpts0_c'], + 'mkpts1_f': data['mkpts1_c'], + }) + return + + if self.local_regress_slice: + conf_matrix_f = conf_matrix_ff + if self.local_regress_inner: + conf_matrix_f = conf_matrix_f.reshape(M, self.WW, self.W+2, self.W+2) + else: + conf_matrix_f = conf_matrix_f.reshape(M, self.WW, self.W, self.W) + conf_matrix_f = F.pad(conf_matrix_f, (1,1,1,1)) + else: + mask = (idx_r_iids >= 1) & (idx_r_iids <= W-2) & (idx_r_jids >= 1) & (idx_r_jids <= W-2) + if W == 10: + idx_l_iids, idx_l_jids = idx_l // W, idx_l % W + mask = mask & (idx_l_iids >= 1) & (idx_l_iids <= W-2) & (idx_l_jids >= 1) & (idx_l_jids <= W-2) + + m_ids = m_ids[mask].to(torch.long) + idx_l, idx_r_iids, idx_r_jids = idx_l[mask].to(torch.long), idx_r_iids[mask].to(torch.long), idx_r_jids[mask].to(torch.long) + + m_ids, idx_l, idx_r_iids, idx_r_jids = m_ids.reshape(-1), idx_l.reshape(-1), idx_r_iids.reshape(-1), idx_r_jids.reshape(-1) + mask = mask.reshape(-1) + + delta = create_meshgrid(3, 3, True, conf_matrix_f.device).to(torch.long) # [1, 3, 3, 2] + + m_ids = m_ids[:,None,None].expand(-1, 3, 3) + idx_l = idx_l[:,None,None].expand(-1, 3, 3) # [m, 3, 3] + # bug !!!!!!!!! 1,0 rather 0,1 + # idx_r_iids = idx_r_iids[...,None,None].expand(-1, 3, 3) + delta[None, ..., 0] + # idx_r_jids = idx_r_jids[...,None,None].expand(-1, 3, 3) + delta[None, ..., 1] + idx_r_iids = idx_r_iids[:,None,None].expand(-1, 3, 3) + delta[..., 1] + idx_r_jids = idx_r_jids[:,None,None].expand(-1, 3, 3) + delta[..., 0] + + if idx_l.numel() == 0: + data.update({ + 'mkpts0_f': data['mkpts0_c'], + 'mkpts1_f': data['mkpts1_c'], + }) + return + if not self.local_regress_slice: + conf_matrix_f = conf_matrix_f.reshape(M, self.WW, self.W, self.W) + else: + conf_matrix_f = conf_matrix_ff.reshape(M, self.WW, self.W, self.W) + + conf_matrix_f = conf_matrix_f[m_ids, idx_l, idx_r_iids, idx_r_jids] + conf_matrix_f = conf_matrix_f.reshape(-1, 9) + if self.local_regress_padone: # follow the training detach the gradient of center + conf_matrix_f[:,4] = -1e4 + heatmap = F.softmax(conf_matrix_f / self.local_regress_temperature, -1) + logger.info(f'maxmax&maxmean of heatmap: {heatmap.view(-1).max()}, {heatmap.view(-1).min(), heatmap.max(-1)[0].mean()}') + heatmap[:,4] = 1.0 # no need gradient calculation in inference + logger.info(f'min of heatmap: {heatmap.view(-1).min()}') + heatmap = heatmap.reshape(-1, 3, 3) + # heatmap = torch.ones_like(softmax) # ones_like for detach the gradient of center + # heatmap[:,:4], heatmap[:,5:] = softmax[:,:4], softmax[:,5:] + # heatmap = heatmap.reshape(-1, 3, 3) + else: + conf_matrix_f = F.softmax(conf_matrix_f / self.local_regress_temperature, -1) + # logger.info(f'max&min&mean of heatmap: {conf_matrix_f.view(-1).max()}, {conf_matrix_f.view(-1).min(), conf_matrix_f.max(-1)[0].mean()}') + heatmap = conf_matrix_f.reshape(-1, 3, 3) + + # compute coordinates from heatmap + coords_normalized = dsnt.spatial_expectation2d(heatmap[None], True)[0] + + # coords_normalized_l2 = coords_normalized.norm(p=2, dim=-1) + # logger.info(f'mean&max&min abs of local: 
{coords_normalized_l2.mean(), coords_normalized_l2.max(), coords_normalized_l2.min()}') + + # compute absolute kpt coords + + if data['bs'] == 1: + scale1 = scale * data['scale1'] if 'scale0' in data else scale + else: + if mask is not None: + scale1 = scale * data['scale1'][data['b_ids']][:len(data['mconf']) // self.topk,...][:,None,:].expand(-1, self.topk, 2).reshape(-1, 2)[mask] if 'scale0' in data else scale + else: + scale1 = scale * data['scale1'][data['b_ids']][:len(data['mconf']) // self.topk,...][:,None,:].expand(-1, self.topk, 2).reshape(-1, 2) if 'scale0' in data else scale + + self.get_fine_match_local(coords_normalized, data, scale1, mask, True) + + else: + self.get_fine_ds_match(conf_matrix_f, data) + + + else: + if self.align_corner is True: + feat_f0, feat_f1 = feat_0, feat_1 + feat_f0_picked = feat_f0[:, WW//2, :] + sim_matrix = torch.einsum('mc,mrc->mr', feat_f0_picked, feat_f1) + softmax_temp = 1. / C**.5 + heatmap = torch.softmax(softmax_temp * sim_matrix, dim=1).view(-1, W, W) + + # compute coordinates from heatmap + coords_normalized = dsnt.spatial_expectation2d(heatmap[None], True)[0] # [M, 2] + grid_normalized = create_meshgrid(W, W, True, heatmap.device).reshape(1, -1, 2) # [1, WW, 2] + + # compute std over <x, y> + var = torch.sum(grid_normalized**2 * heatmap.view(-1, WW, 1), dim=1) - coords_normalized**2 # [M, 2] + std = torch.sum(torch.sqrt(torch.clamp(var, min=1e-10)), -1) # [M] clamp needed for numerical stability + + # for fine-level supervision + data.update({'expec_f': torch.cat([coords_normalized, std.unsqueeze(1)], -1)}) + + # compute absolute kpt coords + self.get_fine_match(coords_normalized, data) + else: + feat_f0, feat_f1 = feat_0, feat_1 + # even matching windows while coarse grid not aligned to fine grid!!! + # assert W == 5, "others size not checked" + if self.fix_bias: + assert W % 2 == 1, "W must be odd when select" + feat_f0_picked = feat_f0[:, WW//2] + + else: + # assert W == 6, "others size not checked" + assert W % 2 == 0, "W must be even when coarse grid not aligned to fine grid (average)" + feat_f0_picked = (feat_f0[:, WW//2 - W//2 - 1] + feat_f0[:, WW//2 - W//2] + feat_f0[:, WW//2 + W//2] + feat_f0[:, WW//2 + W//2 - 1]) / 4 + sim_matrix = torch.einsum('mc,mrc->mr', feat_f0_picked, feat_f1) + softmax_temp = 1.
/ C**.5 + heatmap = torch.softmax(softmax_temp * sim_matrix, dim=1).view(-1, W, W) + + # compute coordinates from heatmap + windows_scale = (W - 1) / (self.config['resolution'][0] // self.config['resolution'][1]) + + coords_normalized = dsnt.spatial_expectation2d(heatmap[None], True)[0] * windows_scale # [M, 2] + grid_normalized = create_meshgrid(W, W, True, heatmap.device).reshape(1, -1, 2) * windows_scale # [1, WW, 2] + + # compute std over + var = torch.sum(grid_normalized**2 * heatmap.view(-1, WW, 1), dim=1) - coords_normalized**2 # [M, 2] + std = torch.sum(torch.sqrt(torch.clamp(var, min=1e-10)), -1) # [M] clamp needed for numerical stability + + # for fine-level supervision + data.update({'expec_f': torch.cat([coords_normalized, std.unsqueeze(1)], -1)}) + + # compute absolute kpt coords + self.get_fine_match_align(coords_normalized, data) + + + @torch.no_grad() + def get_fine_match(self, coords_normed, data): + W, WW, C, scale = self.W, self.WW, self.C, self.scale + + # mkpts0_f and mkpts1_f + mkpts0_f = data['mkpts0_c'] + scale1 = scale * data['scale1'][data['b_ids']] if 'scale0' in data else scale + mkpts1_f = data['mkpts1_c'] + (coords_normed * (W // 2) * scale1)[:len(data['mconf'])] + + data.update({ + "mkpts0_f": mkpts0_f, + "mkpts1_f": mkpts1_f + }) + + def get_fine_match_local(self, coords_normed, data, scale1, mask, reserve_border=True): + W, WW, C, scale = self.W, self.WW, self.C, self.scale + + if mask is None: + mkpts0_c, mkpts1_c = data['mkpts0_c'], data['mkpts1_c'] + else: + data['mkpts0_c'], data['mkpts1_c'] = data['mkpts0_c'].reshape(-1, 2), data['mkpts1_c'].reshape(-1, 2) + mkpts0_c, mkpts1_c = data['mkpts0_c'][mask], data['mkpts1_c'][mask] + mask_sum = mask.sum() + logger.info(f'total fine match: {mask.numel()}; regressed fine match: {mask_sum}, per: {mask_sum / mask.numel()}') + # print(mkpts0_c.shape, mkpts1_c.shape, coords_normed.shape, scale1.shape) + # print(data['mkpts0_c'].shape, data['mkpts1_c'].shape) + # mkpts0_f and mkpts1_f + mkpts0_f = mkpts0_c + mkpts1_f = mkpts1_c + (coords_normed * (3 // 2) * scale1) + + if reserve_border and mask is not None: + mkpts0_f, mkpts1_f = torch.cat([mkpts0_f, data['mkpts0_c'][~mask].reshape(-1, 2)]), torch.cat([mkpts1_f, data['mkpts1_c'][~mask].reshape(-1, 2)]) + else: + pass + + del data['mkpts0_c'], data['mkpts1_c'] + data.update({ + "mkpts0_f": mkpts0_f, + "mkpts1_f": mkpts1_f + }) + + # can be used for both aligned and not aligned + @torch.no_grad() + def get_fine_match_align(self, coord_normed, data): + W, WW, C, scale = self.W, self.WW, self.C, self.scale + c2f = self.config['resolution'][0] // self.config['resolution'][1] + # mkpts0_f and mkpts1_f + mkpts0_f = data['mkpts0_c'] + scale1 = scale * data['scale1'][data['b_ids']] if 'scale0' in data else scale + mkpts1_f = data['mkpts1_c'] + (coord_normed * (c2f // 2) * scale1)[:len(data['mconf'])] + + data.update({ + "mkpts0_f": mkpts0_f, + "mkpts1_f": mkpts1_f + }) + + @torch.no_grad() + def get_multi_fine_match_align(self, delta_l, coord_normed, data): + W, WW, C, scale = self.W, self.WW, self.C, self.scale + c2f = self.config['resolution'][0] // self.config['resolution'][1] + # mkpts0_f and mkpts1_f + scale0 = scale * data['scale0'][data['b_ids']] if 'scale0' in data else torch.tensor([[scale, scale]], device=delta_l.device) + scale1 = scale * data['scale1'][data['b_ids']] if 'scale0' in data else torch.tensor([[scale, scale]], device=delta_l.device) + mkpts0_f = (data['mkpts0_c'][:,None,:] + (delta_l * scale0[:,None,:])[:len(data['mconf']),...]).reshape(-1, 2) + 
mkpts1_f = (data['mkpts1_c'][:,None,:] + (coord_normed * (c2f // 2) * scale1[:,None,:])[:len(data['mconf'])]).reshape(-1, 2) + + data.update({ + "mkpts0_f": mkpts0_f, + "mkpts1_f": mkpts1_f, + "mconf": data['mconf'][:,None].expand(-1, self.topk).reshape(-1) + }) + + @torch.no_grad() + def get_fine_ds_match(self, conf_matrix, data): + W, WW, C, scale = self.W, self.WW, self.C, self.scale + + # select topk matches + m, _, _ = conf_matrix.shape + + + if self.mutual_nearest: + pass + + + elif not self.fix_fine_matching: # only allow one2mul but mul2one + + val, idx_r = conf_matrix.max(-1) # (m, WW), (m, WW) + val, idx_l = torch.topk(val, self.topk, dim = -1) # (m, topk), (m, topk) + idx_r = torch.gather(idx_r, 1, idx_l) # (m, topk) + + # mkpts0_c use xy coordinate, so we don't need to convert it to hw coordinate + # grid = create_meshgrid(W, W, False, conf_matrix.device).transpose(-3,-2) - W // 2 + 0.5 # (1, W, W, 2) + grid = create_meshgrid(W, W, False, conf_matrix.device) - W // 2 + 0.5 # (1, W, W, 2) + grid = grid.reshape(1, -1, 2).expand(m, -1, -1) # (m, WW, 2) + delta_l = torch.gather(grid, 1, idx_l.unsqueeze(-1).expand(-1, -1, 2)) # (m, topk, 2) + delta_r = torch.gather(grid, 1, idx_r.unsqueeze(-1).expand(-1, -1, 2)) # (m, topk, 2) + + # mkpts0_f and mkpts1_f + scale0 = scale * data['scale0'][data['b_ids']] if 'scale0' in data else scale + scale1 = scale * data['scale1'][data['b_ids']] if 'scale0' in data else scale + + if torch.is_tensor(scale0) and scale0.numel() > 1: # num of scale0 > 1 + mkpts0_f = (data['mkpts0_c'][:,None,:] + (delta_l * scale0[:,None,:])[:len(data['mconf']),...]).reshape(-1, 2) + mkpts1_f = (data['mkpts1_c'][:,None,:] + (delta_r * scale1[:,None,:])[:len(data['mconf']),...]).reshape(-1, 2) + else: # scale0 is a float + mkpts0_f = (data['mkpts0_c'][:,None,:] + (delta_l * scale0)[:len(data['mconf']),...]).reshape(-1, 2) + mkpts1_f = (data['mkpts1_c'][:,None,:] + (delta_r * scale1)[:len(data['mconf']),...]).reshape(-1, 2) + + else: # allow one2mul mul2one and mul2mul + conf_matrix = conf_matrix.reshape(m, -1) + if self.local_regress: # for the compatibility of former config + conf_matrix = conf_matrix[:len(data['mconf']),...] 
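+ # conf_matrix has been flattened to (m, WW*WW), so the top-k below ranks
+ # joint (window0, window1) positions; each flat index is then split back
+ # into its window-0 part (idx // WW) and window-1 part (idx % WW).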
+ val, idx = torch.topk(conf_matrix, self.topk, dim = -1) + idx_l = idx // WW + idx_r = idx % WW + + if self.local_regress: + data.update({'idx_l': idx_l, 'idx_r': idx_r}) + + # mkpts0_c use xy coordinate, so we don't need to convert it to hw coordinate + # grid = create_meshgrid(W, W, False, conf_matrix.device).transpose(-3,-2) - W // 2 + 0.5 # (1, W, W, 2) + grid = create_meshgrid(W, W, False, conf_matrix.device) - W // 2 + 0.5 + grid = grid.reshape(1, -1, 2).expand(m, -1, -1) + delta_l = torch.gather(grid, 1, idx_l.unsqueeze(-1).expand(-1, -1, 2)) + delta_r = torch.gather(grid, 1, idx_r.unsqueeze(-1).expand(-1, -1, 2)) + + # mkpts0_f and mkpts1_f + scale0 = scale * data['scale0'][data['b_ids']] if 'scale0' in data else scale + scale1 = scale * data['scale1'][data['b_ids']] if 'scale0' in data else scale + + if self.local_regress: + if torch.is_tensor(scale0) and scale0.numel() > 1: # num of scale0 > 1 + mkpts0_f = (data['mkpts0_c'][:,None,:] + (delta_l * scale0[:len(data['mconf']),...][:,None,:])).reshape(-1, 2) + mkpts1_f = (data['mkpts1_c'][:,None,:] + (delta_r * scale1[:len(data['mconf']),...][:,None,:])).reshape(-1, 2) + else: # scale0 is a float + mkpts0_f = (data['mkpts0_c'][:,None,:] + (delta_l * scale0)).reshape(-1, 2) + mkpts1_f = (data['mkpts1_c'][:,None,:] + (delta_r * scale1)).reshape(-1, 2) + + else: + if torch.is_tensor(scale0) and scale0.numel() > 1: # num of scale0 > 1 + mkpts0_f = (data['mkpts0_c'][:,None,:] + (delta_l * scale0[:,None,:])[:len(data['mconf']),...]).reshape(-1, 2) + mkpts1_f = (data['mkpts1_c'][:,None,:] + (delta_r * scale1[:,None,:])[:len(data['mconf']),...]).reshape(-1, 2) + else: # scale0 is a float + mkpts0_f = (data['mkpts0_c'][:,None,:] + (delta_l * scale0)[:len(data['mconf']),...]).reshape(-1, 2) + mkpts1_f = (data['mkpts1_c'][:,None,:] + (delta_r * scale1)[:len(data['mconf']),...]).reshape(-1, 2) + del data['mkpts0_c'], data['mkpts1_c'] + data['mconf'] = data['mconf'].reshape(-1, 1).expand(-1, self.topk).reshape(-1) + # data['mconf'] = val.reshape(-1)[:len(data['mconf'])]*0.1 + data['mconf'] + + if self.local_regress: + data.update({ + "mkpts0_c": mkpts0_f, + "mkpts1_c": mkpts1_f + }) + else: + data.update({ + "mkpts0_f": mkpts0_f, + "mkpts1_f": mkpts1_f + }) + diff --git a/imcui/third_party/MatchAnything/src/loftr/utils/geometry.py b/imcui/third_party/MatchAnything/src/loftr/utils/geometry.py new file mode 100644 index 0000000000000000000000000000000000000000..47de76bd8d8928b123bc7357349b1e7ae4ee90ac --- /dev/null +++ b/imcui/third_party/MatchAnything/src/loftr/utils/geometry.py @@ -0,0 +1,298 @@ +import torch +from src.utils.homography_utils import warp_points_torch + +def get_unique_indices(input_tensor): + if input_tensor.shape[0] > 1: + unique, inverse = torch.unique(input_tensor, sorted=True, return_inverse=True, dim=0) + perm = torch.arange(inverse.size(0), dtype=inverse.dtype, device=inverse.device) + inverse, perm = inverse.flip([0]), perm.flip([0]) + perm = inverse.new_empty(unique.size(0)).scatter_(0, inverse, perm) + else: + perm = torch.zeros((input_tensor.shape[0],), dtype=torch.long, device=input_tensor.device) + return perm + + +@torch.no_grad() +def warp_kpts(kpts0, depth0, depth1, T_0to1, K0, K1, consistency_thr=0.2, cycle_proj_distance_thr=3.0): + """ Warp kpts0 from I0 to I1 with depth, K and Rt + Also check covisibility and depth consistency. + Depth is consistent if relative error < 0.2 (hard-coded). 
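+ Concretely, a warped keypoint is kept only when its source depth is
+ nonzero, it lands inside image 1, the relative depth error
+ |sampled_depth1 - computed_depth1| / sampled_depth1 is below
+ consistency_thr, and its cycle-reprojection error is below
+ cycle_proj_distance_thr pixels.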
+ + Args: + kpts0 (torch.Tensor): [N, L, 2] - , + depth0 (torch.Tensor): [N, H, W], + depth1 (torch.Tensor): [N, H, W], + T_0to1 (torch.Tensor): [N, 3, 4], + K0 (torch.Tensor): [N, 3, 3], + K1 (torch.Tensor): [N, 3, 3], + Returns: + calculable_mask (torch.Tensor): [N, L] + warped_keypoints0 (torch.Tensor): [N, L, 2] + """ + kpts0_long = kpts0.round().long() + + # Sample depth, get calculable_mask on depth != 0 + kpts0_depth = torch.stack( + [depth0[i, kpts0_long[i, :, 1], kpts0_long[i, :, 0]] for i in range(kpts0.shape[0])], dim=0 + ) # (N, L) + nonzero_mask = kpts0_depth != 0 + + # Unproject + kpts0_h = torch.cat([kpts0, torch.ones_like(kpts0[:, :, [0]])], dim=-1) * kpts0_depth[..., None] # (N, L, 3) + kpts0_cam = K0.inverse() @ kpts0_h.transpose(2, 1) # (N, 3, L) + + # Rigid Transform + w_kpts0_cam = T_0to1[:, :3, :3] @ kpts0_cam + T_0to1[:, :3, [3]] # (N, 3, L) + w_kpts0_depth_computed = w_kpts0_cam[:, 2, :] + + # Project + w_kpts0_h = (K1 @ w_kpts0_cam).transpose(2, 1) # (N, L, 3) + w_kpts0 = w_kpts0_h[:, :, :2] / (w_kpts0_h[:, :, [2]] + 1e-4) # (N, L, 2), +1e-4 to avoid zero depth + + # Covisible Check + h, w = depth1.shape[1:3] + covisible_mask = (w_kpts0[:, :, 0] > 0) * (w_kpts0[:, :, 0] < w-1) * \ + (w_kpts0[:, :, 1] > 0) * (w_kpts0[:, :, 1] < h-1) + w_kpts0_long = w_kpts0.long() + w_kpts0_long[~covisible_mask, :] = 0 + + w_kpts0_depth = torch.stack( + [depth1[i, w_kpts0_long[i, :, 1], w_kpts0_long[i, :, 0]] for i in range(w_kpts0_long.shape[0])], dim=0 + ) # (N, L) + consistent_mask = ((w_kpts0_depth - w_kpts0_depth_computed) / w_kpts0_depth).abs() < consistency_thr + + # Cycle Consistency Check + dst_pts_h = torch.cat([w_kpts0, torch.ones_like(w_kpts0[..., [0]], device=w_kpts0.device)], dim=-1) * w_kpts0_depth[..., None] # B * N_dst * N_pts * 3 + dst_pts_cam = K1.inverse() @ dst_pts_h.transpose(2, 1) # (N, 3, L) + dst_pose = T_0to1.inverse() + world_points_cycle_back = dst_pose[:, :3, :3] @ dst_pts_cam + dst_pose[:, :3, [3]] + src_warp_back_h = (K0 @ world_points_cycle_back).transpose(2, 1) # (N, L, 3) + src_back_proj_pts = src_warp_back_h[..., :2] / (src_warp_back_h[..., [2]] + 1e-4) + cycle_reproj_distance_mask = torch.linalg.norm(src_back_proj_pts - kpts0[:, None], dim=-1) < cycle_proj_distance_thr + + valid_mask = nonzero_mask * covisible_mask * consistent_mask * cycle_reproj_distance_mask + + return valid_mask, w_kpts0 + +@torch.no_grad() +def warp_kpts_by_sparse_gt_matches_batches(kpts0, gt_matches, dist_thr): + B, n_pts = kpts0.shape[0], kpts0.shape[1] + if n_pts > 20 * 10000: + all_kpts_valid_mask, all_kpts_warpped = [], [] + for b_id in range(B): + kpts_valid_mask, kpts_warpped = warp_kpts_by_sparse_gt_matches(kpts0[[b_id]], gt_matches[[b_id]], dist_thr[[b_id]]) + all_kpts_valid_mask.append(kpts_valid_mask) + all_kpts_warpped.append(kpts_warpped) + return torch.cat(all_kpts_valid_mask, dim=0), torch.cat(all_kpts_warpped, dim=0) + else: + return warp_kpts_by_sparse_gt_matches(kpts0, gt_matches, dist_thr) + +@torch.no_grad() +def warp_kpts_by_sparse_gt_matches(kpts0, gt_matches, dist_thr): + kpts_warpped = torch.zeros_like(kpts0) + kpts_valid_mask = torch.zeros_like(kpts0[..., 0], dtype=torch.bool) + gt_matches_non_padding_mask = gt_matches.sum(-1) > 0 + + dist_matrix = torch.cdist(kpts0, gt_matches[..., :2]) # B * N * M + if dist_thr is not None: + mask = dist_matrix < dist_thr[:, None, None] + else: + mask = torch.ones_like(dist_matrix, dtype=torch.bool) + # Mutual-Nearest check: + mask = mask \ + * (dist_matrix == dist_matrix.min(dim=2, keepdim=True)[0]) \ + * 
(dist_matrix == dist_matrix.min(dim=1, keepdim=True)[0]) + + mask_v, all_j_ids = mask.max(dim=2) + b_ids, i_ids = torch.where(mask_v) + j_ids = all_j_ids[b_ids, i_ids] + + j_uq_indices = get_unique_indices(torch.stack([b_ids, j_ids], dim=-1)) + b_ids, i_ids, j_ids = map(lambda x: x[j_uq_indices], [b_ids, i_ids, j_ids]) + + i_uq_indices = get_unique_indices(torch.stack([b_ids, i_ids], dim=-1)) + b_ids, i_ids, j_ids = map(lambda x: x[i_uq_indices], [b_ids, i_ids, j_ids]) + + kpts_valid_mask[b_ids, i_ids] = gt_matches_non_padding_mask[b_ids, j_ids] + kpts_warpped[b_ids, i_ids] = gt_matches[..., 2:][b_ids, j_ids] + + return kpts_valid_mask, kpts_warpped + +@torch.no_grad() +def warp_kpts_by_sparse_gt_matches_fine_chunks(kpts0, gt_matches, dist_thr): + B, n_pts = kpts0.shape[0], kpts0.shape[1] + chunk_n = 500 + all_kpts_valid_mask, all_kpts_warpped = [], [] + for b_id in range(0, B, chunk_n): + kpts_valid_mask, kpts_warpped = warp_kpts_by_sparse_gt_matches_fine(kpts0[b_id : b_id+chunk_n], gt_matches, dist_thr) + all_kpts_valid_mask.append(kpts_valid_mask) + all_kpts_warpped.append(kpts_warpped) + return torch.cat(all_kpts_valid_mask, dim=0), torch.cat(all_kpts_warpped, dim=0) + +@torch.no_grad() +def warp_kpts_by_sparse_gt_matches_fine(kpts0, gt_matches, dist_thr): + """ + Only support single batch + Input: + kpts0: N * ww * 2 + gt_matches: M * 2 + """ + B = kpts0.shape[0] # B is the fine matches in a single pair + assert gt_matches.shape[0] == 1 + kpts_warpped = torch.zeros_like(kpts0) + kpts_valid_mask = torch.zeros_like(kpts0[..., 0], dtype=torch.bool) + gt_matches_non_padding_mask = gt_matches.sum(-1) > 0 + + dist_matrix = torch.cdist(kpts0, gt_matches[..., :2]) # B * N * M + if dist_thr is not None: + mask = dist_matrix < dist_thr[:, None, None] + else: + mask = torch.ones_like(dist_matrix, dtype=torch.bool) + # Mutual-Nearest check: + mask = mask \ + * (dist_matrix == dist_matrix.min(dim=2, keepdim=True)[0]) \ + * (dist_matrix == dist_matrix.min(dim=1, keepdim=True)[0]) + + mask_v, all_j_ids = mask.max(dim=2) + b_ids, i_ids = torch.where(mask_v) + j_ids = all_j_ids[b_ids, i_ids] + + j_uq_indices = get_unique_indices(torch.stack([b_ids, j_ids], dim=-1)) + b_ids, i_ids, j_ids = map(lambda x: x[j_uq_indices], [b_ids, i_ids, j_ids]) + + i_uq_indices = get_unique_indices(torch.stack([b_ids, i_ids], dim=-1)) + b_ids, i_ids, j_ids = map(lambda x: x[i_uq_indices], [b_ids, i_ids, j_ids]) + + kpts_valid_mask[b_ids, i_ids] = gt_matches_non_padding_mask[0, j_ids] + kpts_warpped[b_ids, i_ids] = gt_matches[..., 2:][0, j_ids] + + return kpts_valid_mask, kpts_warpped + +@torch.no_grad() +def warp_kpts_by_sparse_gt_matches_fast(kpts0, gt_matches, scale0, current_h, current_w): + B, n_gt_pts = gt_matches.shape[0], gt_matches.shape[1] + kpts_warpped = torch.zeros_like(kpts0) + kpts_valid_mask = torch.zeros_like(kpts0[..., 0], dtype=torch.bool) + gt_matches_non_padding_mask = gt_matches.sum(-1) > 0 + + all_j_idxs = torch.arange(gt_matches.shape[-2], device=gt_matches.device, dtype=torch.long)[None].expand(B, n_gt_pts) + all_b_idxs = torch.arange(B, device=gt_matches.device, dtype=torch.long)[:, None].expand(B, n_gt_pts) + gt_matches_rescale = gt_matches[..., :2] / scale0 # From original img scale to resized scale + in_boundary_mask = (gt_matches_rescale[..., 0] <= current_w-1) & (gt_matches_rescale[..., 0] >= 0) & (gt_matches_rescale[..., 1] <= current_h -1) & (gt_matches_rescale[..., 1] >= 0) + + gt_matches_rescale = gt_matches_rescale.round().to(torch.long) + all_i_idxs = gt_matches_rescale[..., 1] * 
current_w + gt_matches_rescale[..., 0] # idx = y * w + x + + # Filter: + b_ids, i_ids, j_ids = map(lambda x: x[gt_matches_non_padding_mask & in_boundary_mask], [all_b_idxs, all_i_idxs, all_j_idxs]) + + j_uq_indices = get_unique_indices(torch.stack([b_ids, j_ids], dim=-1)) + b_ids, i_ids, j_ids = map(lambda x: x[j_uq_indices], [b_ids, i_ids, j_ids]) + + i_uq_indices = get_unique_indices(torch.stack([b_ids, i_ids], dim=-1)) + b_ids, i_ids, j_ids = map(lambda x: x[i_uq_indices], [b_ids, i_ids, j_ids]) + + kpts_valid_mask[b_ids, i_ids] = gt_matches_non_padding_mask[b_ids, j_ids] + kpts_warpped[b_ids, i_ids] = gt_matches[..., 2:][b_ids, j_ids] + + return kpts_valid_mask, kpts_warpped + + +@torch.no_grad() +def homo_warp_kpts(kpts0, norm_pixel_mat, homo_sample_normed, original_size0=None, original_size1=None): + """ + original_size1: N * 2, (h, w) + """ + normed_kpts0_h = norm_pixel_mat @ torch.cat([kpts0, torch.ones_like(kpts0[:, :, [0]])], dim=-1).transpose(2, 1) # (N * 3 * L) + kpts_warpped_h = (torch.linalg.inv(norm_pixel_mat) @ homo_sample_normed @ normed_kpts0_h).transpose(2, 1) # (N * L * 3) + kpts_warpped = kpts_warpped_h[..., :2] / kpts_warpped_h[..., [2]] # N * L * 2 + valid_mask = (kpts_warpped[..., 0] > 0) & (kpts_warpped[..., 0] < original_size1[:, [1]]) & (kpts_warpped[..., 1] > 0) \ + & (kpts_warpped[..., 1] < original_size1[:, [0]]) # N * L + if original_size0 is not None: + valid_mask *= (kpts0[..., 0] > 0) & (kpts0[..., 0] < original_size0[:, [1]]) & (kpts0[..., 1] > 0) \ + & (kpts0[..., 1] < original_size0[:, [0]]) # N * L + + return valid_mask, kpts_warpped + +@torch.no_grad() +# if using mask in homo warp(for coarse supervision) +def homo_warp_kpts_with_mask(kpts0, scale, depth_mask, norm_pixel_mat, homo_sample_normed, original_size0=None, original_size1=None): + """ + original_size1: N * 2, (h, w) + """ + normed_kpts0_h = norm_pixel_mat @ torch.cat([kpts0, torch.ones_like(kpts0[:, :, [0]])], dim=-1).transpose(2, 1) # (N * 3 * L) + kpts_warpped_h = (torch.linalg.inv(norm_pixel_mat) @ homo_sample_normed @ normed_kpts0_h).transpose(2, 1) # (N * L * 3) + kpts_warpped = kpts_warpped_h[..., :2] / kpts_warpped_h[..., [2]] # N * L * 2 + # get coarse-level depth_mask + depth_mask_coarse = depth_mask[:, :, ::scale, ::scale] + depth_mask_coarse = depth_mask_coarse.reshape(depth_mask.shape[0], -1) + + valid_mask = (kpts_warpped[..., 0] > 0) & (kpts_warpped[..., 0] < original_size1[:, [1]]) & (kpts_warpped[..., 1] > 0) \ + & (kpts_warpped[..., 1] < original_size1[:, [0]]) & (depth_mask_coarse != 0) # N * L + if original_size0 is not None: + valid_mask *= (kpts0[..., 0] > 0) & (kpts0[..., 0] < original_size0[:, [1]]) & (kpts0[..., 1] > 0) \ + & (kpts0[..., 1] < original_size0[:, [0]]) & (depth_mask_coarse != 0) # N * L + + return valid_mask, kpts_warpped + +@torch.no_grad() +# if using mask in homo warp(for fine supervision) +def homo_warp_kpts_with_mask_f(kpts0, depth_mask, norm_pixel_mat, homo_sample_normed, original_size0=None, original_size1=None): + """ + original_size1: N * 2, (h, w) + """ + normed_kpts0_h = norm_pixel_mat @ torch.cat([kpts0, torch.ones_like(kpts0[:, :, [0]])], dim=-1).transpose(2, 1) # (N * 3 * L) + kpts_warpped_h = (torch.linalg.inv(norm_pixel_mat) @ homo_sample_normed @ normed_kpts0_h).transpose(2, 1) # (N * L * 3) + kpts_warpped = kpts_warpped_h[..., :2] / kpts_warpped_h[..., [2]] # N * L * 2 + valid_mask = (kpts_warpped[..., 0] > 0) & (kpts_warpped[..., 0] < original_size1[:, [1]]) & (kpts_warpped[..., 1] > 0) \ + & (kpts_warpped[..., 1] < original_size1[:, 
[0]]) & (depth_mask != 0) # N * L + if original_size0 is not None: + valid_mask *= (kpts0[..., 0] > 0) & (kpts0[..., 0] < original_size0[:, [1]]) & (kpts0[..., 1] > 0) \ + & (kpts0[..., 1] < original_size0[:, [0]]) & (depth_mask != 0) # N * L + + return valid_mask, kpts_warpped + +@torch.no_grad() +def homo_warp_kpts_glue(kpts0, homo, original_size0=None, original_size1=None): + """ + original_size1: N * 2, (h, w) + """ + kpts_warpped = warp_points_torch(kpts0, homo, inverse=False) + valid_mask = (kpts_warpped[..., 0] > 0) & (kpts_warpped[..., 0] < original_size1[:, [1]]) & (kpts_warpped[..., 1] > 0) \ + & (kpts_warpped[..., 1] < original_size1[:, [0]]) # N * L + if original_size0 is not None: + valid_mask *= (kpts0[..., 0] > 0) & (kpts0[..., 0] < original_size0[:, [1]]) & (kpts0[..., 1] > 0) \ + & (kpts0[..., 1] < original_size0[:, [0]]) # N * L + return valid_mask, kpts_warpped + +@torch.no_grad() +# if using mask in homo warp(for coarse supervision) +def homo_warp_kpts_glue_with_mask(kpts0, scale, depth_mask, homo, original_size0=None, original_size1=None): + """ + original_size1: N * 2, (h, w) + """ + kpts_warpped = warp_points_torch(kpts0, homo, inverse=False) + # get coarse-level depth_mask + depth_mask_coarse = depth_mask[:, :, ::scale, ::scale] + depth_mask_coarse = depth_mask_coarse.reshape(depth_mask.shape[0], -1) + + valid_mask = (kpts_warpped[..., 0] > 0) & (kpts_warpped[..., 0] < original_size1[:, [1]]) & (kpts_warpped[..., 1] > 0) \ + & (kpts_warpped[..., 1] < original_size1[:, [0]]) & (depth_mask_coarse != 0) # N * L + if original_size0 is not None: + valid_mask *= (kpts0[..., 0] > 0) & (kpts0[..., 0] < original_size0[:, [1]]) & (kpts0[..., 1] > 0) \ + & (kpts0[..., 1] < original_size0[:, [0]]) & (depth_mask_coarse != 0) # N * L + return valid_mask, kpts_warpped + +@torch.no_grad() +# if using mask in homo warp(for fine supervision) +def homo_warp_kpts_glue_with_mask_f(kpts0, depth_mask, homo, original_size0=None, original_size1=None): + """ + original_size1: N * 2, (h, w) + """ + kpts_warpped = warp_points_torch(kpts0, homo, inverse=False) + valid_mask = (kpts_warpped[..., 0] > 0) & (kpts_warpped[..., 0] < original_size1[:, [1]]) & (kpts_warpped[..., 1] > 0) \ + & (kpts_warpped[..., 1] < original_size1[:, [0]]) & (depth_mask != 0) # N * L + if original_size0 is not None: + valid_mask *= (kpts0[..., 0] > 0) & (kpts0[..., 0] < original_size0[:, [1]]) & (kpts0[..., 1] > 0) \ + & (kpts0[..., 1] < original_size0[:, [0]]) & (depth_mask != 0) # N * L + return valid_mask, kpts_warpped \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/src/loftr/utils/position_encoding.py b/imcui/third_party/MatchAnything/src/loftr/utils/position_encoding.py new file mode 100644 index 0000000000000000000000000000000000000000..e4a4b4780943617588cb193efb51a261ebc17cda --- /dev/null +++ b/imcui/third_party/MatchAnything/src/loftr/utils/position_encoding.py @@ -0,0 +1,131 @@ +import math +import torch +from torch import nn + + +class PositionEncodingSine(nn.Module): + """ + This is a sinusoidal position encoding that generalized to 2-dimensional images + """ + + def __init__(self, d_model, max_shape=(256, 256), temp_bug_fix=True, npe=False): + """ + Args: + max_shape (tuple): for 1/8 featmap, the max length of 256 corresponds to 2048 pixels + temp_bug_fix (bool): As noted in this [issue](https://github.com/zju3dv/LoFTR/issues/41), + the original implementation of LoFTR includes a bug in the pos-enc impl, which has little impact + on the final performance. 
For now, we keep both impls for backward compatability. + We will remove the buggy impl after re-training all variants of our released models. + """ + super().__init__() + + pe = torch.zeros((d_model, *max_shape)) + y_position = torch.ones(max_shape).cumsum(0).float().unsqueeze(0) + x_position = torch.ones(max_shape).cumsum(1).float().unsqueeze(0) + + assert npe is not None + if npe is not None: + if isinstance(npe, bool): + train_res_H, train_res_W, test_res_H, test_res_W = 832, 832, 832, 832 + print('loftr no npe!!!!', npe) + else: + print('absnpe!!!!', npe) + train_res_H, train_res_W, test_res_H, test_res_W = npe[0], npe[1], npe[2], npe[3] # train_res_H, train_res_W, test_res_H, test_res_W + y_position, x_position = y_position * train_res_H / test_res_H, x_position * train_res_W / test_res_W + + if temp_bug_fix: + div_term = torch.exp(torch.arange(0, d_model//2, 2).float() * (-math.log(10000.0) / (d_model//2))) + else: # a buggy implementation (for backward compatability only) + div_term = torch.exp(torch.arange(0, d_model//2, 2).float() * (-math.log(10000.0) / d_model//2)) + div_term = div_term[:, None, None] # [C//4, 1, 1] + pe[0::4, :, :] = torch.sin(x_position * div_term) + pe[1::4, :, :] = torch.cos(x_position * div_term) + pe[2::4, :, :] = torch.sin(y_position * div_term) + pe[3::4, :, :] = torch.cos(y_position * div_term) + + self.register_buffer('pe', pe.unsqueeze(0), persistent=False) # [1, C, H, W] + + def forward(self, x): + """ + Args: + x: [N, C, H, W] + """ + return x + self.pe[:, :, :x.size(2), :x.size(3)] + +class RoPEPositionEncodingSine(nn.Module): + """ + This is a sinusoidal position encoding that generalized to 2-dimensional images + """ + + def __init__(self, d_model, max_shape=(256, 256), npe=None, ropefp16=True): + """ + Args: + max_shape (tuple): for 1/8 featmap, the max length of 256 corresponds to 2048 pixels + temp_bug_fix (bool): As noted in this [issue](https://github.com/zju3dv/LoFTR/issues/41), + the original implementation of LoFTR includes a bug in the pos-enc impl, which has little impact + on the final performance. For now, we keep both impls for backward compatability. + We will remove the buggy impl after re-training all variants of our released models. 
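+ npe, when provided, is (train_res_H, train_res_W, test_res_H, test_res_W);
+ grid positions are rescaled by the train/test resolution ratios before
+ the rotary sin/cos tables are built.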
+ """ + super().__init__() + + # pe = torch.zeros((d_model, *max_shape)) + # y_position = torch.ones(max_shape).cumsum(0).float().unsqueeze(-1) + # x_position = torch.ones(max_shape).cumsum(1).float().unsqueeze(-1) + i_position = torch.ones(max_shape).cumsum(0).float().unsqueeze(-1) # [H, 1] + j_position = torch.ones(max_shape).cumsum(1).float().unsqueeze(-1) # [W, 1] + + assert npe is not None + if npe is not None: + train_res_H, train_res_W, test_res_H, test_res_W = npe[0], npe[1], npe[2], npe[3] # train_res_H, train_res_W, test_res_H, test_res_W + i_position, j_position = i_position * train_res_H / test_res_H, j_position * train_res_W / test_res_W + + div_term = torch.exp(torch.arange(0, d_model//4, 1).float() * (-math.log(10000.0) / (d_model//4))) + div_term = div_term[None, None, :] # [1, 1, C//4] + # pe[0::4, :, :] = torch.sin(x_position * div_term) + # pe[1::4, :, :] = torch.cos(x_position * div_term) + # pe[2::4, :, :] = torch.sin(y_position * div_term) + # pe[3::4, :, :] = torch.cos(y_position * div_term) + sin = torch.zeros(*max_shape, d_model//2, dtype=torch.float16 if ropefp16 else torch.float32) + cos = torch.zeros(*max_shape, d_model//2, dtype=torch.float16 if ropefp16 else torch.float32) + sin[:, :, 0::2] = torch.sin(i_position * div_term).half() if ropefp16 else torch.sin(i_position * div_term) + sin[:, :, 1::2] = torch.sin(j_position * div_term).half() if ropefp16 else torch.sin(j_position * div_term) + cos[:, :, 0::2] = torch.cos(i_position * div_term).half() if ropefp16 else torch.cos(i_position * div_term) + cos[:, :, 1::2] = torch.cos(j_position * div_term).half() if ropefp16 else torch.cos(j_position * div_term) + + sin = sin.repeat_interleave(2, dim=-1) + cos = cos.repeat_interleave(2, dim=-1) + # self.register_buffer('pe', pe.unsqueeze(0), persistent=False) # [1, H, W, C] + self.register_buffer('sin', sin.unsqueeze(0), persistent=False) # [1, H, W, C//2] + self.register_buffer('cos', cos.unsqueeze(0), persistent=False) # [1, H, W, C//2] + + i_position4 = i_position.reshape(64,4,64,4,1)[...,0,:] + i_position4 = i_position4.mean(-3) + j_position4 = j_position.reshape(64,4,64,4,1)[:,0,...] 
+ j_position4 = j_position4.mean(-2) + sin4 = torch.zeros(max_shape[0]//4, max_shape[1]//4, d_model//2, dtype=torch.float16 if ropefp16 else torch.float32) + cos4 = torch.zeros(max_shape[0]//4, max_shape[1]//4, d_model//2, dtype=torch.float16 if ropefp16 else torch.float32) + sin4[:, :, 0::2] = torch.sin(i_position4 * div_term).half() if ropefp16 else torch.sin(i_position4 * div_term) + sin4[:, :, 1::2] = torch.sin(j_position4 * div_term).half() if ropefp16 else torch.sin(j_position4 * div_term) + cos4[:, :, 0::2] = torch.cos(i_position4 * div_term).half() if ropefp16 else torch.cos(i_position4 * div_term) + cos4[:, :, 1::2] = torch.cos(j_position4 * div_term).half() if ropefp16 else torch.cos(j_position4 * div_term) + sin4 = sin4.repeat_interleave(2, dim=-1) + cos4 = cos4.repeat_interleave(2, dim=-1) + self.register_buffer('sin4', sin4.unsqueeze(0), persistent=False) # [1, H, W, C//2] + self.register_buffer('cos4', cos4.unsqueeze(0), persistent=False) # [1, H, W, C//2] + + + + def forward(self, x, ratio=1): + """ + Args: + x: [N, H, W, C] + """ + if ratio == 4: + return (x * self.cos4[:, :x.size(1), :x.size(2), :]) + (self.rotate_half(x) * self.sin4[:, :x.size(1), :x.size(2), :]) + else: + return (x * self.cos[:, :x.size(1), :x.size(2), :]) + (self.rotate_half(x) * self.sin[:, :x.size(1), :x.size(2), :]) + + def rotate_half(self, x): + x = x.unflatten(-1, (-1, 2)) + x1, x2 = x.unbind(dim=-1) + return torch.stack((-x2, x1), dim=-1).flatten(start_dim=-2) \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/src/loftr/utils/supervision.py b/imcui/third_party/MatchAnything/src/loftr/utils/supervision.py new file mode 100644 index 0000000000000000000000000000000000000000..f57caa3a4b1498e31b5daca0289dd9381489f2b9 --- /dev/null +++ b/imcui/third_party/MatchAnything/src/loftr/utils/supervision.py @@ -0,0 +1,475 @@ +from math import log +from loguru import logger as loguru_logger + +import torch +import torch.nn.functional as F +from einops import rearrange, repeat +from kornia.utils import create_meshgrid + +from .geometry import warp_kpts, homo_warp_kpts, homo_warp_kpts_glue, homo_warp_kpts_with_mask, homo_warp_kpts_with_mask_f, homo_warp_kpts_glue_with_mask, homo_warp_kpts_glue_with_mask_f, warp_kpts_by_sparse_gt_matches_fast, warp_kpts_by_sparse_gt_matches_fine_chunks + +from kornia.geometry.subpix import dsnt +from kornia.utils.grid import create_meshgrid + +def static_vars(**kwargs): + def decorate(func): + for k in kwargs: + setattr(func, k, kwargs[k]) + return func + return decorate + +############## ↓ Coarse-Level supervision ↓ ############## + +@torch.no_grad() +def mask_pts_at_padded_regions(grid_pt, mask): + """For megadepth dataset, zero-padding exists in images""" + mask = repeat(mask, 'n h w -> n (h w) c', c=2) + grid_pt[~mask.bool()] = 0 + return grid_pt + + +@torch.no_grad() +def spvs_coarse(data, config): + """ + Update: + data (dict): { + "conf_matrix_gt": [N, hw0, hw1], + 'spv_b_ids': [M] + 'spv_i_ids': [M] + 'spv_j_ids': [M] + 'spv_w_pt0_i': [N, hw0, 2], in original image resolution + 'spv_pt1_i': [N, hw1, 2], in original image resolution + } + + NOTE: + - for scannet dataset, there're 3 kinds of resolution {i, c, f} + - for megadepth dataset, there're 4 kinds of resolution {i, i_resize, c, f} + """ + # 1. 
misc + device = data['image0'].device + N, _, H0, W0 = data['image0'].shape + _, _, H1, W1 = data['image1'].shape + + if 'loftr' in config.METHOD: + scale = config['LOFTR']['RESOLUTION'][0] + + scale0 = scale * data['scale0'][:, None] if 'scale0' in data else scale + scale1 = scale * data['scale1'][:, None] if 'scale0' in data else scale + h0, w0, h1, w1 = map(lambda x: x // scale, [H0, W0, H1, W1]) + + if config['LOFTR']['MATCH_COARSE']['MTD_SPVS'] and not config['LOFTR']['FORCE_LOOP_BACK']: + # 2. warp grids + # create kpts in meshgrid and resize them to image resolution + grid_pt0_c = create_meshgrid(h0, w0, False, device).reshape(1, h0*w0, 2).repeat(N, 1, 1) # [N, hw, 2] + grid_pt0_i = scale0 * grid_pt0_c + grid_pt1_c = create_meshgrid(h1, w1, False, device).reshape(1, h1*w1, 2).repeat(N, 1, 1) + grid_pt1_i = scale1 * grid_pt1_c + + correct_0to1 = torch.zeros((grid_pt0_i.shape[0], grid_pt0_i.shape[1]), dtype=torch.bool, device=grid_pt0_i.device) + w_pt0_i = torch.zeros_like(grid_pt0_i) + + valid_dpt_b_mask = data['T_0to1'].sum(dim=-1).sum(dim=-1) != 0 + valid_homo_warp_mask = (data['homography'].sum(dim=-1).sum(dim=-1) != 0) | (data['homo_sample_normed'].sum(dim=-1).sum(dim=-1) != 0) + valid_gt_match_warp_mask = (data['gt_matches_mask'][:, 0] != 0) # N + + if valid_homo_warp_mask.sum() != 0: + if data['homography'].sum()==0: + if 'homo_mask0' in data and (data['homo_mask0'].sum()!=0): # the key 'depth_mask' only exits when using the dataste "CommonDataSetHomoWarp" + correct_0to1_homo, w_pt0_i_homo = homo_warp_kpts_with_mask(grid_pt0_i[valid_homo_warp_mask], scale, data['homo_mask0'][valid_homo_warp_mask], data['norm_pixel_mat'][valid_homo_warp_mask], data['homo_sample_normed'][valid_homo_warp_mask], original_size1=data['origin_img_size1'][valid_homo_warp_mask]) + else: + correct_0to1_homo, w_pt0_i_homo = homo_warp_kpts(grid_pt0_i[valid_homo_warp_mask], data['norm_pixel_mat'][valid_homo_warp_mask], \ + data['homo_sample_normed'][valid_homo_warp_mask], original_size1=data['origin_img_size1'][valid_homo_warp_mask]) + else: + if 'homo_mask0' in data and (data['homo_mask0']==0).sum()!=0: + correct_0to1_homo, w_pt0_i_homo = homo_warp_kpts_glue_with_mask(grid_pt0_i[valid_homo_warp_mask], scale, data['homo_mask0'][valid_homo_warp_mask], data['homography'][valid_homo_warp_mask], original_size1=data['origin_img_size1'][valid_homo_warp_mask]) + else: + correct_0to1_homo, w_pt0_i_homo = homo_warp_kpts_glue(grid_pt0_i[valid_homo_warp_mask], data['homography'][valid_homo_warp_mask], \ + original_size1=data['origin_img_size1'][valid_homo_warp_mask]) + correct_0to1[valid_homo_warp_mask] = correct_0to1_homo + w_pt0_i[valid_homo_warp_mask] = w_pt0_i_homo + + if valid_gt_match_warp_mask.sum() != 0: + correct_0to1_dpt, w_pt0_i_dpt = warp_kpts_by_sparse_gt_matches_fast(grid_pt0_i[valid_gt_match_warp_mask], data['gt_matches'][valid_gt_match_warp_mask], scale0=scale0[valid_gt_match_warp_mask], current_h=h0, current_w=w0) + correct_0to1[valid_gt_match_warp_mask] = correct_0to1_dpt + w_pt0_i[valid_gt_match_warp_mask] = w_pt0_i_dpt + + if valid_dpt_b_mask.sum() != 0: + correct_0to1_dpt, w_pt0_i_dpt = warp_kpts(grid_pt0_i[valid_dpt_b_mask], data['depth0'][valid_dpt_b_mask], data['depth1'][valid_dpt_b_mask], data['T_0to1'][valid_dpt_b_mask], data['K0'][valid_dpt_b_mask], data['K1'][valid_dpt_b_mask], consistency_thr=0.05) + correct_0to1[valid_dpt_b_mask] = correct_0to1_dpt + w_pt0_i[valid_dpt_b_mask] = w_pt0_i_dpt + + w_pt0_c = w_pt0_i / scale1 + + # 3. 
check if mutual nearest neighbor + w_pt0_c_round = w_pt0_c[:, :, :].round() # [N, hw, 2] + if config.LOFTR.LOSS.COARSE_OVERLAP_WEIGHT: + w_pt0_c_error = (1.0 - 2*torch.abs(w_pt0_c - w_pt0_c_round)).prod(-1) + w_pt0_c_round = w_pt0_c_round.long() # [N, hw, 2] + nearest_index1 = w_pt0_c_round[..., 0] + w_pt0_c_round[..., 1] * w1 # [N, hw] + + # corner case: out of boundary + def out_bound_mask(pt, w, h): + return (pt[..., 0] < 0) + (pt[..., 0] >= w) + (pt[..., 1] < 0) + (pt[..., 1] >= h) + nearest_index1[out_bound_mask(w_pt0_c_round, w1, h1)] = -1 + + correct_0to1[:, 0] = False # ignore the top-left corner + + # 4. construct a gt conf_matrix + mask1 = torch.stack([data['mask1'].reshape(-1, h1*w1)[_b, _i] for _b, _i in enumerate(nearest_index1)], dim=0) + correct_0to1 = correct_0to1 * data['mask0'].reshape(-1, h0*w0) * mask1 + + conf_matrix_gt = torch.zeros(N, h0*w0, h1*w1, device=device, dtype=torch.bool) + b_ids, i_ids = torch.where(correct_0to1 != 0) + j_ids = nearest_index1[b_ids, i_ids] + valid_j_ids = j_ids != -1 + b_ids, i_ids, j_ids = map(lambda x: x[valid_j_ids], [b_ids, i_ids, j_ids]) + + conf_matrix_gt[b_ids, i_ids, j_ids] = 1 + + # overlap weight + if config.LOFTR.LOSS.COARSE_OVERLAP_WEIGHT: + conf_matrix_error_gt = w_pt0_c_error[b_ids, i_ids] + assert torch.all(conf_matrix_error_gt >= -0.001) + assert torch.all(conf_matrix_error_gt <= 1.001) + data.update({'conf_matrix_error_gt': conf_matrix_error_gt}) + data.update({'conf_matrix_gt': conf_matrix_gt}) + + # 5. save coarse matches(gt) for training fine level + if len(b_ids) == 0: + loguru_logger.warning(f"No groundtruth coarse match found for: {data['pair_names']}") + # this won't affect fine-level loss calculation + b_ids = torch.tensor([0], device=device) + i_ids = torch.tensor([0], device=device) + j_ids = torch.tensor([0], device=device) + + data.update({ + 'spv_b_ids': b_ids, + 'spv_i_ids': i_ids, + 'spv_j_ids': j_ids + }) + + data.update({'mkpts0_c_gt_b_ids': b_ids}) + data.update({'mkpts0_c_gt': torch.stack([i_ids % w0, i_ids // w0], dim=-1) * scale0[b_ids, 0]}) + data.update({'mkpts1_c_gt': torch.stack([j_ids % w1, j_ids // w1], dim=-1) * scale1[b_ids, 0]}) + + # 6. 
save intermediate results (for fast fine-level computation) + data.update({ + 'spv_w_pt0_i': w_pt0_i, + 'spv_pt1_i': grid_pt1_i, + # 'correct_0to1_c': correct_0to1 + }) + else: + raise NotImplementedError + +def compute_supervision_coarse(data, config): + spvs_coarse(data, config) + +@torch.no_grad() +def get_gt_flow(data, h, w): + device = data['image0'].device + B, _, H0, W0 = data['image0'].shape + scale = H0 / h + + scale0 = scale * data['scale0'][:, None] if 'scale0' in data else scale + scale1 = scale * data['scale1'][:, None] if 'scale0' in data else scale + + x1_n = torch.meshgrid( + *[ + torch.linspace( + -1 + 1 / n, 1 - 1 / n, n, device=device + ) + for n in (B, h, w) + ] + ) + grid_coord = torch.stack((x1_n[2], x1_n[1]), dim=-1).reshape(B, h*w, 2) # normalized + grid_coord = torch.stack( + (w * (grid_coord[..., 0] + 1) / 2, h * (grid_coord[..., 1] + 1) / 2), dim=-1 + ) # [-1+1/h, 1-1/h] -> [0.5, h-0.5] + grid_coord_in_origin = grid_coord * scale0 + + correct_0to1 = torch.zeros((grid_coord_in_origin.shape[0], grid_coord_in_origin.shape[1]), dtype=torch.bool, device=device) + w_pt0_i = torch.zeros_like(grid_coord_in_origin) + + valid_dpt_b_mask = data['T_0to1'].sum(dim=-1).sum(dim=-1) != 0 + valid_homo_warp_mask = (data['homography'].sum(dim=-1).sum(dim=-1) != 0) | (data['homo_sample_normed'].sum(dim=-1).sum(dim=-1) != 0) + valid_gt_match_warp_mask = (data['gt_matches_mask'] != 0)[:, 0] + + if valid_homo_warp_mask.sum() != 0: + if data['homography'].sum()==0: + if 'homo_mask0' in data and (data['homo_mask0'].sum()!=0): + # data['load_mask'] = True or False, data['depth_mask'] = depth_mask or None + correct_0to1_homo, w_pt0_i_homo = homo_warp_kpts_with_mask(grid_coord_in_origin[valid_homo_warp_mask], int(scale), data['homo_mask0'][valid_homo_warp_mask], data['norm_pixel_mat'][valid_homo_warp_mask], \ + data['homo_sample_normed'][valid_homo_warp_mask], original_size1=data['origin_img_size1'][valid_homo_warp_mask]) + else: + correct_0to1_homo, w_pt0_i_homo = homo_warp_kpts(grid_coord_in_origin[valid_homo_warp_mask], data['norm_pixel_mat'][valid_homo_warp_mask], data['homo_sample_normed'][valid_homo_warp_mask], \ + original_size1=data['origin_img_size1'][valid_homo_warp_mask]) + else: + if 'homo_mask0' in data and (data['homo_mask0']==0).sum()!=0: + correct_0to1_homo, w_pt0_i_homo = homo_warp_kpts_glue_with_mask(grid_coord_in_origin[valid_homo_warp_mask], int(scale), data['homo_mask0'][valid_homo_warp_mask], data['homography'][valid_homo_warp_mask], \ + original_size1=data['origin_img_size1'][valid_homo_warp_mask]) + else: + correct_0to1_homo, w_pt0_i_homo = homo_warp_kpts_glue(grid_coord_in_origin[valid_homo_warp_mask], data['homography'][valid_homo_warp_mask], \ + original_size1=data['origin_img_size1'][valid_homo_warp_mask]) + correct_0to1[valid_homo_warp_mask] = correct_0to1_homo + w_pt0_i[valid_homo_warp_mask] = w_pt0_i_homo + + if valid_gt_match_warp_mask.sum() != 0: + correct_0to1_dpt, w_pt0_i_dpt = warp_kpts_by_sparse_gt_matches_fast(grid_coord_in_origin[valid_gt_match_warp_mask], data['gt_matches'][valid_gt_match_warp_mask], scale0=scale0[valid_gt_match_warp_mask], current_h=h, current_w=w) + correct_0to1[valid_gt_match_warp_mask] = correct_0to1_dpt + w_pt0_i[valid_gt_match_warp_mask] = w_pt0_i_dpt + if valid_dpt_b_mask.sum() != 0: + correct_0to1_dpt, w_pt0_i_dpt = warp_kpts(grid_coord_in_origin[valid_dpt_b_mask], data['depth0'][valid_dpt_b_mask], data['depth1'][valid_dpt_b_mask], data['T_0to1'][valid_dpt_b_mask], data['K0'][valid_dpt_b_mask], data['K1'][valid_dpt_b_mask], 
consistency_thr=0.05)
+        correct_0to1[valid_dpt_b_mask] = correct_0to1_dpt
+        w_pt0_i[valid_dpt_b_mask] = w_pt0_i_dpt
+
+    w_pt0_c = w_pt0_i / scale1
+
+    def out_bound_mask(pt, w, h):
+        return (pt[..., 0] < 0) + (pt[..., 0] >= w) + (pt[..., 1] < 0) + (pt[..., 1] >= h)
+    correct_0to1[out_bound_mask(w_pt0_c, w, h)] = 0
+
+    w_pt0_n = torch.stack(
+        (2 * w_pt0_c[..., 0] / w - 1, 2 * w_pt0_c[..., 1] / h - 1), dim=-1
+    )  # from [0.5, h-0.5] -> [-1+1/h, 1-1/h]
+    # w_pt1_c = w_pt1_i / scale0
+
+    if scale > 8:
+        data.update({'mkpts0_c_gt': grid_coord_in_origin[correct_0to1]})
+        data.update({'mkpts1_c_gt': w_pt0_i[correct_0to1]})
+
+    return w_pt0_n.reshape(B, h, w, 2), correct_0to1.float().reshape(B, h, w)
+
+@torch.no_grad()
+def compute_roma_supervision(data, config):
+    gt_flow = {}
+    for scale in list(data["corresps"]):
+        scale_corresps = data["corresps"][scale]
+        flow_pre_delta = rearrange(scale_corresps['flow'] if 'flow' in scale_corresps else scale_corresps['dense_flow'], "b d h w -> b h w d")
+        b, h, w, d = flow_pre_delta.shape
+        gt_warp, gt_prob = get_gt_flow(data, h, w)
+        gt_flow[scale] = {'gt_warp': gt_warp, "gt_prob": gt_prob}
+
+    data.update({"gt": gt_flow})
+
+############## ↓ Fine-Level supervision ↓ ##############
+
+@static_vars(counter = 0)
+@torch.no_grad()
+def spvs_fine(data, config, logger = None):
+    """
+    Update:
+        data (dict):{
+            "expec_f_gt": [M, 2]}
+    """
+    # 1. misc
+    # w_pt0_i, pt1_i = data.pop('spv_w_pt0_i'), data.pop('spv_pt1_i')
+    if config.LOFTR.FINE.MTD_SPVS:
+        pt1_i = data['spv_pt1_i']
+    else:
+        spv_w_pt0_i, pt1_i = data['spv_w_pt0_i'], data['spv_pt1_i']
+    if 'loftr' in config.METHOD:
+        scale = config['LOFTR']['RESOLUTION'][1]
+        scale_c = config['LOFTR']['RESOLUTION'][0]
+        radius = config['LOFTR']['FINE_WINDOW_SIZE'] // 2
+
+    # 2. get coarse prediction
+    b_ids, i_ids, j_ids = data['b_ids'], data['i_ids'], data['j_ids']
+
+    # 3.
compute gt + scalei0 = scale * data['scale0'][b_ids] if 'scale0' in data else scale + scale0 = scale * data['scale0'] if 'scale0' in data else scale + scalei1 = scale * data['scale1'][b_ids] if 'scale0' in data else scale + + if config.LOFTR.FINE.MTD_SPVS: + W = config['LOFTR']['FINE_WINDOW_SIZE'] + WW = W*W + device = data['image0'].device + + N, _, H0, W0 = data['image0'].shape + _, _, H1, W1 = data['image1'].shape + + if config.LOFTR.ALIGN_CORNER is False: + hf0, wf0, hf1, wf1 = data['hw0_f'][0], data['hw0_f'][1], data['hw1_f'][0], data['hw1_f'][1] + hc0, wc0, hc1, wc1 = data['hw0_c'][0], data['hw0_c'][1], data['hw1_c'][0], data['hw1_c'][1] + # loguru_logger.info('hf0, wf0, hf1, wf1', hf0, wf0, hf1, wf1) + else: + hf0, wf0, hf1, wf1 = map(lambda x: x // scale, [H0, W0, H1, W1]) + hc0, wc0, hc1, wc1 = map(lambda x: x // scale_c, [H0, W0, H1, W1]) + + m = b_ids.shape[0] + if m == 0: + conf_matrix_f_gt = torch.zeros(m, WW, WW, device=device) + + data.update({'conf_matrix_f_gt': conf_matrix_f_gt}) + if config.LOFTR.LOSS.FINE_OVERLAP_WEIGHT: + conf_matrix_f_error_gt = torch.zeros(1, device=device) + data.update({'conf_matrix_f_error_gt': conf_matrix_f_error_gt}) + if config.LOFTR.MATCH_FINE.MULTI_REGRESS: + data.update({'expec_f': torch.zeros(1, 3, device=device)}) + data.update({'expec_f_gt': torch.zeros(1, 2, device=device)}) + + if config.LOFTR.MATCH_FINE.LOCAL_REGRESS: + data.update({'expec_f': torch.zeros(1, 2, device=device)}) + data.update({'expec_f_gt': torch.zeros(1, 2, device=device)}) + else: + grid_pt0_f = create_meshgrid(hf0, wf0, False, device) - W // 2 + 0.5 # [1, hf0, wf0, 2] # use fine coordinates + # grid_pt0_f = create_meshgrid(hf0, wf0, False, device) + 0.5 # [1, hf0, wf0, 2] # use fine coordinates + grid_pt0_f = rearrange(grid_pt0_f, 'n h w c -> n c h w') + # 1. unfold(crop) all local windows + if config.LOFTR.ALIGN_CORNER is False: # even windows + if config.LOFTR.MATCH_FINE.MULTI_REGRESS or (config.LOFTR.MATCH_FINE.LOCAL_REGRESS and W == 10): + grid_pt0_f_unfold = F.unfold(grid_pt0_f, kernel_size=(W, W), stride=W-2, padding=1) # overlap windows W-2 padding=1 + else: + grid_pt0_f_unfold = F.unfold(grid_pt0_f, kernel_size=(W, W), stride=W, padding=0) + else: + grid_pt0_f_unfold = F.unfold(grid_pt0_f[..., :-1, :-1], kernel_size=(W, W), stride=W, padding=W//2) + grid_pt0_f_unfold = rearrange(grid_pt0_f_unfold, 'n (c ww) l -> n l ww c', ww=W**2) # [1, hc0*wc0, W*W, 2] + grid_pt0_f_unfold = repeat(grid_pt0_f_unfold[0], 'l ww c -> N l ww c', N=N) + + # 2. 
select only the predicted matches
+            grid_pt0_f_unfold = grid_pt0_f_unfold[data['b_ids'], data['i_ids']]  # [m, ww, 2]
+            grid_pt0_f_unfold = scalei0[:, None, :] * grid_pt0_f_unfold  # [m, ww, 2]
+
+            # use depth mask
+            if 'homo_mask0' in data and (data['homo_mask0'].sum() != 0):
+                # depth_mask --> (n, 1, hf, wf)
+                homo_mask0 = data['homo_mask0']
+                homo_mask0 = F.unfold(homo_mask0[..., :-1, :-1], kernel_size=(W, W), stride=W, padding=W//2)
+                homo_mask0 = rearrange(homo_mask0, 'n (c ww) l -> n l ww c', ww=W**2)  # [1, hc0*wc0, W*W, 1]
+                homo_mask0 = repeat(homo_mask0[0], 'l ww c -> N l ww c', N=N)
+                # select only the predicted matches
+                homo_mask0 = homo_mask0[data['b_ids'], data['i_ids']]
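As an aside, the `F.unfold` trick used above slices the fine-level grid into one W x W window per coarse cell; a toy illustration with made-up sizes (W = 2 on a 4 x 4 grid):

```python
import torch
import torch.nn.functional as F

W = 2
grid = torch.arange(16, dtype=torch.float32).reshape(1, 1, 4, 4)  # [N, C, H, W]
windows = F.unfold(grid, kernel_size=(W, W), stride=W)            # [1, C*W*W, L]
windows = windows.transpose(1, 2)                                 # [1, L, W*W], cf. the rearrange above
print(windows[0, 0])  # top-left 2x2 block: tensor([0., 1., 4., 5.])
```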
+
+            correct_0to1_f_list, w_pt0_i_list = [], []
+
+            correct_0to1_f = torch.zeros(m, WW, device=device, dtype=torch.bool)
+            w_pt0_i = torch.zeros(m, WW, 2, device=device, dtype=torch.float32)
+            for b in range(N):
+                mask = b_ids == b
+
+                match = int(mask.sum())
+                skip_reshape = False
+                if match == 0:
+                    loguru_logger.warning("no pred fine matches, skip!")
+                    continue
+                if (data['homography'][b].sum() != 0) | (data['homo_sample_normed'][b].sum() != 0):
+                    if data['homography'][b].sum() == 0:
+                        if 'homo_mask0' in data and (data['homo_mask0'].sum() != 0):
+                            correct_0to1_f_mask, w_pt0_i_mask = homo_warp_kpts_with_mask_f(grid_pt0_f_unfold[mask].reshape(1, -1, 2), homo_mask0[mask].reshape(1, -1), data['norm_pixel_mat'][[b]], \
+                                data['homo_sample_normed'][[b]], data['origin_img_size0'][[b]], data['origin_img_size1'][[b]])
+                        else:
+                            correct_0to1_f_mask, w_pt0_i_mask = homo_warp_kpts(grid_pt0_f_unfold[mask].reshape(1, -1, 2), data['norm_pixel_mat'][[b]], \
+                                data['homo_sample_normed'][[b]], data['origin_img_size0'][[b]], data['origin_img_size1'][[b]])
+                    else:
+                        if 'homo_mask0' in data and (data['homo_mask0'].sum() != 0):
+                            correct_0to1_f_mask, w_pt0_i_mask = homo_warp_kpts_glue_with_mask_f(grid_pt0_f_unfold[mask].reshape(1, -1, 2), homo_mask0[mask].reshape(1, -1), data['homography'][[b]], \
+                                data['origin_img_size0'][[b]], data['origin_img_size1'][[b]])
+                        else:
+                            correct_0to1_f_mask, w_pt0_i_mask = homo_warp_kpts_glue(grid_pt0_f_unfold[mask].reshape(1, -1, 2), data['homography'][[b]], \
+                                data['origin_img_size0'][[b]], data['origin_img_size1'][[b]])
+                elif data['T_0to1'][b].sum() != 0:
+                    correct_0to1_f_mask, w_pt0_i_mask = warp_kpts(grid_pt0_f_unfold[mask].reshape(1, -1, 2), data['depth0'][[b], ...],
+                                                                  data['depth1'][[b], ...], data['T_0to1'][[b], ...],
+                                                                  data['K0'][[b], ...], data['K1'][[b], ...])  # [k, WW], [k, WW, 2]
+                elif data['gt_matches_mask'][b].sum() != 0:
+                    correct_0to1_f_mask, w_pt0_i_mask = warp_kpts_by_sparse_gt_matches_fine_chunks(grid_pt0_f_unfold[mask], gt_matches=data['gt_matches'][[b]], dist_thr=scale0[[b]].max(dim=-1)[0])
+                    skip_reshape = True
+                correct_0to1_f[mask] = correct_0to1_f_mask.reshape(match, WW) if not skip_reshape else correct_0to1_f_mask
+                w_pt0_i[mask] = w_pt0_i_mask.reshape(match, WW, 2) if not skip_reshape else w_pt0_i_mask
+
+            delta_w_pt0_i = w_pt0_i - pt1_i[b_ids, j_ids][:, None, :]  # [m, WW, 2]
+            delta_w_pt0_f = delta_w_pt0_i / scalei1[:, None, :] + W // 2 - 0.5
+            delta_w_pt0_f_round = delta_w_pt0_f[:, :, :].round()
+            if config.LOFTR.LOSS.FINE_OVERLAP_WEIGHT and config.LOFTR.LOSS.FINE_OVERLAP_WEIGHT2:
+                w_pt0_f_error = (1.0 - torch.abs(delta_w_pt0_f - delta_w_pt0_f_round)).prod(-1)  # [0.25, 1]
+            elif config.LOFTR.LOSS.FINE_OVERLAP_WEIGHT:
+                w_pt0_f_error = (1.0 - 2*torch.abs(delta_w_pt0_f - delta_w_pt0_f_round)).prod(-1)  # [0, 1]
+            delta_w_pt0_f_round = delta_w_pt0_f_round.long()
+
+            nearest_index1 = delta_w_pt0_f_round[..., 0] + delta_w_pt0_f_round[..., 1] * W  # [m, WW]
+
+            def out_bound_mask(pt, w, h):
+                return (pt[..., 0] < 0) + (pt[..., 0] >= w) + (pt[..., 1] < 0) + (pt[..., 1] >= h)
+            ob_mask = out_bound_mask(delta_w_pt0_f_round, W, W)
+            nearest_index1[ob_mask] = 0
+
+            correct_0to1_f[ob_mask] = 0
+            m_ids_d, i_ids_d = torch.where(correct_0to1_f != 0)
+
+            j_ids_d = nearest_index1[m_ids_d, i_ids_d]
+
+            # For plotting:
+            mkpts0_f_gt = grid_pt0_f_unfold[m_ids_d, i_ids_d]  # [m, 2]
+            mkpts1_f_gt = w_pt0_i[m_ids_d, i_ids_d]  # [m, 2]
+            data.update({'mkpts0_f_gt_b_ids': m_ids_d})
+            data.update({'mkpts0_f_gt': mkpts0_f_gt})
+            data.update({'mkpts1_f_gt': mkpts1_f_gt})
+
+            if config.LOFTR.MATCH_FINE.MULTI_REGRESS:
+                assert not config.LOFTR.MATCH_FINE.LOCAL_REGRESS
+                expec_f_gt = delta_w_pt0_f - W // 2 + 0.5  # use delta (e.g. [-3.5, 3.5]) in regression rather than [0, W] (e.g. [0, 7])
+                expec_f_gt = expec_f_gt[m_ids_d, i_ids_d] / (W // 2 - 1)  # specific radius for overlapped even windows & align_corner=False
+                data.update({'expec_f_gt': expec_f_gt})
+                data.update({'m_ids_d': m_ids_d, 'i_ids_d': i_ids_d})
+            else:  # spv fine dual softmax
+                if config.LOFTR.MATCH_FINE.LOCAL_REGRESS:
+                    expec_f_gt = delta_w_pt0_f - delta_w_pt0_f_round
+
+                    # mask fine windows border
+                    j_ids_d_il, j_ids_d_jl = j_ids_d // W, j_ids_d % W
+                    if config.LOFTR.MATCH_FINE.LOCAL_REGRESS_NOMASK:
+                        mask = None
+                        m_ids_dl, i_ids_dl, j_ids_d_il, j_ids_d_jl = m_ids_d.to(torch.long), i_ids_d.to(torch.long), j_ids_d_il.to(torch.long), j_ids_d_jl.to(torch.long)
+                    else:
+                        mask = (j_ids_d_il >= 1) & (j_ids_d_il < W-1) & (j_ids_d_jl >= 1) & (j_ids_d_jl < W-1)
+                        if W == 10:
+                            i_ids_d_il, i_ids_d_jl = i_ids_d // W, i_ids_d % W
+                            mask = mask & (i_ids_d_il >= 1) & (i_ids_d_il <= W-2) & (i_ids_d_jl >= 1) & (i_ids_d_jl <= W-2)
+
+                        m_ids_dl, i_ids_dl, j_ids_d_il, j_ids_d_jl = m_ids_d[mask].to(torch.long), i_ids_d[mask].to(torch.long), j_ids_d_il[mask].to(torch.long), j_ids_d_jl[mask].to(torch.long)
+                    if mask is not None:
+                        loguru_logger.info(f'percent of gt mask.sum / mask.numel: {mask.sum().float()/mask.numel():.2f}')
+                    if m_ids_dl.numel() == 0:
+                        loguru_logger.warning(f"No groundtruth fine match found for local regress: {data['pair_names']}")
+                        data.update({'expec_f_gt': torch.zeros(1, 2, device=device)})
+                        data.update({'expec_f': torch.zeros(1, 2, device=device)})
+                    else:
+                        expec_f_gt = expec_f_gt[m_ids_dl, i_ids_dl]
+                        data.update({"expec_f_gt": expec_f_gt})
+
+                    data.update({"m_ids_dl": m_ids_dl,
+                                 "i_ids_dl": i_ids_dl,
+                                 "j_ids_d_il": j_ids_d_il,
+                                 "j_ids_d_jl": j_ids_d_jl
+                                 })
+                else:  # no fine regress
+                    pass
+
+            # spv fine dual softmax
+            conf_matrix_f_gt = torch.zeros(m, WW, WW, device=device, dtype=torch.bool)
+            conf_matrix_f_gt[m_ids_d, i_ids_d, j_ids_d] = 1
+            data.update({'conf_matrix_f_gt': conf_matrix_f_gt})
+            if config.LOFTR.LOSS.FINE_OVERLAP_WEIGHT:
+                w_pt0_f_error = w_pt0_f_error[m_ids_d, i_ids_d]
+                assert torch.all(w_pt0_f_error >= -0.001)
+                assert torch.all(w_pt0_f_error <= 1.001)
+                data.update({'conf_matrix_f_error_gt': w_pt0_f_error})
+
+            if conf_matrix_f_gt.sum() == 0:
+                loguru_logger.info('[no gt plot] no fine matches to supervise')
+
+    else:
+        expec_f_gt = (spv_w_pt0_i[b_ids, i_ids] - pt1_i[b_ids, j_ids]) / scalei1 / 4  # [M, 2]
+        data.update({"expec_f_gt": expec_f_gt})
+
+
+def compute_supervision_fine(data, config, logger=None):
+    data_source = data['dataset_name'][0]
+    if data_source.lower() in ['scannet', 'megadepth']:
+        spvs_fine(data, config, logger)
+    else:
+        raise NotImplementedError
\ No newline at end of file
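To make the coarse recipe in `spvs_coarse` concrete, here is a self-contained toy version of the same construction: warp the coarse grid points, round each to its nearest cell in the other image, flatten (x, y) to a column index, and scatter into a boolean confidence matrix. Sizes and the "warp" below are invented:

```python
import torch

h0 = w0 = h1 = w1 = 4
ys, xs = torch.meshgrid(torch.arange(h0), torch.arange(w0), indexing='ij')
grid = torch.stack([xs, ys], dim=-1).reshape(-1, 2).float()  # (x, y) per coarse cell

w_pt = grid + 0.3                            # stand-in for the homography/depth warp
w_round = w_pt.round().long()
j_ids = w_round[:, 0] + w_round[:, 1] * w1   # flatten (x, y) -> index in image1
valid = (w_round[:, 0] >= 0) & (w_round[:, 0] < w1) & \
        (w_round[:, 1] >= 0) & (w_round[:, 1] < h1)

conf_gt = torch.zeros(h0 * w0, h1 * w1, dtype=torch.bool)
conf_gt[torch.arange(h0 * w0)[valid], j_ids[valid]] = True
print(conf_gt.sum().item(), "gt coarse matches")  # 16 here
```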
diff --git a/imcui/third_party/MatchAnything/src/optimizers/__init__.py b/imcui/third_party/MatchAnything/src/optimizers/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..2c946086518db86e6775774eca36d67188c8b657
--- /dev/null
+++ b/imcui/third_party/MatchAnything/src/optimizers/__init__.py
@@ -0,0 +1,50 @@
+import torch
+from torch.optim.lr_scheduler import MultiStepLR, CosineAnnealingLR, ExponentialLR
+
+
+def build_optimizer(model, config):
+    name = config.TRAINER.OPTIMIZER
+    lr = config.TRAINER.TRUE_LR
+
+    if name == "adam":
+        return torch.optim.Adam(model.parameters(), lr=lr, weight_decay=config.TRAINER.ADAM_DECAY, eps=config.TRAINER.OPTIMIZER_EPS)
+    elif name == "adamw":
+        if ("ROMA" in config.METHOD) or ("DKM" in config.METHOD):
+            # Separate the backbone params from the rest so the pretrained
+            # encoder can be trained with a smaller learning rate:
+            keyword = 'model.encoder'
+            backbone_params = [param for name, param in list(filter(lambda kv: keyword in kv[0], model.named_parameters()))]
+            base_params = [param for name, param in list(filter(lambda kv: keyword not in kv[0], model.named_parameters()))]
+            params = [{'params': backbone_params, 'lr': lr * 0.05}, {'params': base_params}]
+            # Pass the param groups (not model.parameters()), otherwise the
+            # reduced backbone learning rate never takes effect:
+            return torch.optim.AdamW(params, lr=lr, weight_decay=config.TRAINER.ADAMW_DECAY, eps=config.TRAINER.OPTIMIZER_EPS)
+        else:
+            return torch.optim.AdamW(model.parameters(), lr=lr, weight_decay=config.TRAINER.ADAMW_DECAY, eps=config.TRAINER.OPTIMIZER_EPS)
+    else:
+        raise ValueError(f"TRAINER.OPTIMIZER = {name} is not a valid optimizer!")
+
+
+def build_scheduler(config, optimizer):
+    """
+    Returns:
+        scheduler (dict):{
+            'scheduler': lr_scheduler,
+            'interval': 'step',  # or 'epoch'
+            'monitor': 'val_f1', (optional)
+            'frequency': x, (optional)
+        }
+    """
+    scheduler = {'interval': config.TRAINER.SCHEDULER_INTERVAL}
+    name = config.TRAINER.SCHEDULER
+
+    if name == 'MultiStepLR':
+        scheduler.update(
+            {'scheduler': MultiStepLR(optimizer, config.TRAINER.MSLR_MILESTONES, gamma=config.TRAINER.MSLR_GAMMA)})
+    elif name == 'CosineAnnealing':
+        scheduler.update(
+            {'scheduler': CosineAnnealingLR(optimizer, config.TRAINER.COSA_TMAX)})
+    elif name == 'ExponentialLR':
+        scheduler.update(
+            {'scheduler': ExponentialLR(optimizer, config.TRAINER.ELR_GAMMA)})
+    else:
+        raise NotImplementedError()
+
+    return scheduler
diff --git a/imcui/third_party/MatchAnything/src/utils/__init__.py b/imcui/third_party/MatchAnything/src/utils/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
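For context, the dict returned by `build_scheduler` is consumed PyTorch-Lightning-style; a minimal sketch of how the wrapped scheduler drives the learning rate (the milestones and gamma here are made-up values, not project defaults):

```python
import torch
from torch.optim.lr_scheduler import MultiStepLR

model = torch.nn.Linear(8, 2)
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3, weight_decay=0.1)
scheduler = {
    'scheduler': MultiStepLR(optimizer, milestones=[3, 6], gamma=0.5),
    'interval': 'epoch',
}

for epoch in range(8):
    optimizer.step()               # dummy optimization step
    scheduler['scheduler'].step()  # lr halves after epochs 3 and 6
    print(epoch, optimizer.param_groups[0]['lr'])
```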
+ """ + + def __init__(self): + self.augmentor = A.Compose([ + A.MotionBlur(p=0.25), + A.ColorJitter(p=0.5), + A.RandomRain(p=0.1), # random occlusion + A.RandomSunFlare(p=0.1), + A.JpegCompression(p=0.25), + A.ISONoise(p=0.25) + ], p=1.0) + + def __call__(self, x): + return self.augmentor(image=x)['image'] + + +def build_augmentor(method=None, **kwargs): + if method is not None: + raise NotImplementedError('Using of augmentation functions are not supported yet!') + if method == 'dark': + return DarkAug() + elif method == 'mobile': + return MobileAug() + elif method is None: + return None + else: + raise ValueError(f'Invalid augmentation method: {method}') + + +if __name__ == '__main__': + augmentor = build_augmentor('FDA') diff --git a/imcui/third_party/MatchAnything/src/utils/colmap.py b/imcui/third_party/MatchAnything/src/utils/colmap.py new file mode 100644 index 0000000000000000000000000000000000000000..deefe92a3a8e132e3d5d51b8eaf08b1050e22aac --- /dev/null +++ b/imcui/third_party/MatchAnything/src/utils/colmap.py @@ -0,0 +1,530 @@ +# Copyright (c) 2022, ETH Zurich and UNC Chapel Hill. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# +# * Neither the name of ETH Zurich and UNC Chapel Hill nor the names of +# its contributors may be used to endorse or promote products derived +# from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# +# Author: Johannes L. 
diff --git a/imcui/third_party/MatchAnything/src/utils/colmap.py b/imcui/third_party/MatchAnything/src/utils/colmap.py
new file mode 100644
index 0000000000000000000000000000000000000000..deefe92a3a8e132e3d5d51b8eaf08b1050e22aac
--- /dev/null
+++ b/imcui/third_party/MatchAnything/src/utils/colmap.py
@@ -0,0 +1,530 @@
+# Copyright (c) 2022, ETH Zurich and UNC Chapel Hill.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+#     * Redistributions of source code must retain the above copyright
+#       notice, this list of conditions and the following disclaimer.
+#
+#     * Redistributions in binary form must reproduce the above copyright
+#       notice, this list of conditions and the following disclaimer in the
+#       documentation and/or other materials provided with the distribution.
+#
+#     * Neither the name of ETH Zurich and UNC Chapel Hill nor the names of
+#       its contributors may be used to endorse or promote products derived
+#       from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
+# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+#
+# Author: Johannes L. Schoenberger (jsch-at-demuc-dot-de)
+
+from typing import List, Tuple, Dict
+import os
+import collections
+import numpy as np
+import struct
+import argparse
+
+
+CameraModel = collections.namedtuple(
+    "CameraModel", ["model_id", "model_name", "num_params"])
+BaseCamera = collections.namedtuple(
+    "Camera", ["id", "model", "width", "height", "params"])
+BaseImage = collections.namedtuple(
+    "Image", ["id", "qvec", "tvec", "camera_id", "name", "xys", "point3D_ids"])
+Point3D = collections.namedtuple(
+    "Point3D", ["id", "xyz", "rgb", "error", "image_ids", "point2D_idxs"])
+
+
+class Image(BaseImage):
+    def qvec2rotmat(self):
+        return qvec2rotmat(self.qvec)
+
+    @property
+    def world_to_camera(self) -> np.ndarray:
+        R = qvec2rotmat(self.qvec)
+        t = self.tvec
+        world2cam = np.eye(4)
+        world2cam[:3, :3] = R
+        world2cam[:3, 3] = t
+        return world2cam
+
+
+class Camera(BaseCamera):
+    @property
+    def K(self):
+        K = np.eye(3)
+        if self.model in ("SIMPLE_PINHOLE", "SIMPLE_RADIAL", "RADIAL", "SIMPLE_RADIAL_FISHEYE", "RADIAL_FISHEYE"):
+            K[0, 0] = self.params[0]
+            K[1, 1] = self.params[0]
+            K[0, 2] = self.params[1]
+            K[1, 2] = self.params[2]
+        elif self.model in ("PINHOLE", "OPENCV", "OPENCV_FISHEYE", "FULL_OPENCV", "FOV", "THIN_PRISM_FISHEYE"):
+            K[0, 0] = self.params[0]
+            K[1, 1] = self.params[1]
+            K[0, 2] = self.params[2]
+            K[1, 2] = self.params[3]
+        else:
+            raise NotImplementedError
+        return K
+
+
+CAMERA_MODELS = {
+    CameraModel(model_id=0, model_name="SIMPLE_PINHOLE", num_params=3),
+    CameraModel(model_id=1, model_name="PINHOLE", num_params=4),
+    CameraModel(model_id=2, model_name="SIMPLE_RADIAL", num_params=4),
+    CameraModel(model_id=3, model_name="RADIAL", num_params=5),
+    CameraModel(model_id=4, model_name="OPENCV", num_params=8),
+    CameraModel(model_id=5, model_name="OPENCV_FISHEYE", num_params=8),
+    CameraModel(model_id=6, model_name="FULL_OPENCV", num_params=12),
+    CameraModel(model_id=7, model_name="FOV", num_params=5),
+    CameraModel(model_id=8, model_name="SIMPLE_RADIAL_FISHEYE", num_params=4),
+    CameraModel(model_id=9, model_name="RADIAL_FISHEYE", num_params=5),
+    CameraModel(model_id=10, model_name="THIN_PRISM_FISHEYE", num_params=12)
+}
+CAMERA_MODEL_IDS = dict([(camera_model.model_id, camera_model)
+                         for camera_model in CAMERA_MODELS])
+CAMERA_MODEL_NAMES = dict([(camera_model.model_name, camera_model)
+                           for camera_model in CAMERA_MODELS])
+
+
+def read_next_bytes(fid, num_bytes, format_char_sequence, endian_character="<"):
+    """Read and unpack the next bytes from a binary file.
+    :param fid:
+    :param num_bytes: Sum of combination of {2, 4, 8}, e.g. 2, 6, 16, 30, etc.
+    :param format_char_sequence: List of {c, e, f, d, h, H, i, I, l, L, q, Q}.
+    :param endian_character: Any of {@, =, <, >, !}
+    :return: Tuple of read and unpacked values.
+    """
+    data = fid.read(num_bytes)
+    return struct.unpack(endian_character + format_char_sequence, data)
+
+
+def write_next_bytes(fid, data, format_char_sequence, endian_character="<"):
+    """Pack and write to a binary file.
+    :param fid:
+    :param data: data to send, if multiple elements are sent at the same time,
+        they should be encapsulated either in a list or a tuple
+    :param format_char_sequence: List of {c, e, f, d, h, H, i, I, l, L, q, Q}.
+ should be the same length as the data list or tuple + :param endian_character: Any of {@, =, <, >, !} + """ + if isinstance(data, (list, tuple)): + bytes = struct.pack(endian_character + format_char_sequence, *data) + else: + bytes = struct.pack(endian_character + format_char_sequence, data) + fid.write(bytes) + + +def read_cameras_text(path): + """ + see: src/base/reconstruction.cc + void Reconstruction::WriteCamerasText(const std::string& path) + void Reconstruction::ReadCamerasText(const std::string& path) + """ + cameras = {} + with open(path, "r") as fid: + while True: + line = fid.readline() + if not line: + break + line = line.strip() + if len(line) > 0 and line[0] != "#": + elems = line.split() + camera_id = int(elems[0]) + model = elems[1] + width = int(elems[2]) + height = int(elems[3]) + params = np.array(tuple(map(float, elems[4:]))) + cameras[camera_id] = Camera(id=camera_id, model=model, + width=width, height=height, + params=params) + return cameras + + +def read_cameras_binary(path_to_model_file): + """ + see: src/base/reconstruction.cc + void Reconstruction::WriteCamerasBinary(const std::string& path) + void Reconstruction::ReadCamerasBinary(const std::string& path) + """ + cameras = {} + with open(path_to_model_file, "rb") as fid: + num_cameras = read_next_bytes(fid, 8, "Q")[0] + for _ in range(num_cameras): + camera_properties = read_next_bytes( + fid, num_bytes=24, format_char_sequence="iiQQ") + camera_id = camera_properties[0] + model_id = camera_properties[1] + model_name = CAMERA_MODEL_IDS[camera_properties[1]].model_name + width = camera_properties[2] + height = camera_properties[3] + num_params = CAMERA_MODEL_IDS[model_id].num_params + params = read_next_bytes(fid, num_bytes=8*num_params, + format_char_sequence="d"*num_params) + cameras[camera_id] = Camera(id=camera_id, + model=model_name, + width=width, + height=height, + params=np.array(params)) + assert len(cameras) == num_cameras + return cameras + + +def write_cameras_text(cameras, path): + """ + see: src/base/reconstruction.cc + void Reconstruction::WriteCamerasText(const std::string& path) + void Reconstruction::ReadCamerasText(const std::string& path) + """ + HEADER = "# Camera list with one line of data per camera:\n" + \ + "# CAMERA_ID, MODEL, WIDTH, HEIGHT, PARAMS[]\n" + \ + "# Number of cameras: {}\n".format(len(cameras)) + with open(path, "w") as fid: + fid.write(HEADER) + for _, cam in cameras.items(): + to_write = [cam.id, cam.model, cam.width, cam.height, *cam.params] + line = " ".join([str(elem) for elem in to_write]) + fid.write(line + "\n") + + +def write_cameras_binary(cameras, path_to_model_file): + """ + see: src/base/reconstruction.cc + void Reconstruction::WriteCamerasBinary(const std::string& path) + void Reconstruction::ReadCamerasBinary(const std::string& path) + """ + with open(path_to_model_file, "wb") as fid: + write_next_bytes(fid, len(cameras), "Q") + for _, cam in cameras.items(): + model_id = CAMERA_MODEL_NAMES[cam.model].model_id + camera_properties = [cam.id, + model_id, + cam.width, + cam.height] + write_next_bytes(fid, camera_properties, "iiQQ") + for p in cam.params: + write_next_bytes(fid, float(p), "d") + return cameras + + +def read_images_text(path): + """ + see: src/base/reconstruction.cc + void Reconstruction::ReadImagesText(const std::string& path) + void Reconstruction::WriteImagesText(const std::string& path) + """ + images = {} + with open(path, "r") as fid: + while True: + line = fid.readline() + if not line: + break + line = line.strip() + if len(line) > 0 and 
line[0] != "#": + elems = line.split() + image_id = int(elems[0]) + qvec = np.array(tuple(map(float, elems[1:5]))) + tvec = np.array(tuple(map(float, elems[5:8]))) + camera_id = int(elems[8]) + image_name = elems[9] + elems = fid.readline().split() + xys = np.column_stack([tuple(map(float, elems[0::3])), + tuple(map(float, elems[1::3]))]) + point3D_ids = np.array(tuple(map(int, elems[2::3]))) + images[image_id] = Image( + id=image_id, qvec=qvec, tvec=tvec, + camera_id=camera_id, name=image_name, + xys=xys, point3D_ids=point3D_ids) + return images + + +def read_images_binary(path_to_model_file): + """ + see: src/base/reconstruction.cc + void Reconstruction::ReadImagesBinary(const std::string& path) + void Reconstruction::WriteImagesBinary(const std::string& path) + """ + images = {} + with open(path_to_model_file, "rb") as fid: + num_reg_images = read_next_bytes(fid, 8, "Q")[0] + for _ in range(num_reg_images): + binary_image_properties = read_next_bytes( + fid, num_bytes=64, format_char_sequence="idddddddi") + image_id = binary_image_properties[0] + qvec = np.array(binary_image_properties[1:5]) + tvec = np.array(binary_image_properties[5:8]) + camera_id = binary_image_properties[8] + image_name = "" + current_char = read_next_bytes(fid, 1, "c")[0] + while current_char != b"\x00": # look for the ASCII 0 entry + image_name += current_char.decode("utf-8") + current_char = read_next_bytes(fid, 1, "c")[0] + num_points2D = read_next_bytes(fid, num_bytes=8, + format_char_sequence="Q")[0] + x_y_id_s = read_next_bytes(fid, num_bytes=24*num_points2D, + format_char_sequence="ddq"*num_points2D) + xys = np.column_stack([tuple(map(float, x_y_id_s[0::3])), + tuple(map(float, x_y_id_s[1::3]))]) + point3D_ids = np.array(tuple(map(int, x_y_id_s[2::3]))) + images[image_id] = Image( + id=image_id, qvec=qvec, tvec=tvec, + camera_id=camera_id, name=image_name, + xys=xys, point3D_ids=point3D_ids) + return images + + +def write_images_text(images, path): + """ + see: src/base/reconstruction.cc + void Reconstruction::ReadImagesText(const std::string& path) + void Reconstruction::WriteImagesText(const std::string& path) + """ + if len(images) == 0: + mean_observations = 0 + else: + mean_observations = sum((len(img.point3D_ids) for _, img in images.items()))/len(images) + HEADER = "# Image list with two lines of data per image:\n" + \ + "# IMAGE_ID, QW, QX, QY, QZ, TX, TY, TZ, CAMERA_ID, NAME\n" + \ + "# POINTS2D[] as (X, Y, POINT3D_ID)\n" + \ + "# Number of images: {}, mean observations per image: {}\n".format(len(images), mean_observations) + + with open(path, "w") as fid: + fid.write(HEADER) + for _, img in images.items(): + image_header = [img.id, *img.qvec, *img.tvec, img.camera_id, img.name] + first_line = " ".join(map(str, image_header)) + fid.write(first_line + "\n") + + points_strings = [] + for xy, point3D_id in zip(img.xys, img.point3D_ids): + points_strings.append(" ".join(map(str, [*xy, point3D_id]))) + fid.write(" ".join(points_strings) + "\n") + + +def write_images_binary(images, path_to_model_file): + """ + see: src/base/reconstruction.cc + void Reconstruction::ReadImagesBinary(const std::string& path) + void Reconstruction::WriteImagesBinary(const std::string& path) + """ + with open(path_to_model_file, "wb") as fid: + write_next_bytes(fid, len(images), "Q") + for _, img in images.items(): + write_next_bytes(fid, img.id, "i") + write_next_bytes(fid, img.qvec.tolist(), "dddd") + write_next_bytes(fid, img.tvec.tolist(), "ddd") + write_next_bytes(fid, img.camera_id, "i") + for char in img.name: + 
write_next_bytes(fid, char.encode("utf-8"), "c") + write_next_bytes(fid, b"\x00", "c") + write_next_bytes(fid, len(img.point3D_ids), "Q") + for xy, p3d_id in zip(img.xys, img.point3D_ids): + write_next_bytes(fid, [*xy, p3d_id], "ddq") + + +def read_points3D_text(path): + """ + see: src/base/reconstruction.cc + void Reconstruction::ReadPoints3DText(const std::string& path) + void Reconstruction::WritePoints3DText(const std::string& path) + """ + points3D = {} + with open(path, "r") as fid: + while True: + line = fid.readline() + if not line: + break + line = line.strip() + if len(line) > 0 and line[0] != "#": + elems = line.split() + point3D_id = int(elems[0]) + xyz = np.array(tuple(map(float, elems[1:4]))) + rgb = np.array(tuple(map(int, elems[4:7]))) + error = float(elems[7]) + image_ids = np.array(tuple(map(int, elems[8::2]))) + point2D_idxs = np.array(tuple(map(int, elems[9::2]))) + points3D[point3D_id] = Point3D(id=point3D_id, xyz=xyz, rgb=rgb, + error=error, image_ids=image_ids, + point2D_idxs=point2D_idxs) + return points3D + + +def read_points3D_binary(path_to_model_file): + """ + see: src/base/reconstruction.cc + void Reconstruction::ReadPoints3DBinary(const std::string& path) + void Reconstruction::WritePoints3DBinary(const std::string& path) + """ + points3D = {} + with open(path_to_model_file, "rb") as fid: + num_points = read_next_bytes(fid, 8, "Q")[0] + for _ in range(num_points): + binary_point_line_properties = read_next_bytes( + fid, num_bytes=43, format_char_sequence="QdddBBBd") + point3D_id = binary_point_line_properties[0] + xyz = np.array(binary_point_line_properties[1:4]) + rgb = np.array(binary_point_line_properties[4:7]) + error = np.array(binary_point_line_properties[7]) + track_length = read_next_bytes( + fid, num_bytes=8, format_char_sequence="Q")[0] + track_elems = read_next_bytes( + fid, num_bytes=8*track_length, + format_char_sequence="ii"*track_length) + image_ids = np.array(tuple(map(int, track_elems[0::2]))) + point2D_idxs = np.array(tuple(map(int, track_elems[1::2]))) + points3D[point3D_id] = Point3D( + id=point3D_id, xyz=xyz, rgb=rgb, + error=error, image_ids=image_ids, + point2D_idxs=point2D_idxs) + return points3D + + +def write_points3D_text(points3D, path): + """ + see: src/base/reconstruction.cc + void Reconstruction::ReadPoints3DText(const std::string& path) + void Reconstruction::WritePoints3DText(const std::string& path) + """ + if len(points3D) == 0: + mean_track_length = 0 + else: + mean_track_length = sum((len(pt.image_ids) for _, pt in points3D.items()))/len(points3D) + HEADER = "# 3D point list with one line of data per point:\n" + \ + "# POINT3D_ID, X, Y, Z, R, G, B, ERROR, TRACK[] as (IMAGE_ID, POINT2D_IDX)\n" + \ + "# Number of points: {}, mean track length: {}\n".format(len(points3D), mean_track_length) + + with open(path, "w") as fid: + fid.write(HEADER) + for _, pt in points3D.items(): + point_header = [pt.id, *pt.xyz, *pt.rgb, pt.error] + fid.write(" ".join(map(str, point_header)) + " ") + track_strings = [] + for image_id, point2D in zip(pt.image_ids, pt.point2D_idxs): + track_strings.append(" ".join(map(str, [image_id, point2D]))) + fid.write(" ".join(track_strings) + "\n") + + +def write_points3D_binary(points3D, path_to_model_file): + """ + see: src/base/reconstruction.cc + void Reconstruction::ReadPoints3DBinary(const std::string& path) + void Reconstruction::WritePoints3DBinary(const std::string& path) + """ + with open(path_to_model_file, "wb") as fid: + write_next_bytes(fid, len(points3D), "Q") + for _, pt in points3D.items(): + 
write_next_bytes(fid, pt.id, "Q")
+            write_next_bytes(fid, pt.xyz.tolist(), "ddd")
+            write_next_bytes(fid, pt.rgb.tolist(), "BBB")
+            write_next_bytes(fid, pt.error, "d")
+            track_length = pt.image_ids.shape[0]
+            write_next_bytes(fid, track_length, "Q")
+            for image_id, point2D_id in zip(pt.image_ids, pt.point2D_idxs):
+                write_next_bytes(fid, [image_id, point2D_id], "ii")
+
+
+def detect_model_format(path, ext):
+    if os.path.isfile(os.path.join(path, "cameras" + ext)) and \
+       os.path.isfile(os.path.join(path, "images" + ext)) and \
+       os.path.isfile(os.path.join(path, "points3D" + ext)):
+        print("Detected model format: '" + ext + "'")
+        return True
+
+    return False
+
+
+def read_model(path, ext="") -> Tuple[Dict[int, Camera], Dict[int, Image], Dict[int, Point3D]]:
+    # try to detect the extension automatically
+    if ext == "":
+        if detect_model_format(path, ".bin"):
+            ext = ".bin"
+        elif detect_model_format(path, ".txt"):
+            ext = ".txt"
+        else:
+            raise ValueError("Provide model format: '.bin' or '.txt'")
+
+    if ext == ".txt":
+        cameras = read_cameras_text(os.path.join(path, "cameras" + ext))
+        images = read_images_text(os.path.join(path, "images" + ext))
+        points3D = read_points3D_text(os.path.join(path, "points3D" + ext))
+    else:
+        cameras = read_cameras_binary(os.path.join(path, "cameras" + ext))
+        images = read_images_binary(os.path.join(path, "images" + ext))
+        points3D = read_points3D_binary(os.path.join(path, "points3D" + ext))
+    return cameras, images, points3D
+
+
+def write_model(cameras, images, points3D, path, ext=".bin"):
+    if ext == ".txt":
+        write_cameras_text(cameras, os.path.join(path, "cameras" + ext))
+        write_images_text(images, os.path.join(path, "images" + ext))
+        write_points3D_text(points3D, os.path.join(path, "points3D" + ext))
+    else:
+        write_cameras_binary(cameras, os.path.join(path, "cameras" + ext))
+        write_images_binary(images, os.path.join(path, "images" + ext))
+        write_points3D_binary(points3D, os.path.join(path, "points3D" + ext))
+    return cameras, images, points3D
+
+
+def qvec2rotmat(qvec):
+    return np.array([
+        [1 - 2 * qvec[2]**2 - 2 * qvec[3]**2,
+         2 * qvec[1] * qvec[2] - 2 * qvec[0] * qvec[3],
+         2 * qvec[3] * qvec[1] + 2 * qvec[0] * qvec[2]],
+        [2 * qvec[1] * qvec[2] + 2 * qvec[0] * qvec[3],
+         1 - 2 * qvec[1]**2 - 2 * qvec[3]**2,
+         2 * qvec[2] * qvec[3] - 2 * qvec[0] * qvec[1]],
+        [2 * qvec[3] * qvec[1] - 2 * qvec[0] * qvec[2],
+         2 * qvec[2] * qvec[3] + 2 * qvec[0] * qvec[1],
+         1 - 2 * qvec[1]**2 - 2 * qvec[2]**2]])
+
+
+def rotmat2qvec(R):
+    Rxx, Ryx, Rzx, Rxy, Ryy, Rzy, Rxz, Ryz, Rzz = R.flat
+    K = np.array([
+        [Rxx - Ryy - Rzz, 0, 0, 0],
+        [Ryx + Rxy, Ryy - Rxx - Rzz, 0, 0],
+        [Rzx + Rxz, Rzy + Ryz, Rzz - Rxx - Ryy, 0],
+        [Ryz - Rzy, Rzx - Rxz, Rxy - Ryx, Rxx + Ryy + Rzz]]) / 3.0
+    eigvals, eigvecs = np.linalg.eigh(K)
+    qvec = eigvecs[[3, 0, 1, 2], np.argmax(eigvals)]
+    if qvec[0] < 0:
+        qvec *= -1
+    return qvec
+
+
+def main():
+    parser = argparse.ArgumentParser(description="Read and write COLMAP binary and text models")
+    parser.add_argument("--input_model", help="path to input model folder")
+    parser.add_argument("--input_format", choices=[".bin", ".txt"],
+                        help="input model format", default="")
+    parser.add_argument("--output_model",
+                        help="path to output model folder")
+    parser.add_argument("--output_format", choices=[".bin", ".txt"],
+                        help="output model format", default=".txt")
+    args = parser.parse_args()
+
+    cameras, images, points3D = read_model(path=args.input_model, ext=args.input_format)
+
+    print("num_cameras:", len(cameras))
+    print("num_images:", len(images))
+    print("num_points3D:", len(points3D))
+
+    if args.output_model is not None:
+        write_model(cameras, images, points3D, path=args.output_model, ext=args.output_format)
+
+
+if __name__ == "__main__":
+    main()
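A quick sanity check for the quaternion helpers above (the import path is an assumption; the test quaternion is arbitrary, and the two asserts use the quaternion double cover q ~ -q):

```python
import numpy as np
from src.utils.colmap import qvec2rotmat, rotmat2qvec  # assumed import path

q = np.array([0.8, 0.2, -0.3, 0.4])
q /= np.linalg.norm(q)                   # COLMAP stores unit quaternions (qw, qx, qy, qz)

R = qvec2rotmat(q)
assert np.allclose(R @ R.T, np.eye(3))   # orthonormal
assert np.isclose(np.linalg.det(R), 1.0)

q_back = rotmat2qvec(R)                  # sign-normalized so qw >= 0
assert np.allclose(q_back, q) or np.allclose(q_back, -q)
```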
print("num_images:", len(images)) + print("num_points3D:", len(points3D)) + + if args.output_model is not None: + write_model(cameras, images, points3D, path=args.output_model, ext=args.output_format) + + +if __name__ == "__main__": + main() diff --git a/imcui/third_party/MatchAnything/src/utils/colmap/__init__.py b/imcui/third_party/MatchAnything/src/utils/colmap/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/imcui/third_party/MatchAnything/src/utils/colmap/database.py b/imcui/third_party/MatchAnything/src/utils/colmap/database.py new file mode 100644 index 0000000000000000000000000000000000000000..81a9c47bad6c522597dfaaf2a85ae0d252b5ab10 --- /dev/null +++ b/imcui/third_party/MatchAnything/src/utils/colmap/database.py @@ -0,0 +1,417 @@ +# Copyright (c) 2018, ETH Zurich and UNC Chapel Hill. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# +# * Neither the name of ETH Zurich and UNC Chapel Hill nor the names of +# its contributors may be used to endorse or promote products derived +# from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# +# Author: Johannes L. Schoenberger (jsch-at-demuc-dot-de) + +# This script is based on an original implementation by True Price. 
+
+import sys
+import sqlite3
+import numpy as np
+from loguru import logger
+
+
+IS_PYTHON3 = sys.version_info[0] >= 3
+
+MAX_IMAGE_ID = 2**31 - 1
+
+CREATE_CAMERAS_TABLE = """CREATE TABLE IF NOT EXISTS cameras (
+    camera_id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+    model INTEGER NOT NULL,
+    width INTEGER NOT NULL,
+    height INTEGER NOT NULL,
+    params BLOB,
+    prior_focal_length INTEGER NOT NULL)"""
+
+CREATE_DESCRIPTORS_TABLE = """CREATE TABLE IF NOT EXISTS descriptors (
+    image_id INTEGER PRIMARY KEY NOT NULL,
+    rows INTEGER NOT NULL,
+    cols INTEGER NOT NULL,
+    data BLOB,
+    FOREIGN KEY(image_id) REFERENCES images(image_id) ON DELETE CASCADE)"""
+
+CREATE_IMAGES_TABLE = """CREATE TABLE IF NOT EXISTS images (
+    image_id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+    name TEXT NOT NULL UNIQUE,
+    camera_id INTEGER NOT NULL,
+    prior_qw REAL,
+    prior_qx REAL,
+    prior_qy REAL,
+    prior_qz REAL,
+    prior_tx REAL,
+    prior_ty REAL,
+    prior_tz REAL,
+    CONSTRAINT image_id_check CHECK(image_id >= 0 and image_id < {}),
+    FOREIGN KEY(camera_id) REFERENCES cameras(camera_id))
+""".format(MAX_IMAGE_ID)
+
+CREATE_TWO_VIEW_GEOMETRIES_TABLE = """
+CREATE TABLE IF NOT EXISTS two_view_geometries (
+    pair_id INTEGER PRIMARY KEY NOT NULL,
+    rows INTEGER NOT NULL,
+    cols INTEGER NOT NULL,
+    data BLOB,
+    config INTEGER NOT NULL,
+    F BLOB,
+    E BLOB,
+    H BLOB,
+    qvec BLOB,
+    tvec BLOB)
+"""
+
+CREATE_KEYPOINTS_TABLE = """CREATE TABLE IF NOT EXISTS keypoints (
+    image_id INTEGER PRIMARY KEY NOT NULL,
+    rows INTEGER NOT NULL,
+    cols INTEGER NOT NULL,
+    data BLOB,
+    FOREIGN KEY(image_id) REFERENCES images(image_id) ON DELETE CASCADE)
+"""
+
+CREATE_MATCHES_TABLE = """CREATE TABLE IF NOT EXISTS matches (
+    pair_id INTEGER PRIMARY KEY NOT NULL,
+    rows INTEGER NOT NULL,
+    cols INTEGER NOT NULL,
+    data BLOB)"""
+
+CREATE_NAME_INDEX = \
+    "CREATE UNIQUE INDEX IF NOT EXISTS index_name ON images(name)"
+
+CREATE_ALL = "; ".join([
+    CREATE_CAMERAS_TABLE,
+    CREATE_IMAGES_TABLE,
+    CREATE_KEYPOINTS_TABLE,
+    CREATE_DESCRIPTORS_TABLE,
+    CREATE_MATCHES_TABLE,
+    CREATE_TWO_VIEW_GEOMETRIES_TABLE,
+    CREATE_NAME_INDEX
+])
+
+
+def image_ids_to_pair_id(image_id1, image_id2):
+    if image_id1 > image_id2:
+        image_id1, image_id2 = image_id2, image_id1
+    return image_id1 * MAX_IMAGE_ID + image_id2
+
+
+def pair_id_to_image_ids(pair_id):
+    image_id2 = pair_id % MAX_IMAGE_ID
+    image_id1 = (pair_id - image_id2) // MAX_IMAGE_ID  # floor division keeps the id an integer
+    return image_id1, image_id2
+
+
+def array_to_blob(array):
+    if IS_PYTHON3:
+        return array.tobytes()
+    else:
+        return np.getbuffer(array)
+
+
+def blob_to_array(blob, dtype, shape=(-1,)):
+    # np.fromstring is deprecated for binary input; np.frombuffer is the
+    # supported equivalent and works on both Python 2 and 3.
+    return np.frombuffer(blob, dtype=dtype).reshape(*shape)
+
+
+class COLMAPDatabase(sqlite3.Connection):
+
+    @staticmethod
+    def connect(database_path):
+        return sqlite3.connect(str(database_path), factory=COLMAPDatabase)
+
+
+    def __init__(self, *args, **kwargs):
+        super(COLMAPDatabase, self).__init__(*args, **kwargs)
+
+        self.create_tables = lambda: self.executescript(CREATE_ALL)
+        self.create_cameras_table = \
+            lambda: self.executescript(CREATE_CAMERAS_TABLE)
+        self.create_descriptors_table = \
+            lambda: self.executescript(CREATE_DESCRIPTORS_TABLE)
+        self.create_images_table = \
+            lambda: self.executescript(CREATE_IMAGES_TABLE)
+        self.create_two_view_geometries_table = \
+            lambda: self.executescript(CREATE_TWO_VIEW_GEOMETRIES_TABLE)
+        self.create_keypoints_table = \
+            lambda: self.executescript(CREATE_KEYPOINTS_TABLE)
+        self.create_matches_table = \
lambda: self.executescript(CREATE_MATCHES_TABLE) + self.create_name_index = lambda: self.executescript(CREATE_NAME_INDEX) + + def add_camera(self, model, width, height, params, + prior_focal_length=False, camera_id=None): + params = np.asarray(params, np.float64) + cursor = self.execute( + "INSERT INTO cameras VALUES (?, ?, ?, ?, ?, ?)", + (camera_id, model, width, height, array_to_blob(params), + prior_focal_length)) + return cursor.lastrowid + + def add_image(self, name, camera_id, + prior_q=np.full(4, np.NaN), prior_t=np.full(3, np.NaN), + image_id=None): + cursor = self.execute( + "INSERT INTO images VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + (image_id, name, camera_id, prior_q[0], prior_q[1], prior_q[2], + prior_q[3], prior_t[0], prior_t[1], prior_t[2])) + return cursor.lastrowid + + def add_keypoints(self, image_id, keypoints): + assert(len(keypoints.shape) == 2) + assert(keypoints.shape[1] in [2, 4, 6]) + + keypoints = np.asarray(keypoints, np.float32) + self.execute( + "INSERT INTO keypoints VALUES (?, ?, ?, ?)", + (image_id,) + keypoints.shape + (array_to_blob(keypoints),)) + + def add_descriptors(self, image_id, descriptors): + descriptors = np.ascontiguousarray(descriptors, np.uint8) + self.execute( + "INSERT INTO descriptors VALUES (?, ?, ?, ?)", + (image_id,) + descriptors.shape + (array_to_blob(descriptors),)) + + def add_matches(self, image_id1, image_id2, matches): + assert(len(matches.shape) == 2) + assert(matches.shape[1] == 2) + + if image_id1 > image_id2: + matches = matches[:,::-1] + + pair_id = image_ids_to_pair_id(image_id1, image_id2) + matches = np.asarray(matches, np.uint32) + self.execute( + "INSERT INTO matches VALUES (?, ?, ?, ?)", + (pair_id,) + matches.shape + (array_to_blob(matches),)) + + def add_two_view_geometry(self, image_id1, image_id2, matches, + F=np.eye(3), E=np.eye(3), H=np.eye(3), + qvec=np.array([1.0, 0.0, 0.0, 0.0]), + tvec=np.zeros(3), config=2): + assert(len(matches.shape) == 2) + assert(matches.shape[1] == 2) + + if image_id1 > image_id2: + matches = matches[:,::-1] + + pair_id = image_ids_to_pair_id(image_id1, image_id2) + matches = np.asarray(matches, np.uint32) + F = np.asarray(F, dtype=np.float64) + E = np.asarray(E, dtype=np.float64) + H = np.asarray(H, dtype=np.float64) + qvec = np.asarray(qvec, dtype=np.float64) + tvec = np.asarray(tvec, dtype=np.float64) + self.execute( + "INSERT INTO two_view_geometries VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", + (pair_id,) + matches.shape + (array_to_blob(matches), config, + array_to_blob(F), array_to_blob(E), array_to_blob(H), + array_to_blob(qvec), array_to_blob(tvec))) + + def update_two_view_geometry(self, image_id1, image_id2, matches, + F=np.eye(3), E=np.eye(3), H=np.eye(3), config=2): + assert(len(matches.shape) == 2) + assert(matches.shape[1] == 2) + + if image_id1 > image_id2: + matches = matches[:,::-1] + + pair_id = image_ids_to_pair_id(image_id1, image_id2) + matches = np.asarray(matches, np.uint32) + F = np.asarray(F, dtype=np.float64) + E = np.asarray(E, dtype=np.float64) + H = np.asarray(H, dtype=np.float64) + + # Find whether exists: + row = self.execute(f"SELECT * FROM two_view_geometries WHERE pair_id = {pair_id} ") + data = list(next(row)) + try: + matches_old = blob_to_array(data[3], np.uint32, (-1, 2)) + except: + matches_old = None + + if matches_old is not None: + for match in matches: + img0_id, img1_id = match + + # Find duplicated pts + img0_dup_idxs = np.where(matches_old[:, 0] == img0_id) + img1_dup_idxs = np.where(matches_old[:, 1] == img1_id) + + if len(img0_dup_idxs[0]) 
== 0 and len(img1_dup_idxs[0]) == 0:
+                    # No duplicated matches:
+                    matches_old = np.concatenate([matches_old, match[None]], axis=0)
+                elif len(img0_dup_idxs[0]) == 1 and len(img1_dup_idxs[0]) == 0:
+                    # NOTE: chained advanced indexing (matches_old[idx_array][0, 1] = ...)
+                    # writes into a temporary copy; index row and column directly instead.
+                    matches_old[img0_dup_idxs[0][0], 1] = img1_id
+                elif len(img0_dup_idxs[0]) == 0 and len(img1_dup_idxs[0]) == 1:
+                    matches_old[img1_dup_idxs[0][0], 0] = img0_id
+                elif len(img0_dup_idxs[0]) == 1 and len(img1_dup_idxs[0]) == 1:
+                    if img0_dup_idxs[0][0] != img1_dup_idxs[0][0]:
+                        # logger.warning(f"Duplicated matches exists!")
+                        matches_old[img0_dup_idxs[0][0], 1] = img1_id
+                        matches_old[img1_dup_idxs[0][0], 0] = img0_id
+                else:
+                    raise NotImplementedError
+
+            # matches = np.concatenate([matches_old, matches], axis=0) # N * 2
+            matches = matches_old
+            self.execute(f"DELETE FROM two_view_geometries WHERE pair_id = {pair_id}")
+
+            data[1:4] = matches.shape + (array_to_blob(np.asarray(matches, np.uint32)),)
+            self.execute("INSERT INTO two_view_geometries VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", tuple(data))
+        else:
+            raise NotImplementedError
+
+        # self.add_two_view_geometry(image_id1, image_id2, matches)
+
+
+def example_usage():
+    import os
+    import argparse
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--database_path", default="database.db")
+    args = parser.parse_args()
+
+    if os.path.exists(args.database_path):
+        print("ERROR: database path already exists -- will not modify it.")
+        return
+
+    # Open the database.
+
+    db = COLMAPDatabase.connect(args.database_path)
+
+    # For convenience, try creating all the tables upfront.
+
+    db.create_tables()
+
+    # Create dummy cameras.
+
+    model1, width1, height1, params1 = \
+        0, 1024, 768, np.array((1024., 512., 384.))
+    model2, width2, height2, params2 = \
+        2, 1024, 768, np.array((1024., 512., 384., 0.1))
+
+    camera_id1 = db.add_camera(model1, width1, height1, params1)
+    camera_id2 = db.add_camera(model2, width2, height2, params2)
+
+    # Create dummy images.
+
+    image_id1 = db.add_image("image1.png", camera_id1)
+    image_id2 = db.add_image("image2.png", camera_id1)
+    image_id3 = db.add_image("image3.png", camera_id2)
+    image_id4 = db.add_image("image4.png", camera_id2)
+
+    # Create dummy keypoints.
+    #
+    # Note that COLMAP supports:
+    #      - 2D keypoints: (x, y)
+    #      - 4D keypoints: (x, y, theta, scale)
+    #      - 6D affine keypoints: (x, y, a_11, a_12, a_21, a_22)
+
+    num_keypoints = 1000
+    keypoints1 = np.random.rand(num_keypoints, 2) * (width1, height1)
+    keypoints2 = np.random.rand(num_keypoints, 2) * (width1, height1)
+    keypoints3 = np.random.rand(num_keypoints, 2) * (width2, height2)
+    keypoints4 = np.random.rand(num_keypoints, 2) * (width2, height2)
+
+    db.add_keypoints(image_id1, keypoints1)
+    db.add_keypoints(image_id2, keypoints2)
+    db.add_keypoints(image_id3, keypoints3)
+    db.add_keypoints(image_id4, keypoints4)
+
+    # Create dummy matches.
+
+    M = 50
+    matches12 = np.random.randint(num_keypoints, size=(M, 2))
+    matches23 = np.random.randint(num_keypoints, size=(M, 2))
+    matches34 = np.random.randint(num_keypoints, size=(M, 2))
+
+    db.add_matches(image_id1, image_id2, matches12)
+    db.add_matches(image_id2, image_id3, matches23)
+    db.add_matches(image_id3, image_id4, matches34)
+
+    # Commit the data to the file.
+
+    db.commit()
+
+    # Read and check cameras.
+ + rows = db.execute("SELECT * FROM cameras") + + camera_id, model, width, height, params, prior = next(rows) + params = blob_to_array(params, np.float64) + assert camera_id == camera_id1 + assert model == model1 and width == width1 and height == height1 + assert np.allclose(params, params1) + + camera_id, model, width, height, params, prior = next(rows) + params = blob_to_array(params, np.float64) + assert camera_id == camera_id2 + assert model == model2 and width == width2 and height == height2 + assert np.allclose(params, params2) + + # Read and check keypoints. + + keypoints = dict( + (image_id, blob_to_array(data, np.float32, (-1, 2))) + for image_id, data in db.execute( + "SELECT image_id, data FROM keypoints")) + + assert np.allclose(keypoints[image_id1], keypoints1) + assert np.allclose(keypoints[image_id2], keypoints2) + assert np.allclose(keypoints[image_id3], keypoints3) + assert np.allclose(keypoints[image_id4], keypoints4) + + # Read and check matches. + + pair_ids = [image_ids_to_pair_id(*pair) for pair in + ((image_id1, image_id2), + (image_id2, image_id3), + (image_id3, image_id4))] + + matches = dict( + (pair_id_to_image_ids(pair_id), + blob_to_array(data, np.uint32, (-1, 2))) + for pair_id, data in db.execute("SELECT pair_id, data FROM matches") + ) + + assert np.all(matches[(image_id1, image_id2)] == matches12) + assert np.all(matches[(image_id2, image_id3)] == matches23) + assert np.all(matches[(image_id3, image_id4)] == matches34) + + # Clean up. + + db.close() + + if os.path.exists(args.database_path): + os.remove(args.database_path) + + +if __name__ == "__main__": + example_usage() diff --git a/imcui/third_party/MatchAnything/src/utils/colmap/eval_helper.py b/imcui/third_party/MatchAnything/src/utils/colmap/eval_helper.py new file mode 100644 index 0000000000000000000000000000000000000000..2335a1abbc6321f7e3a4b40123d8386d4900a9d2 --- /dev/null +++ b/imcui/third_party/MatchAnything/src/utils/colmap/eval_helper.py @@ -0,0 +1,232 @@ +import math +import cv2 +import os +import numpy as np +from .read_write_model import read_images_binary + + +def align_model(model, rot, trans, scale): + return (np.matmul(rot, model) + trans) * scale + + +def align(model, data): + ''' + Source: https://vision.in.tum.de/data/datasets/rgbd-dataset/tools + #absolute_trajectory_error_ate + Align two trajectories using the method of Horn (closed-form). 
+
+    Input:
+    model -- first trajectory (3xn)
+    data -- second trajectory (3xn)
+
+    Output:
+    rot -- rotation matrix (3x3)
+    trans -- translation vector (3x1)
+    trans_error -- translational error per point (1xn)
+
+    '''
+
+    if model.shape[1] < 3:
+        print('Need at least 3 points for ATE: {}'.format(model))
+        return np.identity(3), np.zeros((3, 1)), 1
+
+    # Get zero centered point cloud
+    model_zerocentered = model - model.mean(1, keepdims=True)
+    data_zerocentered = data - data.mean(1, keepdims=True)
+
+    # Construct the covariance matrix
+    W = np.zeros((3, 3))
+    for column in range(model.shape[1]):
+        W += np.outer(model_zerocentered[:, column],
+                      data_zerocentered[:, column])
+
+    # SVD
+    U, d, Vh = np.linalg.svd(W.transpose())
+    S = np.identity(3)
+    if (np.linalg.det(U) * np.linalg.det(Vh) < 0):
+        S[2, 2] = -1
+    rot = np.matmul(np.matmul(U, S), Vh)
+    trans = data.mean(1, keepdims=True) - np.matmul(
+        rot, model.mean(1, keepdims=True))
+
+    # apply rot and trans to point cloud
+    model_aligned = align_model(model, rot, trans, 1.0)
+    model_aligned_zerocentered = model_aligned - model_aligned.mean(
+        1, keepdims=True)
+
+    # calc scale based on distance to point cloud center
+    data_dist = np.sqrt((data_zerocentered * data_zerocentered).sum(axis=0))
+    model_aligned_dist = np.sqrt(
+        (model_aligned_zerocentered * model_aligned_zerocentered).sum(axis=0))
+    scale_array = data_dist / model_aligned_dist
+    scale = np.median(scale_array)
+
+    return rot, trans, scale
+
+
+# Tolerance used by quaternion_matrix below (matches transformations.py).
+_EPS = np.finfo(float).eps * 4.0
+
+
+def quaternion_matrix(quaternion):
+    '''Return homogeneous rotation matrix from quaternion.
+
+    >>> M = quaternion_matrix([0.99810947, 0.06146124, 0, 0])
+    >>> numpy.allclose(M, rotation_matrix(0.123, [1, 0, 0]))
+    True
+    >>> M = quaternion_matrix([1, 0, 0, 0])
+    >>> numpy.allclose(M, numpy.identity(4))
+    True
+    >>> M = quaternion_matrix([0, 1, 0, 0])
+    >>> numpy.allclose(M, numpy.diag([1, -1, -1, 1]))
+    True
+    '''
+
+    q = np.array(quaternion, dtype=np.float64, copy=True)
+    n = np.dot(q, q)
+    if n < _EPS:
+        return np.identity(4)
+
+    q *= math.sqrt(2.0 / n)
+    q = np.outer(q, q)
+
+    return np.array(
+        [[1.0 - q[2, 2] - q[3, 3], q[1, 2] - q[3, 0], q[1, 3] + q[2, 0], 0.0],
+         [q[1, 2] + q[3, 0], 1.0 - q[1, 1] - q[3, 3], q[2, 3] - q[1, 0], 0.0],
+         [q[1, 3] - q[2, 0], q[2, 3] + q[1, 0], 1.0 - q[1, 1] - q[2, 2], 0.0],
+         [0.0, 0.0, 0.0, 1.0]])
+
+
+def quaternion_from_matrix(matrix, isprecise=False):
+    '''Return quaternion from rotation matrix.
+
+    If isprecise is True, the input matrix is assumed to be a precise rotation
+    matrix and a faster algorithm is used.
+
+    >>> q = quaternion_from_matrix(numpy.identity(4), True)
+    >>> numpy.allclose(q, [1, 0, 0, 0])
+    True
+    >>> q = quaternion_from_matrix(numpy.diag([1, -1, -1, 1]))
+    >>> numpy.allclose(q, [0, 1, 0, 0]) or numpy.allclose(q, [0, -1, 0, 0])
+    True
+    >>> R = rotation_matrix(0.123, (1, 2, 3))
+    >>> q = quaternion_from_matrix(R, True)
+    >>> numpy.allclose(q, [0.9981095, 0.0164262, 0.0328524, 0.0492786])
+    True
+    >>> R = [[-0.545, 0.797, 0.260, 0], [0.733, 0.603, -0.313, 0],
+    ...      [-0.407, 0.021, -0.913, 0], [0, 0, 0, 1]]
+    >>> q = quaternion_from_matrix(R)
+    >>> numpy.allclose(q, [0.19069, 0.43736, 0.87485, -0.083611])
+    True
+    >>> R = [[0.395, 0.362, 0.843, 0], [-0.626, 0.796, -0.056, 0],
+    ...
[-0.677, -0.498, 0.529, 0], [0, 0, 0, 1]] + >>> q = quaternion_from_matrix(R) + >>> numpy.allclose(q, [0.82336615, -0.13610694, 0.46344705, -0.29792603]) + True + >>> R = random_rotation_matrix() + >>> q = quaternion_from_matrix(R) + >>> is_same_transform(R, quaternion_matrix(q)) + True + >>> R = euler_matrix(0.0, 0.0, numpy.pi/2.0) + >>> numpy.allclose(quaternion_from_matrix(R, isprecise=False), + ... quaternion_from_matrix(R, isprecise=True)) + True + + ''' + + M = np.array(matrix, dtype=np.float64, copy=False)[:4, :4] + if isprecise: + q = np.empty((4, )) + t = np.trace(M) + if t > M[3, 3]: + q[0] = t + q[3] = M[1, 0] - M[0, 1] + q[2] = M[0, 2] - M[2, 0] + q[1] = M[2, 1] - M[1, 2] + else: + i, j, k = 1, 2, 3 + if M[1, 1] > M[0, 0]: + i, j, k = 2, 3, 1 + if M[2, 2] > M[i, i]: + i, j, k = 3, 1, 2 + t = M[i, i] - (M[j, j] + M[k, k]) + M[3, 3] + q[i] = t + q[j] = M[i, j] + M[j, i] + q[k] = M[k, i] + M[i, k] + q[3] = M[k, j] - M[j, k] + q *= 0.5 / math.sqrt(t * M[3, 3]) + else: + m00 = M[0, 0] + m01 = M[0, 1] + m02 = M[0, 2] + m10 = M[1, 0] + m11 = M[1, 1] + m12 = M[1, 2] + m20 = M[2, 0] + m21 = M[2, 1] + m22 = M[2, 2] + + # symmetric matrix K + K = np.array([[m00 - m11 - m22, 0.0, 0.0, 0.0], + [m01 + m10, m11 - m00 - m22, 0.0, 0.0], + [m02 + m20, m12 + m21, m22 - m00 - m11, 0.0], + [m21 - m12, m02 - m20, m10 - m01, m00 + m11 + m22]]) + K /= 3.0 + + # quaternion is eigenvector of K that corresponds to largest eigenvalue + w, V = np.linalg.eigh(K) + q = V[[3, 0, 1, 2], np.argmax(w)] + + if q[0] < 0.0: + np.negative(q, q) + + return q + +def is_colmap_img_valid(colmap_img_file): + '''Return validity of a colmap reconstruction''' + + images_bin = read_images_binary(colmap_img_file) + # Check if everything is finite for this subset + for key in images_bin.keys(): + q = np.asarray(images_bin[key].qvec).flatten() + t = np.asarray(images_bin[key].tvec).flatten() + + is_cur_valid = True + is_cur_valid = is_cur_valid and q.shape == (4, ) + is_cur_valid = is_cur_valid and t.shape == (3, ) + is_cur_valid = is_cur_valid and np.all(np.isfinite(q)) + is_cur_valid = is_cur_valid and np.all(np.isfinite(t)) + + # If any is invalid, immediately return + if not is_cur_valid: + return False + + return True + +def get_best_colmap_index(colmap_output_path): + ''' + Determines the colmap model with the most images if there is more than one. + ''' + + # First find the colmap reconstruction with the most number of images. + best_index, best_num_images = -1, 0 + + # Check all valid sub reconstructions. 
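+    # COLMAP writes each sub-reconstruction into a numbered sub-directory
+    # (e.g. <colmap_output_path>/0, /1, ...), each holding an images.bin;
+    # directories that fail the validity check below are skipped.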
+ if os.path.exists(colmap_output_path): + idx_list = [ + _d for _d in os.listdir(colmap_output_path) + if os.path.isdir(os.path.join(colmap_output_path, _d)) + ] + else: + idx_list = [] + + for cur_index in idx_list: + cur_output_path = os.path.join(colmap_output_path, cur_index) + if os.path.isdir(cur_output_path): + colmap_img_file = os.path.join(cur_output_path, 'images.bin') + images_bin = read_images_binary(colmap_img_file) + # Check validity + if not is_colmap_img_valid(colmap_img_file): + continue + # Find the reconstruction with most number of images + if len(images_bin) > best_num_images: + best_index = int(cur_index) + best_num_images = len(images_bin) + + return str(best_index) \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/src/utils/colmap/read_write_model.py b/imcui/third_party/MatchAnything/src/utils/colmap/read_write_model.py new file mode 100644 index 0000000000000000000000000000000000000000..eeb03c3bee0f1d6ffd5285835c83920e11de5b51 --- /dev/null +++ b/imcui/third_party/MatchAnything/src/utils/colmap/read_write_model.py @@ -0,0 +1,509 @@ +# Copyright (c) 2018, ETH Zurich and UNC Chapel Hill. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# * Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# +# * Neither the name of ETH Zurich and UNC Chapel Hill nor the names of +# its contributors may be used to endorse or promote products derived +# from this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +# +# Author: Johannes L. 
Schoenberger (jsch-at-demuc-dot-de) + +import os +import sys +import collections +import numpy as np +import struct +import argparse + + +CameraModel = collections.namedtuple( + "CameraModel", ["model_id", "model_name", "num_params"]) +Camera = collections.namedtuple( + "Camera", ["id", "model", "width", "height", "params"]) +BaseImage = collections.namedtuple( + "Image", ["id", "qvec", "tvec", "camera_id", "name", "xys", "point3D_ids"]) +Point3D = collections.namedtuple( + "Point3D", ["id", "xyz", "rgb", "error", "image_ids", "point2D_idxs"]) + + +class Image(BaseImage): + def qvec2rotmat(self): + return qvec2rotmat(self.qvec) + + +CAMERA_MODELS = { + CameraModel(model_id=0, model_name="SIMPLE_PINHOLE", num_params=3), + CameraModel(model_id=1, model_name="PINHOLE", num_params=4), + CameraModel(model_id=2, model_name="SIMPLE_RADIAL", num_params=4), + CameraModel(model_id=3, model_name="RADIAL", num_params=5), + CameraModel(model_id=4, model_name="OPENCV", num_params=8), + CameraModel(model_id=5, model_name="OPENCV_FISHEYE", num_params=8), + CameraModel(model_id=6, model_name="FULL_OPENCV", num_params=12), + CameraModel(model_id=7, model_name="FOV", num_params=5), + CameraModel(model_id=8, model_name="SIMPLE_RADIAL_FISHEYE", num_params=4), + CameraModel(model_id=9, model_name="RADIAL_FISHEYE", num_params=5), + CameraModel(model_id=10, model_name="THIN_PRISM_FISHEYE", num_params=12) +} +CAMERA_MODEL_IDS = dict([(camera_model.model_id, camera_model) + for camera_model in CAMERA_MODELS]) +CAMERA_MODEL_NAMES = dict([(camera_model.model_name, camera_model) + for camera_model in CAMERA_MODELS]) + + +def read_next_bytes(fid, num_bytes, format_char_sequence, endian_character="<"): + """Read and unpack the next bytes from a binary file. + :param fid: + :param num_bytes: Sum of combination of {2, 4, 8}, e.g. 2, 6, 16, 30, etc. + :param format_char_sequence: List of {c, e, f, d, h, H, i, I, l, L, q, Q}. + :param endian_character: Any of {@, =, <, >, !} + :return: Tuple of read and unpacked values. + """ + data = fid.read(num_bytes) + return struct.unpack(endian_character + format_char_sequence, data) + + +def write_next_bytes(fid, data, format_char_sequence, endian_character="<"): + """pack and write to a binary file. + :param fid: + :param data: data to send, if multiple elements are sent at the same time, + they should be encapsuled either in a list or a tuple + :param format_char_sequence: List of {c, e, f, d, h, H, i, I, l, L, q, Q}. 
+ should be the same length as the data list or tuple + :param endian_character: Any of {@, =, <, >, !} + """ + if isinstance(data, (list, tuple)): + bytes = struct.pack(endian_character + format_char_sequence, *data) + else: + bytes = struct.pack(endian_character + format_char_sequence, data) + fid.write(bytes) + + +def read_cameras_text(path): + """ + see: src/base/reconstruction.cc + void Reconstruction::WriteCamerasText(const std::string& path) + void Reconstruction::ReadCamerasText(const std::string& path) + """ + cameras = {} + with open(path, "r") as fid: + while True: + line = fid.readline() + if not line: + break + line = line.strip() + if len(line) > 0 and line[0] != "#": + elems = line.split() + camera_id = int(elems[0]) + model = elems[1] + width = int(elems[2]) + height = int(elems[3]) + params = np.array(tuple(map(float, elems[4:]))) + cameras[camera_id] = Camera(id=camera_id, model=model, + width=width, height=height, + params=params) + return cameras + + +def read_cameras_binary(path_to_model_file): + """ + see: src/base/reconstruction.cc + void Reconstruction::WriteCamerasBinary(const std::string& path) + void Reconstruction::ReadCamerasBinary(const std::string& path) + """ + cameras = {} + with open(path_to_model_file, "rb") as fid: + num_cameras = read_next_bytes(fid, 8, "Q")[0] + for _ in range(num_cameras): + camera_properties = read_next_bytes( + fid, num_bytes=24, format_char_sequence="iiQQ") + camera_id = camera_properties[0] + model_id = camera_properties[1] + model_name = CAMERA_MODEL_IDS[camera_properties[1]].model_name + width = camera_properties[2] + height = camera_properties[3] + num_params = CAMERA_MODEL_IDS[model_id].num_params + params = read_next_bytes(fid, num_bytes=8*num_params, + format_char_sequence="d"*num_params) + cameras[camera_id] = Camera(id=camera_id, + model=model_name, + width=width, + height=height, + params=np.array(params)) + assert len(cameras) == num_cameras + return cameras + + +def write_cameras_text(cameras, path): + """ + see: src/base/reconstruction.cc + void Reconstruction::WriteCamerasText(const std::string& path) + void Reconstruction::ReadCamerasText(const std::string& path) + """ + HEADER = "# Camera list with one line of data per camera:\n" + "# CAMERA_ID, MODEL, WIDTH, HEIGHT, PARAMS[]\n" + "# Number of cameras: {}\n".format(len(cameras)) + with open(path, "w") as fid: + fid.write(HEADER) + for _, cam in cameras.items(): + to_write = [cam.id, cam.model, cam.width, cam.height, *cam.params] + line = " ".join([str(elem) for elem in to_write]) + fid.write(line + "\n") + + +def write_cameras_binary(cameras, path_to_model_file): + """ + see: src/base/reconstruction.cc + void Reconstruction::WriteCamerasBinary(const std::string& path) + void Reconstruction::ReadCamerasBinary(const std::string& path) + """ + with open(path_to_model_file, "wb") as fid: + write_next_bytes(fid, len(cameras), "Q") + for _, cam in cameras.items(): + model_id = CAMERA_MODEL_NAMES[cam.model].model_id + camera_properties = [cam.id, + model_id, + cam.width, + cam.height] + write_next_bytes(fid, camera_properties, "iiQQ") + for p in cam.params: + write_next_bytes(fid, float(p), "d") + return cameras + + +def read_images_text(path): + """ + see: src/base/reconstruction.cc + void Reconstruction::ReadImagesText(const std::string& path) + void Reconstruction::WriteImagesText(const std::string& path) + """ + images = {} + with open(path, "r") as fid: + while True: + line = fid.readline() + if not line: + break + line = line.strip() + if len(line) > 0 and line[0] != 
"#": + elems = line.split() + image_id = int(elems[0]) + qvec = np.array(tuple(map(float, elems[1:5]))) + tvec = np.array(tuple(map(float, elems[5:8]))) + camera_id = int(elems[8]) + image_name = elems[9] + elems = fid.readline().split() + xys = np.column_stack([tuple(map(float, elems[0::3])), + tuple(map(float, elems[1::3]))]) + point3D_ids = np.array(tuple(map(int, elems[2::3]))) + images[image_id] = Image( + id=image_id, qvec=qvec, tvec=tvec, + camera_id=camera_id, name=image_name, + xys=xys, point3D_ids=point3D_ids) + return images + + +def read_images_binary(path_to_model_file): + """ + see: src/base/reconstruction.cc + void Reconstruction::ReadImagesBinary(const std::string& path) + void Reconstruction::WriteImagesBinary(const std::string& path) + """ + images = {} + with open(path_to_model_file, "rb") as fid: + num_reg_images = read_next_bytes(fid, 8, "Q")[0] + for _ in range(num_reg_images): + binary_image_properties = read_next_bytes( + fid, num_bytes=64, format_char_sequence="idddddddi") + image_id = binary_image_properties[0] + qvec = np.array(binary_image_properties[1:5]) + tvec = np.array(binary_image_properties[5:8]) + camera_id = binary_image_properties[8] + image_name = "" + current_char = read_next_bytes(fid, 1, "c")[0] + while current_char != b"\x00": # look for the ASCII 0 entry + image_name += current_char.decode("utf-8") + current_char = read_next_bytes(fid, 1, "c")[0] + num_points2D = read_next_bytes(fid, num_bytes=8, + format_char_sequence="Q")[0] + x_y_id_s = read_next_bytes(fid, num_bytes=24*num_points2D, + format_char_sequence="ddq"*num_points2D) + xys = np.column_stack([tuple(map(float, x_y_id_s[0::3])), + tuple(map(float, x_y_id_s[1::3]))]) + point3D_ids = np.array(tuple(map(int, x_y_id_s[2::3]))) + images[image_id] = Image( + id=image_id, qvec=qvec, tvec=tvec, + camera_id=camera_id, name=image_name, + xys=xys, point3D_ids=point3D_ids) + return images + + +def write_images_text(images, path): + """ + see: src/base/reconstruction.cc + void Reconstruction::ReadImagesText(const std::string& path) + void Reconstruction::WriteImagesText(const std::string& path) + """ + if len(images) == 0: + mean_observations = 0 + else: + mean_observations = sum((len(img.point3D_ids) for _, img in images.items()))/len(images) + HEADER = "# Image list with two lines of data per image:\n" + "# IMAGE_ID, QW, QX, QY, QZ, TX, TY, TZ, CAMERA_ID, NAME\n" + "# POINTS2D[] as (X, Y, POINT3D_ID)\n" + "# Number of images: {}, mean observations per image: {}\n".format(len(images), mean_observations) + + with open(path, "w") as fid: + fid.write(HEADER) + for _, img in images.items(): + image_header = [img.id, *img.qvec, *img.tvec, img.camera_id, img.name] + first_line = " ".join(map(str, image_header)) + fid.write(first_line + "\n") + + points_strings = [] + for xy, point3D_id in zip(img.xys, img.point3D_ids): + points_strings.append(" ".join(map(str, [*xy, point3D_id]))) + fid.write(" ".join(points_strings) + "\n") + + +def write_images_binary(images, path_to_model_file): + """ + see: src/base/reconstruction.cc + void Reconstruction::ReadImagesBinary(const std::string& path) + void Reconstruction::WriteImagesBinary(const std::string& path) + """ + with open(path_to_model_file, "wb") as fid: + write_next_bytes(fid, len(images), "Q") + for _, img in images.items(): + write_next_bytes(fid, img.id, "i") + write_next_bytes(fid, img.qvec.tolist(), "dddd") + write_next_bytes(fid, img.tvec.tolist(), "ddd") + write_next_bytes(fid, img.camera_id, "i") + for char in img.name: + write_next_bytes(fid, 
char.encode("utf-8"), "c") + write_next_bytes(fid, b"\x00", "c") + write_next_bytes(fid, len(img.point3D_ids), "Q") + for xy, p3d_id in zip(img.xys, img.point3D_ids): + write_next_bytes(fid, [*xy, p3d_id], "ddq") + + +def read_points3D_text(path): + """ + see: src/base/reconstruction.cc + void Reconstruction::ReadPoints3DText(const std::string& path) + void Reconstruction::WritePoints3DText(const std::string& path) + """ + points3D = {} + with open(path, "r") as fid: + while True: + line = fid.readline() + if not line: + break + line = line.strip() + if len(line) > 0 and line[0] != "#": + elems = line.split() + point3D_id = int(elems[0]) + xyz = np.array(tuple(map(float, elems[1:4]))) + rgb = np.array(tuple(map(int, elems[4:7]))) + error = float(elems[7]) + image_ids = np.array(tuple(map(int, elems[8::2]))) + point2D_idxs = np.array(tuple(map(int, elems[9::2]))) + points3D[point3D_id] = Point3D(id=point3D_id, xyz=xyz, rgb=rgb, + error=error, image_ids=image_ids, + point2D_idxs=point2D_idxs) + return points3D + + +def read_points3d_binary(path_to_model_file): + """ + see: src/base/reconstruction.cc + void Reconstruction::ReadPoints3DBinary(const std::string& path) + void Reconstruction::WritePoints3DBinary(const std::string& path) + """ + points3D = {} + with open(path_to_model_file, "rb") as fid: + num_points = read_next_bytes(fid, 8, "Q")[0] + for _ in range(num_points): + binary_point_line_properties = read_next_bytes( + fid, num_bytes=43, format_char_sequence="QdddBBBd") + point3D_id = binary_point_line_properties[0] + xyz = np.array(binary_point_line_properties[1:4]) + rgb = np.array(binary_point_line_properties[4:7]) + error = np.array(binary_point_line_properties[7]) + track_length = read_next_bytes( + fid, num_bytes=8, format_char_sequence="Q")[0] + track_elems = read_next_bytes( + fid, num_bytes=8*track_length, + format_char_sequence="ii"*track_length) + image_ids = np.array(tuple(map(int, track_elems[0::2]))) + point2D_idxs = np.array(tuple(map(int, track_elems[1::2]))) + points3D[point3D_id] = Point3D( + id=point3D_id, xyz=xyz, rgb=rgb, + error=error, image_ids=image_ids, + point2D_idxs=point2D_idxs) + return points3D + +def write_points3D_text(points3D, path): + """ + see: src/base/reconstruction.cc + void Reconstruction::ReadPoints3DText(const std::string& path) + void Reconstruction::WritePoints3DText(const std::string& path) + """ + if len(points3D) == 0: + mean_track_length = 0 + else: + mean_track_length = sum((len(pt.image_ids) for _, pt in points3D.items()))/len(points3D) + HEADER = "# 3D point list with one line of data per point:\n" + "# POINT3D_ID, X, Y, Z, R, G, B, ERROR, TRACK[] as (IMAGE_ID, POINT2D_IDX)\n" + "# Number of points: {}, mean track length: {}\n".format(len(points3D), mean_track_length) + + with open(path, "w") as fid: + fid.write(HEADER) + for _, pt in points3D.items(): + point_header = [pt.id, *pt.xyz, *pt.rgb, pt.error] + fid.write(" ".join(map(str, point_header)) + " ") + track_strings = [] + for image_id, point2D in zip(pt.image_ids, pt.point2D_idxs): + track_strings.append(" ".join(map(str, [image_id, point2D]))) + fid.write(" ".join(track_strings) + "\n") + + +def write_points3d_binary(points3D, path_to_model_file): + """ + see: src/base/reconstruction.cc + void Reconstruction::ReadPoints3DBinary(const std::string& path) + void Reconstruction::WritePoints3DBinary(const std::string& path) + """ + with open(path_to_model_file, "wb") as fid: + write_next_bytes(fid, len(points3D), "Q") + for _, pt in points3D.items(): + write_next_bytes(fid, pt.id, 
"Q") + write_next_bytes(fid, pt.xyz.tolist(), "ddd") + write_next_bytes(fid, pt.rgb.tolist(), "BBB") + write_next_bytes(fid, pt.error, "d") + track_length = pt.image_ids.shape[0] + write_next_bytes(fid, track_length, "Q") + for image_id, point2D_id in zip(pt.image_ids, pt.point2D_idxs): + write_next_bytes(fid, [image_id, point2D_id], "ii") + + +def detect_model_format(path, ext): + if os.path.isfile(os.path.join(path, "cameras" + ext)) and \ + os.path.isfile(os.path.join(path, "images" + ext)) and \ + os.path.isfile(os.path.join(path, "points3D" + ext)): + print("Detected model format: '" + ext + "'") + return True + + return False + + +def read_model(path, ext=""): + # try to detect the extension automatically + if ext == "": + if detect_model_format(path, ".bin"): + ext = ".bin" + elif detect_model_format(path, ".txt"): + ext = ".txt" + else: + print("Provide model format: '.bin' or '.txt'") + return + + if ext == ".txt": + cameras = read_cameras_text(os.path.join(path, "cameras" + ext)) + images = read_images_text(os.path.join(path, "images" + ext)) + points3D = read_points3D_text(os.path.join(path, "points3D") + ext) + else: + cameras = read_cameras_binary(os.path.join(path, "cameras" + ext)) + images = read_images_binary(os.path.join(path, "images" + ext)) + points3D = read_points3d_binary(os.path.join(path, "points3D") + ext) + return cameras, images, points3D + + +def write_model(cameras, images, points3D, path, ext=".bin"): + if ext == ".txt": + write_cameras_text(cameras, os.path.join(path, "cameras" + ext)) + write_images_text(images, os.path.join(path, "images" + ext)) + write_points3D_text(points3D, os.path.join(path, "points3D") + ext) + else: + write_cameras_binary(cameras, os.path.join(path, "cameras" + ext)) + write_images_binary(images, os.path.join(path, "images" + ext)) + write_points3d_binary(points3D, os.path.join(path, "points3D") + ext) + return cameras, images, points3D + + +def qvec2rotmat(qvec): + return np.array([ + [1 - 2 * qvec[2]**2 - 2 * qvec[3]**2, + 2 * qvec[1] * qvec[2] - 2 * qvec[0] * qvec[3], + 2 * qvec[3] * qvec[1] + 2 * qvec[0] * qvec[2]], + [2 * qvec[1] * qvec[2] + 2 * qvec[0] * qvec[3], + 1 - 2 * qvec[1]**2 - 2 * qvec[3]**2, + 2 * qvec[2] * qvec[3] - 2 * qvec[0] * qvec[1]], + [2 * qvec[3] * qvec[1] - 2 * qvec[0] * qvec[2], + 2 * qvec[2] * qvec[3] + 2 * qvec[0] * qvec[1], + 1 - 2 * qvec[1]**2 - 2 * qvec[2]**2]]) + + +def rotmat2qvec(R): + Rxx, Ryx, Rzx, Rxy, Ryy, Rzy, Rxz, Ryz, Rzz = R.flat + K = np.array([ + [Rxx - Ryy - Rzz, 0, 0, 0], + [Ryx + Rxy, Ryy - Rxx - Rzz, 0, 0], + [Rzx + Rxz, Rzy + Ryz, Rzz - Rxx - Ryy, 0], + [Ryz - Rzy, Rzx - Rxz, Rxy - Ryx, Rxx + Ryy + Rzz]]) / 3.0 + eigvals, eigvecs = np.linalg.eigh(K) + qvec = eigvecs[[3, 0, 1, 2], np.argmax(eigvals)] + if qvec[0] < 0: + qvec *= -1 + return qvec + + +def main(): + parser = argparse.ArgumentParser(description="Read and write COLMAP binary and text models") + parser.add_argument("--input_model", help="path to input model folder") + parser.add_argument("--input_format", choices=[".bin", ".txt"], + help="input model format", default="") + parser.add_argument("--output_model", + help="path to output model folder") + parser.add_argument("--output_format", choices=[".bin", ".txt"], + help="outut model format", default=".txt") + args = parser.parse_args() + + cameras, images, points3D = read_model(path=args.input_model, ext=args.input_format) + + # FIXME: for debug only + # images_ = images[1] + # tvec, qvec = images_.tvec, images_.qvec + # rotation = qvec2rotmat(qvec).reshape(3, 3) + # pose = 
np.concatenate([rotation, tvec.reshape(3, 1)], axis=1) + # import ipdb; ipdb.set_trace() + + print("num_cameras:", len(cameras)) + print("num_images:", len(images)) + print("num_points3D:", len(points3D)) + + if args.output_model is not None: + write_model(cameras, images, points3D, path=args.output_model, ext=args.output_format) + + +if __name__ == "__main__": + main() diff --git a/imcui/third_party/MatchAnything/src/utils/comm.py b/imcui/third_party/MatchAnything/src/utils/comm.py new file mode 100644 index 0000000000000000000000000000000000000000..26ec9517cc47e224430106d8ae9aa99a3fe49167 --- /dev/null +++ b/imcui/third_party/MatchAnything/src/utils/comm.py @@ -0,0 +1,265 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +""" +[Copied from detectron2] +This file contains primitives for multi-gpu communication. +This is useful when doing distributed training. +""" + +import functools +import logging +import numpy as np +import pickle +import torch +import torch.distributed as dist + +_LOCAL_PROCESS_GROUP = None +""" +A torch process group which only includes processes that on the same machine as the current process. +This variable is set when processes are spawned by `launch()` in "engine/launch.py". +""" + + +def get_world_size() -> int: + if not dist.is_available(): + return 1 + if not dist.is_initialized(): + return 1 + return dist.get_world_size() + + +def get_rank() -> int: + if not dist.is_available(): + return 0 + if not dist.is_initialized(): + return 0 + return dist.get_rank() + + +def get_local_rank() -> int: + """ + Returns: + The rank of the current process within the local (per-machine) process group. + """ + if not dist.is_available(): + return 0 + if not dist.is_initialized(): + return 0 + assert _LOCAL_PROCESS_GROUP is not None + return dist.get_rank(group=_LOCAL_PROCESS_GROUP) + + +def get_local_size() -> int: + """ + Returns: + The size of the per-machine process group, + i.e. the number of processes per machine. + """ + if not dist.is_available(): + return 1 + if not dist.is_initialized(): + return 1 + return dist.get_world_size(group=_LOCAL_PROCESS_GROUP) + + +def is_main_process() -> bool: + return get_rank() == 0 + + +def synchronize(): + """ + Helper function to synchronize (barrier) among all processes when + using distributed training + """ + if not dist.is_available(): + return + if not dist.is_initialized(): + return + world_size = dist.get_world_size() + if world_size == 1: + return + dist.barrier() + + +@functools.lru_cache() +def _get_global_gloo_group(): + """ + Return a process group based on gloo backend, containing all the ranks + The result is cached. 
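+
+    Gloo is used here because the gathered payloads are serialized into CPU
+    byte tensors, which the NCCL backend cannot transport (NCCL only handles
+    CUDA tensors).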
+ """ + if dist.get_backend() == "nccl": + return dist.new_group(backend="gloo") + else: + return dist.group.WORLD + + +def _serialize_to_tensor(data, group): + backend = dist.get_backend(group) + assert backend in ["gloo", "nccl"] + device = torch.device("cpu" if backend == "gloo" else "cuda") + + buffer = pickle.dumps(data) + if len(buffer) > 1024 ** 3: + logger = logging.getLogger(__name__) + logger.warning( + "Rank {} trying to all-gather {:.2f} GB of data on device {}".format( + get_rank(), len(buffer) / (1024 ** 3), device + ) + ) + storage = torch.ByteStorage.from_buffer(buffer) + tensor = torch.ByteTensor(storage).to(device=device) + return tensor + + +def _pad_to_largest_tensor(tensor, group): + """ + Returns: + list[int]: size of the tensor, on each rank + Tensor: padded tensor that has the max size + """ + world_size = dist.get_world_size(group=group) + assert ( + world_size >= 1 + ), "comm.gather/all_gather must be called from ranks within the given group!" + local_size = torch.tensor([tensor.numel()], dtype=torch.int64, device=tensor.device) + size_list = [ + torch.zeros([1], dtype=torch.int64, device=tensor.device) for _ in range(world_size) + ] + dist.all_gather(size_list, local_size, group=group) + + size_list = [int(size.item()) for size in size_list] + + max_size = max(size_list) + + # we pad the tensor because torch all_gather does not support + # gathering tensors of different shapes + if local_size != max_size: + padding = torch.zeros((max_size - local_size,), dtype=torch.uint8, device=tensor.device) + tensor = torch.cat((tensor, padding), dim=0) + return size_list, tensor + + +def all_gather(data, group=None): + """ + Run all_gather on arbitrary picklable data (not necessarily tensors). + + Args: + data: any picklable object + group: a torch process group. By default, will use a group which + contains all ranks on gloo backend. + + Returns: + list[data]: list of data gathered from each rank + """ + if get_world_size() == 1: + return [data] + if group is None: + group = _get_global_gloo_group() + if dist.get_world_size(group) == 1: + return [data] + + tensor = _serialize_to_tensor(data, group) + + size_list, tensor = _pad_to_largest_tensor(tensor, group) + max_size = max(size_list) + + # receiving Tensor from all ranks + tensor_list = [ + torch.empty((max_size,), dtype=torch.uint8, device=tensor.device) for _ in size_list + ] + dist.all_gather(tensor_list, tensor, group=group) + + data_list = [] + for size, tensor in zip(size_list, tensor_list): + buffer = tensor.cpu().numpy().tobytes()[:size] + data_list.append(pickle.loads(buffer)) + + return data_list + + +def gather(data, dst=0, group=None): + """ + Run gather on arbitrary picklable data (not necessarily tensors). + + Args: + data: any picklable object + dst (int): destination rank + group: a torch process group. By default, will use a group which + contains all ranks on gloo backend. + + Returns: + list[data]: on dst, a list of data gathered from each rank. Otherwise, + an empty list. 
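+
+    Example (illustrative):
+        stats = gather({"rank": get_rank()}, dst=0)
+        # on rank 0, `stats` holds one entry per rank; on every other
+        # rank it is the empty list []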
+ """ + if get_world_size() == 1: + return [data] + if group is None: + group = _get_global_gloo_group() + if dist.get_world_size(group=group) == 1: + return [data] + rank = dist.get_rank(group=group) + + tensor = _serialize_to_tensor(data, group) + size_list, tensor = _pad_to_largest_tensor(tensor, group) + + # receiving Tensor from all ranks + if rank == dst: + max_size = max(size_list) + tensor_list = [ + torch.empty((max_size,), dtype=torch.uint8, device=tensor.device) for _ in size_list + ] + dist.gather(tensor, tensor_list, dst=dst, group=group) + + data_list = [] + for size, tensor in zip(size_list, tensor_list): + buffer = tensor.cpu().numpy().tobytes()[:size] + data_list.append(pickle.loads(buffer)) + return data_list + else: + dist.gather(tensor, [], dst=dst, group=group) + return [] + + +def shared_random_seed(): + """ + Returns: + int: a random number that is the same across all workers. + If workers need a shared RNG, they can use this shared seed to + create one. + + All workers must call this function, otherwise it will deadlock. + """ + ints = np.random.randint(2 ** 31) + all_ints = all_gather(ints) + return all_ints[0] + + +def reduce_dict(input_dict, average=True): + """ + Reduce the values in the dictionary from all processes so that process with rank + 0 has the reduced results. + + Args: + input_dict (dict): inputs to be reduced. All the values must be scalar CUDA Tensor. + average (bool): whether to do average or sum + + Returns: + a dict with the same keys as input_dict, after reduction. + """ + world_size = get_world_size() + if world_size < 2: + return input_dict + with torch.no_grad(): + names = [] + values = [] + # sort the keys so that they are consistent across processes + for k in sorted(input_dict.keys()): + names.append(k) + values.append(input_dict[k]) + values = torch.stack(values, dim=0) + dist.reduce(values, dst=0) + if dist.get_rank() == 0 and average: + # only main process gets accumulated, so only divide by + # world_size in this case + values /= world_size + reduced_dict = {k: v for k, v in zip(names, values)} + return reduced_dict diff --git a/imcui/third_party/MatchAnything/src/utils/dataloader.py b/imcui/third_party/MatchAnything/src/utils/dataloader.py new file mode 100644 index 0000000000000000000000000000000000000000..6da37b880a290c2bb3ebb028d0c8dab592acc5c1 --- /dev/null +++ b/imcui/third_party/MatchAnything/src/utils/dataloader.py @@ -0,0 +1,23 @@ +import numpy as np + + +# --- PL-DATAMODULE --- + +def get_local_split(items: list, world_size: int, rank: int, seed: int): + """ The local rank only loads a split of the dataset. 
""" + n_items = len(items) + items_permute = np.random.RandomState(seed).permutation(items) + if n_items % world_size == 0: + padded_items = items_permute + else: + padding = np.random.RandomState(seed).choice( + items, + world_size - (n_items % world_size), + replace=True) + padded_items = np.concatenate([items_permute, padding]) + assert len(padded_items) % world_size == 0, \ + f'len(padded_items): {len(padded_items)}; world_size: {world_size}; len(padding): {len(padding)}' + n_per_rank = len(padded_items) // world_size + local_items = padded_items[n_per_rank * rank: n_per_rank * (rank+1)] + + return local_items diff --git a/imcui/third_party/MatchAnything/src/utils/dataset.py b/imcui/third_party/MatchAnything/src/utils/dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..73a0a96db5ef2c08c99394a25e2db306bdb47b6a --- /dev/null +++ b/imcui/third_party/MatchAnything/src/utils/dataset.py @@ -0,0 +1,518 @@ +import io +from loguru import logger + +import cv2 +import numpy as np +from pathlib import Path +import h5py +import torch +import re +from PIL import Image +from numpy.linalg import inv +from torchvision.transforms import Normalize +from .sample_homo import sample_homography_sap +from kornia.geometry import homography_warp, normalize_homography, normal_transform_pixel +OSS_FOLDER_PATH = '???' +PCACHE_FOLDER_PATH = '???' + +import fsspec +from PIL import Image + +# Initialize pcache +try: + PCACHE_HOST = "???" + PCACHE_PORT = 00000 + pcache_kwargs = {"host": PCACHE_HOST, "port": PCACHE_PORT} + pcache_fs = fsspec.filesystem("pcache", pcache_kwargs=pcache_kwargs) + root_dir='???' +except Exception as e: + logger.error(f"Error captured:{e}") + +try: + # for internel use only + from pcache_fileio import fileio +except Exception: + MEGADEPTH_CLIENT = SCANNET_CLIENT = None + +# --- DATA IO --- + +def load_pfm(pfm_path): + with open(pfm_path, 'rb') as fin: + color = None + width = None + height = None + scale = None + data_type = None + header = str(fin.readline().decode('UTF-8')).rstrip() + + if header == 'PF': + color = True + elif header == 'Pf': + color = False + else: + raise Exception('Not a PFM file.') + + dim_match = re.match(r'^(\d+)\s(\d+)\s$', fin.readline().decode('UTF-8')) + if dim_match: + width, height = map(int, dim_match.groups()) + else: + raise Exception('Malformed PFM header.') + scale = float((fin.readline().decode('UTF-8')).rstrip()) + if scale < 0: # little-endian + data_type = ' 5000: + logger.error(f"Try to load: {pcache_path}, but failed {failed_num} times") + continue + else: + load_failed = True + failed_num = 0 + while load_failed: + try: + with pcache_fs.open(str(pcache_path), 'rb') as f: + data = np.array(h5py.File(io.BytesIO(f.read()), 'r')['/depth']) + load_failed = False + except: + failed_num += 1 + if failed_num > 5000: + logger.error(f"Try to load: {pcache_path}, but failed {failed_num} times") + continue + + except Exception as ex: + print(f"==> Data loading failure: {path}") + raise ex + + assert data is not None + return data + + +def imread_gray(path, augment_fn=None, cv_type=None): + if path.startswith('oss://'): + path = path.replace(OSS_FOLDER_PATH, PCACHE_FOLDER_PATH) + if path.startswith('pcache://'): + path = path[:9] + path[9:].replace('////', '/').replace('///', '/').replace('//', '/') # remove all continuous '/' + + if cv_type is None: + cv_type = cv2.IMREAD_GRAYSCALE if augment_fn is None \ + else cv2.IMREAD_COLOR + if str(path).startswith('oss://') or str(path).startswith('pcache://'): + image = 
load_array_from_pcache(str(path), cv_type) + else: + image = cv2.imread(str(path), cv_type) + + if augment_fn is not None: + image = cv2.imread(str(path), cv2.IMREAD_COLOR) + image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) + image = augment_fn(image) + image = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY) + return image # (h, w) + +def imread_color(path, augment_fn=None): + if path.startswith('oss://'): + path = path.replace(OSS_FOLDER_PATH, PCACHE_FOLDER_PATH) + if path.startswith('pcache://'): + path = path[:9] + path[9:].replace('////', '/').replace('///', '/').replace('//', '/') # remove all continuous '/' + + if str(path).startswith('oss://') or str(path).startswith('pcache://'): + filename = path.split(root_dir)[1] + pcache_path = Path(root_dir) / filename + load_failed = True + failed_num = 0 + while load_failed: + try: + with pcache_fs.open(str(pcache_path), 'rb') as f: + pil_image = Image.open(f).convert("RGB") + load_failed = False + except: + failed_num += 1 + if failed_num > 5000: + logger.error(f"Try to load: {pcache_path}, but failed {failed_num} times") + continue + else: + pil_image = Image.open(str(path)).convert("RGB") + image = np.array(pil_image) + + if augment_fn is not None: + image = augment_fn(image) + return image # (h, w) + + +def get_resized_wh(w, h, resize=None): + if resize is not None: # resize the longer edge + scale = resize / max(h, w) + w_new, h_new = int(round(w*scale)), int(round(h*scale)) + else: + w_new, h_new = w, h + return w_new, h_new + + +def get_divisible_wh(w, h, df=None): + if df is not None: + w_new, h_new = map(lambda x: int(x // df * df), [w, h]) + else: + w_new, h_new = w, h + return w_new, h_new + + +def pad_bottom_right(inp, pad_size, ret_mask=False): + assert isinstance(pad_size, int) and pad_size >= max(inp.shape[-2:]), f"{pad_size} < {max(inp.shape[-2:])}" + mask = None + if inp.ndim == 2: + padded = np.zeros((pad_size, pad_size), dtype=inp.dtype) + padded[:inp.shape[0], :inp.shape[1]] = inp + if ret_mask: + mask = np.zeros((pad_size, pad_size), dtype=bool) + mask[:inp.shape[0], :inp.shape[1]] = True + elif inp.ndim == 3: + padded = np.zeros((inp.shape[0], pad_size, pad_size), dtype=inp.dtype) + padded[:, :inp.shape[1], :inp.shape[2]] = inp + if ret_mask: + mask = np.zeros((inp.shape[0], pad_size, pad_size), dtype=bool) + mask[:, :inp.shape[1], :inp.shape[2]] = True + mask = mask[0] + else: + raise NotImplementedError() + return padded, mask + + +# --- MEGADEPTH --- + +def read_megadepth_gray(path, resize=None, df=None, padding=False, augment_fn=None, read_gray=True, normalize_img=False, resize_by_stretch=False): + """ + Args: + resize (int, optional): the longer edge of resized images. None for no resize. + padding (bool): If set to 'True', zero-pad resized images to squared size. 
+ augment_fn (callable, optional): augments images with pre-defined visual effects + Returns: + image (torch.tensor): (1, h, w) + mask (torch.tensor): (h, w) + scale (torch.tensor): [w/w_new, h/h_new] + """ + # read image + if read_gray: + image = imread_gray(path, augment_fn) + else: + image = imread_color(path, augment_fn) + + # resize image + try: + w, h = image.shape[1], image.shape[0] + except: + logger.error(f"{path} not exist or read image error!") + if resize_by_stretch: + w_new, h_new = (resize, resize) if isinstance(resize, int) else (resize[1], resize[0]) + else: + if resize: + if not isinstance(resize, int): + assert resize[0] == resize[1] + resize = resize[0] + w_new, h_new = get_resized_wh(w, h, resize) + w_new, h_new = get_divisible_wh(w_new, h_new, df) + else: + w_new, h_new = w, h + + image = cv2.resize(image, (w_new, h_new)) + scale = torch.tensor([w/w_new, h/h_new], dtype=torch.float) + origin_img_size = torch.tensor([h, w], dtype=torch.float) + + if not read_gray: + image = image.transpose(2,0,1) + + if padding: # padding + pad_to = max(h_new, w_new) + image, mask = pad_bottom_right(image, pad_to, ret_mask=True) + else: + mask = None + + if len(image.shape) == 2: + image = torch.from_numpy(image).float()[None] / 255 # (h, w) -> (1, h, w) and normalized + else: + image = torch.from_numpy(image).float() / 255 # (h, w) -> (1, h, w) and normalized + if mask is not None: + mask = torch.from_numpy(mask) + + if image.shape[0] == 3 and normalize_img: + # Normalize image: + image = Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])(image) # Input: 3*H*W + + return image, mask, scale, origin_img_size + +def read_megadepth_gray_sample_homowarp(path, resize=None, df=None, padding=False, augment_fn=None, read_gray=True, normalize_img=False, resize_by_stretch=False): + """ + Args: + resize (int, optional): the longer edge of resized images. None for no resize. + padding (bool): If set to 'True', zero-pad resized images to squared size. 
+ augment_fn (callable, optional): augments images with pre-defined visual effects + Returns: + image (torch.tensor): (1, h, w) + mask (torch.tensor): (h, w) + scale (torch.tensor): [w/w_new, h/h_new] + """ + # read image + if read_gray: + image = imread_gray(path, augment_fn) + else: + image = imread_color(path, augment_fn) + + # resize image + w, h = image.shape[1], image.shape[0] + if resize_by_stretch: + w_new, h_new = (resize, resize) if isinstance(resize, int) else (resize[1], resize[0]) + else: + if not isinstance(resize, int): + assert resize[0] == resize[1] + resize = resize[0] + w_new, h_new = get_resized_wh(w, h, resize) + w_new, h_new = get_divisible_wh(w_new, h_new, df) + + w_new, h_new = get_divisible_wh(w_new, h_new, df) + + origin_img_size = torch.tensor([h, w], dtype=torch.float) + + # Sample homography and warp: + homo_sampled = sample_homography_sap(h, w) # 3*3 + homo_sampled_normed = normalize_homography( + torch.from_numpy(homo_sampled[None]).to(torch.float32), + (h, w), + (h, w), + ) + + if len(image.shape) == 2: + image = torch.from_numpy(image).float()[None, None] / 255 # B * C * H * W + else: + image = torch.from_numpy(image).float().permute(2,0,1)[None] / 255 + + homo_warpped_image = homography_warp( + image, # 1 * C * H * W + torch.linalg.inv(homo_sampled_normed), + (h, w), + ) + image = (homo_warpped_image[0].permute(1,2,0).numpy() * 255).astype(np.uint8) + norm_pixel_mat = normal_transform_pixel(h, w) # 1 * 3 * 3 + + image = cv2.resize(image, (w_new, h_new)) + scale = torch.tensor([w/w_new, h/h_new], dtype=torch.float) + + if not read_gray: + image = image.transpose(2,0,1) + + if padding: # padding + pad_to = max(h_new, w_new) + image, mask = pad_bottom_right(image, pad_to, ret_mask=True) + else: + mask = None + + if len(image.shape) == 2: + image = torch.from_numpy(image).float()[None] / 255 # (h, w) -> (1, h, w) and normalized + else: + image = torch.from_numpy(image).float() / 255 # (h, w) -> (1, h, w) and normalized + if mask is not None: + mask = torch.from_numpy(mask) + + if image.shape[0] == 3 and normalize_img: + # Normalize image: + image = Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])(image) # Input: 3*H*W + + return image, mask, scale, origin_img_size, norm_pixel_mat[0], homo_sampled_normed[0] + + +def read_megadepth_depth_gray(path, resize=None, df=None, padding=False, augment_fn=None, read_gray=True, normalize_img=False, resize_by_stretch=False): + """ + Args: + resize (int, optional): the longer edge of resized images. None for no resize. + padding (bool): If set to 'True', zero-pad resized images to squared size. 
+ augment_fn (callable, optional): augments images with pre-defined visual effects + Returns: + image (torch.tensor): (1, h, w) + mask (torch.tensor): (h, w) + scale (torch.tensor): [w/w_new, h/h_new] + """ + depth = read_megadepth_depth(path, return_tensor=False) + + # following controlnet 1-depth + depth = depth.astype(np.float64) + depth_non_zero = depth[depth!=0] + vmin = np.percentile(depth_non_zero, 2) + vmax = np.percentile(depth_non_zero, 85) + depth -= vmin + depth /= (vmax - vmin + 1e-4) + depth = 1.0 - depth + image = (depth * 255.0).clip(0, 255).astype(np.uint8) + + # resize image + w, h = image.shape[1], image.shape[0] + if resize_by_stretch: + w_new, h_new = (resize, resize) if isinstance(resize, int) else (resize[1], resize[0]) + else: + if not isinstance(resize, int): + assert resize[0] == resize[1] + resize = resize[0] + w_new, h_new = get_resized_wh(w, h, resize) + w_new, h_new = get_divisible_wh(w_new, h_new, df) + w_new, h_new = get_divisible_wh(w_new, h_new, df) + origin_img_size = torch.tensor([h, w], dtype=torch.float) + + image = cv2.resize(image, (w_new, h_new)) + scale = torch.tensor([w/w_new, h/h_new], dtype=torch.float) + + if padding: # padding + pad_to = max(h_new, w_new) + image, mask = pad_bottom_right(image, pad_to, ret_mask=True) + else: + mask = None + + if read_gray: + image = torch.from_numpy(image).float()[None] / 255 # (h, w) -> (1, h, w) and normalized + else: + image = np.stack([image]*3) # 3 * H * W + image = torch.from_numpy(image).float() / 255 # (h, w) -> (1, h, w) and normalized + if mask is not None: + mask = torch.from_numpy(mask) + + if image.shape[0] == 3 and normalize_img: + # Normalize image: + image = Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])(image) # Input: 3*H*W + + return image, mask, scale, origin_img_size + +def read_megadepth_depth(path, pad_to=None, return_tensor=True): + if path.startswith('oss://'): + path = path.replace(OSS_FOLDER_PATH, PCACHE_FOLDER_PATH) + if path.startswith('pcache://'): + path = path[:9] + path[9:].replace('////', '/').replace('///', '/').replace('//', '/') # remove all continuous '/' + + load_failed = True + failed_num = 0 + while load_failed: + try: + if '.png' in path: + if 'scannet_plus' in path: + depth = imread_gray(path, cv_type=cv2.IMREAD_UNCHANGED).astype(np.float32) + + with open(path, 'rb') as f: + # CO3D + depth = np.asarray(Image.open(f)).astype(np.float32) + depth = depth / 1000 + elif '.pfm' in path: + # For BlendedMVS dataset (not support pcache): + depth = load_pfm(path).copy() + else: + # For MegaDepth + if str(path).startswith('oss://') or str(path).startswith('pcache://'): + depth = load_array_from_pcache(path, None, use_h5py=True) + else: + depth = np.array(h5py.File(path, 'r')['depth']) + load_failed = False + except: + failed_num += 1 + if failed_num > 5000: + logger.error(f"Try to load: {path}, but failed {failed_num} times") + continue + + if pad_to is not None: + depth, _ = pad_bottom_right(depth, pad_to, ret_mask=False) + if return_tensor: + depth = torch.from_numpy(depth).float() # (h, w) + return depth + + +# --- ScanNet --- + +def read_scannet_gray(path, resize=(640, 480), augment_fn=None): + """ + Args: + resize (tuple): align image to depthmap, in (w, h). 
+ augment_fn (callable, optional): augments images with pre-defined visual effects + Returns: + image (torch.tensor): (1, h, w) + mask (torch.tensor): (h, w) + scale (torch.tensor): [w/w_new, h/h_new] + """ + # read and resize image + image = imread_gray(path, augment_fn) + image = cv2.resize(image, resize) + + # (h, w) -> (1, h, w) and normalized + image = torch.from_numpy(image).float()[None] / 255 + return image + + +def read_scannet_depth(path): + if str(path).startswith('s3://'): + depth = load_array_from_s3(str(path), SCANNET_CLIENT, cv2.IMREAD_UNCHANGED) + else: + depth = cv2.imread(str(path), cv2.IMREAD_UNCHANGED) + depth = depth / 1000 + depth = torch.from_numpy(depth).float() # (h, w) + return depth + + +def read_scannet_pose(path): + """ Read ScanNet's Camera2World pose and transform it to World2Camera. + + Returns: + pose_w2c (np.ndarray): (4, 4) + """ + cam2world = np.loadtxt(path, delimiter=' ') + world2cam = inv(cam2world) + return world2cam + + +def read_scannet_intrinsic(path): + """ Read ScanNet's intrinsic matrix and return the 3x3 matrix. + """ + intrinsic = np.loadtxt(path, delimiter=' ') + return intrinsic[:-1, :-1] + +def dict_to_cuda(data_dict): + data_dict_cuda = {} + for k, v in data_dict.items(): + if isinstance(v, torch.Tensor): + data_dict_cuda[k] = v.cuda() + elif isinstance(v, dict): + data_dict_cuda[k] = dict_to_cuda(v) + elif isinstance(v, list): + data_dict_cuda[k] = list_to_cuda(v) + else: + data_dict_cuda[k] = v + return data_dict_cuda + +def list_to_cuda(data_list): + data_list_cuda = [] + for obj in data_list: + if isinstance(obj, torch.Tensor): + data_list_cuda.append(obj.cuda()) + elif isinstance(obj, dict): + data_list_cuda.append(dict_to_cuda(obj)) + elif isinstance(obj, list): + data_list_cuda.append(list_to_cuda(obj)) + else: + data_list_cuda.append(obj) + return data_list_cuda \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/src/utils/easydict.py b/imcui/third_party/MatchAnything/src/utils/easydict.py new file mode 100755 index 0000000000000000000000000000000000000000..e4af7a343311581cd56b486fa4d1cd0f60d1ad86 --- /dev/null +++ b/imcui/third_party/MatchAnything/src/utils/easydict.py @@ -0,0 +1,148 @@ +class EasyDict(dict): + """ + Get attributes + + >>> d = EasyDict({'foo':3}) + >>> d['foo'] + 3 + >>> d.foo + 3 + >>> d.bar + Traceback (most recent call last): + ... + AttributeError: 'EasyDict' object has no attribute 'bar' + + Works recursively + + >>> d = EasyDict({'foo':3, 'bar':{'x':1, 'y':2}}) + >>> isinstance(d.bar, dict) + True + >>> d.bar.x + 1 + + Bullet-proof + + >>> EasyDict({}) + {} + >>> EasyDict(d={}) + {} + >>> EasyDict(None) + {} + >>> d = {'a': 1} + >>> EasyDict(**d) + {'a': 1} + + Set attributes + + >>> d = EasyDict() + >>> d.foo = 3 + >>> d.foo + 3 + >>> d.bar = {'prop': 'value'} + >>> d.bar.prop + 'value' + >>> d + {'foo': 3, 'bar': {'prop': 'value'}} + >>> d.bar.prop = 'newer' + >>> d.bar.prop + 'newer' + + + Values extraction + + >>> d = EasyDict({'foo':0, 'bar':[{'x':1, 'y':2}, {'x':3, 'y':4}]}) + >>> isinstance(d.bar, list) + True + >>> from operator import attrgetter + >>> map(attrgetter('x'), d.bar) + [1, 3] + >>> map(attrgetter('y'), d.bar) + [2, 4] + >>> d = EasyDict() + >>> d.keys() + [] + >>> d = EasyDict(foo=3, bar=dict(x=1, y=2)) + >>> d.foo + 3 + >>> d.bar.x + 1 + + Still like a dict though + + >>> o = EasyDict({'clean':True}) + >>> o.items() + [('clean', True)] + + And like a class + + >>> class Flower(EasyDict): + ... power = 1 + ... 
+ >>> f = Flower() + >>> f.power + 1 + >>> f = Flower({'height': 12}) + >>> f.height + 12 + >>> f['power'] + 1 + >>> sorted(f.keys()) + ['height', 'power'] + + update and pop items + >>> d = EasyDict(a=1, b='2') + >>> e = EasyDict(c=3.0, a=9.0) + >>> d.update(e) + >>> d.c + 3.0 + >>> d['c'] + 3.0 + >>> d.get('c') + 3.0 + >>> d.update(a=4, b=4) + >>> d.b + 4 + >>> d.pop('a') + 4 + >>> d.a + Traceback (most recent call last): + ... + AttributeError: 'EasyDict' object has no attribute 'a' + """ + + def __init__(self, d=None, **kwargs): + if d is None: + d = {} + if kwargs: + d.update(**kwargs) + for k, v in d.items(): + setattr(self, k, v) + # Class attributes + for k in self.__class__.__dict__.keys(): + if not (k.startswith("__") and k.endswith("__")) and not k in ("update", "pop"): + setattr(self, k, getattr(self, k)) + + def __setattr__(self, name, value): + if isinstance(value, (list, tuple)): + value = [self.__class__(x) if isinstance(x, dict) else x for x in value] + elif isinstance(value, dict) and not isinstance(value, self.__class__): + value = self.__class__(value) + super(EasyDict, self).__setattr__(name, value) + super(EasyDict, self).__setitem__(name, value) + + __setitem__ = __setattr__ + + def update(self, e=None, **f): + d = e or dict() + d.update(f) + for k in d: + setattr(self, k, d[k]) + + def pop(self, k, d=None): + if hasattr(self, k): + delattr(self, k) + return super(EasyDict, self).pop(k, d) + + +if __name__ == "__main__": + import doctest \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/src/utils/geometry.py b/imcui/third_party/MatchAnything/src/utils/geometry.py new file mode 100644 index 0000000000000000000000000000000000000000..d6470ca309655e4e81f58bfe515a428b6e8b3623 --- /dev/null +++ b/imcui/third_party/MatchAnything/src/utils/geometry.py @@ -0,0 +1,366 @@ +from __future__ import division +import torch +import torch.nn.functional as F +import numpy as np +# from numba import jit + +pixel_coords = None + +def set_id_grid(depth): + b, h, w = depth.size() + i_range = torch.arange(0, h).view(1, h, 1).expand(1,h,w).type_as(depth) # [1, H, W] + j_range = torch.arange(0, w).view(1, 1, w).expand(1,h,w).type_as(depth) # [1, H, W] + ones = torch.ones(1,h,w).type_as(depth) + + pixel_coords = torch.stack((j_range, i_range, ones), dim=1) # [1, 3, H, W] + return pixel_coords + +def check_sizes(input, input_name, expected): + condition = [input.ndimension() == len(expected)] + for i,size in enumerate(expected): + if size.isdigit(): + condition.append(input.size(i) == int(size)) + assert(all(condition)), "wrong size for {}, expected {}, got {}".format(input_name, 'x'.join(expected), list(input.size())) + + +def pixel2cam(depth, intrinsics_inv): + """Transform coordinates in the pixel frame to the camera frame. + Args: + depth: depth maps -- [B, H, W] + intrinsics_inv: intrinsics_inv matrix for each element of batch -- [B, 3, 3] + Returns: + array of (u,v,1) cam coordinates -- [B, 3, H, W] + """ + b, h, w = depth.size() + pixel_coords = set_id_grid(depth) + current_pixel_coords = pixel_coords[:,:,:h,:w].expand(b,3,h,w).reshape(b, 3, -1) # [B, 3, H*W] + cam_coords = (intrinsics_inv.float() @ current_pixel_coords.float()).reshape(b, 3, h, w) + return cam_coords * depth.unsqueeze(1) + +def cam2pixel_depth(cam_coords, proj_c2p_rot, proj_c2p_tr): + """Transform coordinates in the camera frame to the pixel frame and get depth map. 
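+    The returned coordinates are normalized to [-1, 1], the convention
+    expected by torch.nn.functional.grid_sample, and projected depths are
+    clamped to a 1 mm minimum to avoid division by zero.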
+    Args:
+        cam_coords: pixel coordinates defined in the first camera coordinates system -- [B, 3, H, W]
+        proj_c2p_rot: rotation matrix of cameras -- [B, 3, 3]
+        proj_c2p_tr: translation vectors of cameras -- [B, 3, 1]
+    Returns:
+        tensor of [-1,1] coordinates -- [B, H, W, 2]
+        depth map -- [B, H, W]
+    """
+    b, _, h, w = cam_coords.size()
+    cam_coords_flat = cam_coords.reshape(b, 3, -1)  # [B, 3, H*W]
+    if proj_c2p_rot is not None:
+        pcoords = proj_c2p_rot @ cam_coords_flat
+    else:
+        pcoords = cam_coords_flat
+
+    if proj_c2p_tr is not None:
+        pcoords = pcoords + proj_c2p_tr  # [B, 3, H*W]
+    X = pcoords[:, 0]
+    Y = pcoords[:, 1]
+    Z = pcoords[:, 2].clamp(min=1e-3)  # [B, H*W] min_depth = 1 mm
+
+    X_norm = 2*(X / Z)/(w-1) - 1  # Normalized, -1 if on extreme left, 1 if on extreme right (x = w-1) [B, H*W]
+    Y_norm = 2*(Y / Z)/(h-1) - 1  # Idem [B, H*W]
+
+    pixel_coords = torch.stack([X_norm, Y_norm], dim=2)  # [B, H*W, 2]
+    return pixel_coords.reshape(b, h, w, 2), Z.reshape(b, h, w)
+
+
+def cam2pixel(cam_coords, proj_c2p_rot, proj_c2p_tr):
+    """Transform coordinates in the camera frame to the pixel frame.
+    Args:
+        cam_coords: pixel coordinates defined in the first camera coordinates system -- [B, 3, H, W]
+        proj_c2p_rot: rotation matrix of cameras -- [B, 3, 3]
+        proj_c2p_tr: translation vectors of cameras -- [B, 3, 1]
+    Returns:
+        array of [-1,1] coordinates -- [B, H, W, 2]
+    """
+    b, _, h, w = cam_coords.size()
+    cam_coords_flat = cam_coords.reshape(b, 3, -1)  # [B, 3, H*W]
+    if proj_c2p_rot is not None:
+        pcoords = proj_c2p_rot @ cam_coords_flat
+    else:
+        pcoords = cam_coords_flat
+
+    if proj_c2p_tr is not None:
+        pcoords = pcoords + proj_c2p_tr  # [B, 3, H*W]
+    X = pcoords[:, 0]
+    Y = pcoords[:, 1]
+    Z = pcoords[:, 2].clamp(min=1e-3)  # [B, H*W] min_depth = 1 mm
+
+    X_norm = 2*(X / Z)/(w-1) - 1  # Normalized, -1 if on extreme left, 1 if on extreme right (x = w-1) [B, H*W]
+    Y_norm = 2*(Y / Z)/(h-1) - 1  # Idem [B, H*W]
+
+    pixel_coords = torch.stack([X_norm, Y_norm], dim=2)  # [B, H*W, 2]
+    return pixel_coords.reshape(b, h, w, 2)
+
+
+def reproject_kpts(dim0_idxs, kpts, depth, rel_pose, K0, K1):
+    """ Reproject keypoints with depth, relative pose and camera intrinsics.
+    Args:
+        dim0_idxs (torch.LongTensor): (B*max_kpts, )
+        kpts (torch.LongTensor): (B, max_kpts, 2)
+        depth (torch.Tensor): (B, H, W)
+        rel_pose (torch.Tensor): (B, 3, 4) relative transformation from target to source (T_0to1)
+        K0 (torch.Tensor): (B, 3, 3) - (K_0)
+        K1 (torch.Tensor): (B, 3, 3) - (K_1)
+    Returns:
+        (torch.Tensor): (B, max_kpts, 2) the reprojected kpts
+    """
+    # pixel to camera
+    device = kpts.device
+    B, max_kpts, _ = kpts.shape
+
+    kpts = kpts.reshape(-1, 2)  # (B*K, 2)
+    kpts_depth = depth[dim0_idxs, kpts[:, 1], kpts[:, 0]]  # (B*K, )
+    kpts = torch.cat([kpts.float(),
+                      torch.ones((kpts.shape[0], 1), dtype=torch.float32, device=device)], -1)  # (B*K, 3)
+    pixel_coords = (kpts * kpts_depth[:, None]).reshape(B, max_kpts, 3).permute(0, 2, 1)  # (B, 3, K)
+
+    cam_coords = K0.inverse() @ pixel_coords  # (B, 3, max_kpts)
+    # camera1 to camera2
+    rel_pose_R = rel_pose[:, :, :-1]  # (B, 3, 3)
+    rel_pose_t = rel_pose[:, :, -1][..., None]  # (B, 3, 1)
+    cam2_coords = rel_pose_R @ cam_coords + rel_pose_t  # (B, 3, max_kpts)
+    # projection
+    pixel2_coords = K1 @ cam2_coords  # (B, 3, max_kpts)
+    reproj_kpts = pixel2_coords[:, :-1, :] / pixel2_coords[:, -1, :][:, None].expand(-1, 2, -1)
+    return reproj_kpts.permute(0, 2, 1)
+
+
+def check_depth_consistency(b_idxs, kpts0, depth0, kpts1, depth1, T_0to1, K0, K1,
+                            atol=0.1, rtol=0.0):
+    """
+    Args:
+        b_idxs (torch.LongTensor): (n_kpts, ) the batch index that each keypoint pair belongs to
+        kpts0 (torch.LongTensor): (n_kpts, 2)
+        depth0 (torch.Tensor): (B, H, W)
+        kpts1 (torch.LongTensor): (n_kpts, 2)
+        depth1 (torch.Tensor): (B, H, W)
+        T_0to1 (torch.Tensor): (B, 3, 4)
+        K0 (torch.Tensor): (B, 3, 3) - (K_0)
+        K1 (torch.Tensor): (B, 3, 3) - (K_1)
+        atol (float): the absolute tolerance for the depth consistency check
+        rtol (float): the relative tolerance for the depth consistency check
+    Returns:
+        valid_mask (torch.Tensor): (n_kpts, )
+    Notes:
+        Two corresponding keypoints are depth-consistent if the following inequality holds:
+            abs(kpt_0to1_depth - kpt1_depth) <= (atol + rtol * abs(kpt1_depth))
+        * In the initial reimplementation, `atol=0.1, rtol=0` is used, and the result is better with
+          `atol=1.0, rtol=0` (which nearly disables the depth consistency check).
+        * However, the author suggests using `atol=0.0, rtol=0.1` as in https://github.com/magicleap/SuperGluePretrainedNetwork/issues/31#issuecomment-681866054
+    """
+    device = kpts0.device
+    n_kpts = kpts0.shape[0]
+
+    kpts0_depth = depth0[b_idxs, kpts0[:, 1], kpts0[:, 0]]  # (n_kpts, )
+    kpts1_depth = depth1[b_idxs, kpts1[:, 1], kpts1[:, 0]]  # (n_kpts, )
+    kpts0 = torch.cat([kpts0.float(),
+                       torch.ones((n_kpts, 1), dtype=torch.float32, device=device)], -1)  # (n_kpts, 3)
+    pixel_coords = (kpts0 * kpts0_depth[:, None])[..., None]  # (n_kpts, 3, 1)
+
+    # indexing from T_0to1 and K - treat all kpts as a batch
+    K0 = K0[b_idxs, :, :]  # (n_kpts, 3, 3)
+    T_0to1 = T_0to1[b_idxs, :, :]  # (n_kpts, 3, 4)
+    cam_coords = K0.inverse() @ pixel_coords  # (n_kpts, 3, 1)
+
+    # camera1 to camera2
+    R_0to1 = T_0to1[:, :, :-1]  # (n_kpts, 3, 3)
+    t_0to1 = T_0to1[:, :, -1][..., None]  # (n_kpts, 3, 1)
+    cam1_coords = R_0to1 @ cam_coords + t_0to1  # (n_kpts, 3, 1)
+    K1 = K1[b_idxs, :, :]  # (n_kpts, 3, 3)
+    pixel1_coords = K1 @ cam1_coords  # (n_kpts, 3, 1)
+    kpts_0to1_depth = pixel1_coords[:, -1, 0]  # (n_kpts, )
+    return (kpts_0to1_depth - kpts1_depth).abs() <= atol + rtol * kpts1_depth.abs()
+
+
+def inverse_warp(img, depth, pose, intrinsics, mode='bilinear', padding_mode='zeros'):
+    """
+    Inverse warp a source image to the target image plane.
+
+    Args:
+        img: the source image (where to sample pixels) -- [B, 3, H, W]
+        depth: depth map of the target image -- [B, H, W]
+        pose: relative transformation from target to source -- [B, 3, 4]
+        intrinsics: camera intrinsic matrix -- [B, 3, 3]
+    Returns:
+        projected_img: Source image warped to the target image plane
+        valid_points: Boolean array indicating point validity
+    """
+    # check_sizes(img, 'img', 'B3HW')
+    check_sizes(depth, 'depth', 'BHW')
+    # check_sizes(pose, 'pose', 'B6')
+    check_sizes(intrinsics, 'intrinsics', 'B33')
+
+    batch_size, _, img_height, img_width = img.size()
+
+    cam_coords = pixel2cam(depth, intrinsics.inverse())  # [B,3,H,W]
+
+    pose_mat = pose  # (B, 3, 4)
+
+    # Get projection matrix for target camera frame to source pixel frame
+    proj_cam_to_src_pixel = intrinsics @ pose_mat  # [B, 3, 4]
+
+    rot, tr = proj_cam_to_src_pixel[:, :, :3], proj_cam_to_src_pixel[:, :, -1:]
+    src_pixel_coords = cam2pixel(cam_coords, rot, tr)  # [B,H,W,2]
+    projected_img = F.grid_sample(img, src_pixel_coords, mode=mode,
+                                  padding_mode=padding_mode, align_corners=True)
+
+    valid_points = src_pixel_coords.abs().max(dim=-1)[0] <= 1
+
+    return projected_img, valid_points
+
+def depth_inverse_warp(depth_source, depth, pose, intrinsic_source, intrinsic, mode='nearest', padding_mode='zeros'):
+    """
+    1. Inversely warp a source depth map to the target image plane (warped depth map still in source frame)
+    2. Transform the target depth map to the source image frame
+    Args:
+        depth_source: the source depth map (where to sample) -- [B, H, W]
+        depth: depth map of the target image -- [B, H, W]
+        pose: relative transformation from target to source -- [B, 3, 4]
+        intrinsic_source: camera intrinsic matrix of the source view -- [B, 3, 3]
+        intrinsic: camera intrinsic matrix of the target view -- [B, 3, 3]
+    Returns:
+        warped_depth: Source depth warped to the target image plane -- [B, H, W]
+        projected_depth: Target depth projected to the source image frame -- [B, H, W]
+        valid_points: Boolean array indicating point validity -- [B, H, W]
+    """
+    check_sizes(depth_source, 'depth', 'BHW')
+    check_sizes(depth, 'depth', 'BHW')
+    check_sizes(intrinsic_source, 'intrinsics', 'B33')
+    check_sizes(intrinsic, 'intrinsics', 'B33')  # also validate the target intrinsics
+
+    b, h, w = depth.size()
+
+    cam_coords = pixel2cam(depth, intrinsic.inverse())  # [B,3,H,W]
+
+    pose_mat = pose  # (B, 3, 4)
+
+    # Get projection matrix from target camera frame to source pixel frame
+    proj_cam_to_src_pixel = intrinsic_source @ pose_mat  # [B, 3, 4]
+
+    rot, tr = proj_cam_to_src_pixel[:, :, :3], proj_cam_to_src_pixel[:, :, -1:]
+    src_pixel_coords, depth_target2src = cam2pixel_depth(cam_coords, rot, tr)  # [B,H,W,2]
+    warped_depth = F.grid_sample(depth_source[:, None], src_pixel_coords, mode=mode,
+                                 padding_mode=padding_mode, align_corners=True)  # [B, 1, H, W]
+
+    valid_points = (src_pixel_coords.abs().max(dim=-1)[0] <= 1) &\
+                   (depth > 0.0) & (warped_depth[:, 0] > 0.0)  # [B, H, W]
+    return warped_depth[:, 0], depth_target2src, valid_points
+
+def to_skew(t):
+    """ Transform the translation vector t to a skew-symmetric matrix.
+    Args:
+        t (torch.Tensor): (B, 3)
+    """
+    # a skew-symmetric matrix has a zero diagonal (the original `new_ones` left 1s on the diagonal)
+    t_skew = t.new_zeros((t.shape[0], 3, 3))
+    t_skew[:, 0, 1] = -t[:, 2]
+    t_skew[:, 1, 0] = t[:, 2]
+    t_skew[:, 0, 2] = t[:, 1]
+    t_skew[:, 2, 0] = -t[:, 1]
+    t_skew[:, 1, 2] = -t[:, 0]
+    t_skew[:, 2, 1] = t[:, 0]
+    return t_skew  # (B, 3, 3)
+
+
+def to_homogeneous(pts):
+    """
+    Args:
+        pts (torch.Tensor): (B, K, 2)
+    """
+    return torch.cat([pts, torch.ones_like(pts[..., :1])], -1)  # (B, K, 3)
+
+
+def pix2img(pts, K):
+    """
+    Args:
+        pts (torch.Tensor): (B, K, 2)
+        K (torch.Tensor): (B, 3, 3)
+    """
+    return (pts - K[:, [0, 1], [2, 2]][:, None]) / K[:, [0, 1], [0, 1]][:, None]
+
+
+def weighted_blind_sed(kpts0, kpts1, weights, E, K0, K1):
+    """ Calculate the squared weighted blind symmetric epipolar distance, i.e. the SED between
+    all possible keypoint pairs, weighted by the given weights.
+    Args:
+        kpts0 (torch.Tensor): (B, K0, 2)
+        kpts1 (torch.Tensor): (B, K1, 2)
+        weights (torch.Tensor): (B, K0, K1)
+        E (torch.Tensor): (B, 3, 3) - the essential matrix
+        K0 (torch.Tensor): (B, 3, 3)
+        K1 (torch.Tensor): (B, 3, 3)
+    Returns:
+        w_sed (torch.Tensor): (B, K0, K1)
+    """
+    M, N = kpts0.shape[1], kpts1.shape[1]
+
+    kpts0 = to_homogeneous(pix2img(kpts0, K0))
+    kpts1 = to_homogeneous(pix2img(kpts1, K1))  # (B, K1, 3)
+
+    R = kpts0 @ E.transpose(1, 2) @ kpts1.transpose(1, 2)  # (B, K0, K1)
+    # w_R = weights * R  # (B, K0, K1)
+
+    Ep0 = kpts0 @ E.transpose(1, 2)  # (B, K0, 3)
+    Etp1 = kpts1 @ E  # (B, K1, 3)
+    d = R**2 * (1.0 / (Ep0[..., 0]**2 + Ep0[..., 1]**2)[..., None].expand(-1, -1, N)
+                + 1.0 / (Etp1[..., 0]**2 + Etp1[..., 1]**2)[:, None].expand(-1, M, -1)) * weights  # (B, K0, K1)
+    return d
+
+def weighted_blind_sampson(kpts0, kpts1, weights, E, K0, K1):
+    """ Calculate the squared weighted blind Sampson distance, i.e. the Sampson distance between
+    all possible keypoint pairs, weighted by the given weights.
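+    Args (same tensor conventions as `weighted_blind_sed` above):
+        kpts0 (torch.Tensor): (B, K0, 2)
+        kpts1 (torch.Tensor): (B, K1, 2)
+        weights (torch.Tensor): (B, K0, K1)
+        E (torch.Tensor): (B, 3, 3) - the essential matrix
+        K0 (torch.Tensor): (B, 3, 3)
+        K1 (torch.Tensor): (B, 3, 3)
+    Returns:
+        d (torch.Tensor): (B, K0, K1)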
+ """ + M, N = kpts0.shape[1], kpts1.shape[1] + + kpts0 = to_homogeneous(pix2img(kpts0, K0)) + kpts1 = to_homogeneous(pix2img(kpts1, K1)) # (B, K1, 3) + + R = kpts0 @ E.transpose(1, 2) @ kpts1.transpose(1, 2) # (B, K0, K1) + # w_R = weights * R # (B, K0, K1) + + Ep0 = kpts0 @ E.transpose(1, 2) # (B, K0, 3) + Etp1 = kpts1 @ E # (B, K1, 3) + d = R**2 * (1.0 / ((Ep0[..., 0]**2 + Ep0[..., 1]**2)[..., None].expand(-1, -1, N) + + (Etp1[..., 0]**2 + Etp1[..., 1]**2)[:, None].expand(-1, M, -1))) * weights # (B, K0, K1) + return d + + +def angular_rel_rot(T_0to1): + """ + Args: + T0_to_1 (np.ndarray): (4, 4) + """ + cos = (np.trace(T_0to1[:-1, :-1]) - 1) / 2 + if cos < -1: + cos = -1.0 + if cos > 1: + cos = 1.0 + angle_error_rot = np.rad2deg(np.abs(np.arccos(cos))) + + return angle_error_rot + +def angular_rel_pose(T0, T1): + """ + Args: + T0 (np.ndarray): (4, 4) + T1 (np.ndarray): (4, 4) + + """ + cos = (np.trace(T0[:-1, :-1].T @ T1[:-1, :-1]) - 1) / 2 + if cos < -1: + cos = -1.0 + if cos > 1: + cos = 1.0 + angle_error_rot = np.rad2deg(np.abs(np.arccos(cos))) + + # calculate angular translation error + n = np.linalg.norm(T0[:-1, -1]) * np.linalg.norm(T1[:-1, -1]) + cos = np.dot(T0[:-1, -1], T1[:-1, -1]) / n + if cos < -1: + cos = -1.0 + if cos > 1: + cos = 1.0 + angle_error_trans = np.rad2deg(np.arccos(cos)) + + return angle_error_rot, angle_error_trans \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/src/utils/homography_utils.py b/imcui/third_party/MatchAnything/src/utils/homography_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..5d6ac5adb97c9963c216a97761cca9dfccacd91f --- /dev/null +++ b/imcui/third_party/MatchAnything/src/utils/homography_utils.py @@ -0,0 +1,366 @@ +import math +from typing import Tuple + +import numpy as np +import torch + +def to_homogeneous(points): + """Convert N-dimensional points to homogeneous coordinates. + Args: + points: torch.Tensor or numpy.ndarray with size (..., N). + Returns: + A torch.Tensor or numpy.ndarray with size (..., N+1). + """ + if isinstance(points, torch.Tensor): + pad = points.new_ones(points.shape[:-1] + (1,)) + return torch.cat([points, pad], dim=-1) + elif isinstance(points, np.ndarray): + pad = np.ones((points.shape[:-1] + (1,)), dtype=points.dtype) + return np.concatenate([points, pad], axis=-1) + else: + raise ValueError + + +def from_homogeneous(points, eps=0.0): + """Remove the homogeneous dimension of N-dimensional points. + Args: + points: torch.Tensor or numpy.ndarray with size (..., N+1). + eps: Epsilon value to prevent zero division. + Returns: + A torch.Tensor or numpy ndarray with size (..., N). 
+ """ + return points[..., :-1] / (points[..., -1:] + eps) + + +def flat2mat(H): + return np.reshape(np.concatenate([H, np.ones_like(H[:, :1])], axis=1), [3, 3]) + + +# Homography creation + + +def create_center_patch(shape, patch_shape=None): + if patch_shape is None: + patch_shape = shape + width, height = shape + pwidth, pheight = patch_shape + left = int((width - pwidth) / 2) + bottom = int((height - pheight) / 2) + right = int((width + pwidth) / 2) + top = int((height + pheight) / 2) + return np.array([[left, bottom], [left, top], [right, top], [right, bottom]]) + + +def check_convex(patch, min_convexity=0.05): + """Checks if given polygon vertices [N,2] form a convex shape""" + for i in range(patch.shape[0]): + x1, y1 = patch[(i - 1) % patch.shape[0]] + x2, y2 = patch[i] + x3, y3 = patch[(i + 1) % patch.shape[0]] + if (x2 - x1) * (y3 - y2) - (x3 - x2) * (y2 - y1) > -min_convexity: + return False + return True + + +def sample_homography_corners( + shape, + patch_shape, + difficulty=1.0, + translation=0.4, + n_angles=10, + max_angle=90, + min_convexity=0.05, + rng=np.random, +): + max_angle = max_angle / 180.0 * math.pi + width, height = shape + pwidth, pheight = width * (1 - difficulty), height * (1 - difficulty) + min_pts1 = create_center_patch(shape, (pwidth, pheight)) + full = create_center_patch(shape) + pts2 = create_center_patch(patch_shape) + scale = min_pts1 - full + found_valid = False + cnt = -1 + while not found_valid: + offsets = rng.uniform(0.0, 1.0, size=(4, 2)) * scale + pts1 = full + offsets + found_valid = check_convex(pts1 / np.array(shape), min_convexity) + cnt += 1 + + # re-center + pts1 = pts1 - np.mean(pts1, axis=0, keepdims=True) + pts1 = pts1 + np.mean(min_pts1, axis=0, keepdims=True) + + # Rotation + if n_angles > 0 and difficulty > 0: + angles = np.linspace(-max_angle * difficulty, max_angle * difficulty, n_angles) + rng.shuffle(angles) + rng.shuffle(angles) + angles = np.concatenate([[0.0], angles], axis=0) + + center = np.mean(pts1, axis=0, keepdims=True) + rot_mat = np.reshape( + np.stack( + [np.cos(angles), -np.sin(angles), np.sin(angles), np.cos(angles)], + axis=1, + ), + [-1, 2, 2], + ) + rotated = ( + np.matmul( + np.tile(np.expand_dims(pts1 - center, axis=0), [n_angles + 1, 1, 1]), + rot_mat, + ) + + center + ) + + for idx in range(1, n_angles): + warped_points = rotated[idx] / np.array(shape) + if np.all((warped_points >= 0.0) & (warped_points < 1.0)): + pts1 = rotated[idx] + break + + # Translation + if translation > 0: + min_trans = -np.min(pts1, axis=0) + max_trans = shape - np.max(pts1, axis=0) + trans = rng.uniform(min_trans, max_trans)[None] + pts1 += trans * translation * difficulty + + H = compute_homography(pts1, pts2, [1.0, 1.0]) + warped = warp_points(full, H, inverse=False) + return H, full, warped, patch_shape + + +def compute_homography(pts1_, pts2_, shape): + """Compute the homography matrix from 4 point correspondences""" + # Rescale to actual size + shape = np.array(shape[::-1], dtype=np.float32) # different convention [y, x] + pts1 = pts1_ * np.expand_dims(shape, axis=0) + pts2 = pts2_ * np.expand_dims(shape, axis=0) + + def ax(p, q): + return [p[0], p[1], 1, 0, 0, 0, -p[0] * q[0], -p[1] * q[0]] + + def ay(p, q): + return [0, 0, 0, p[0], p[1], 1, -p[0] * q[1], -p[1] * q[1]] + + a_mat = np.stack([f(pts1[i], pts2[i]) for i in range(4) for f in (ax, ay)], axis=0) + p_mat = np.transpose( + np.stack([[pts2[i][j] for i in range(4) for j in range(2)]], axis=0) + ) + homography = np.transpose(np.linalg.solve(a_mat, p_mat)) + return 
flat2mat(homography)
+
+
+# Point warping utils
+
+
+def warp_points(points, homography, inverse=True):
+    """
+    Warp a list of points with the INVERSE of the given homography.
+    The inverse is used to be coherent with tf.contrib.image.transform
+    Arguments:
+        points: list of N points, shape (N, 2).
+        homography: batched or not (shapes (B, 3, 3) and (3, 3) respectively).
+    Returns: a Tensor of shape (N, 2) or (B, N, 2) (depending on whether the homography
+        is batched) containing the new coordinates of the warped points.
+    """
+    H = homography[None] if len(homography.shape) == 2 else homography
+
+    # Get the points to the homogeneous format
+    num_points = points.shape[0]
+    points = np.concatenate([points, np.ones([num_points, 1], dtype=np.float32)], -1)
+
+    H_inv = np.transpose(np.linalg.inv(H) if inverse else H)
+    warped_points = np.tensordot(points, H_inv, axes=[[1], [0]])
+
+    warped_points = np.transpose(warped_points, [2, 0, 1])
+    warped_points[np.abs(warped_points[:, :, 2]) < 1e-8, 2] = 1e-8
+    warped_points = warped_points[:, :, :2] / warped_points[:, :, 2:]
+
+    return warped_points[0] if len(homography.shape) == 2 else warped_points
+
+
+def warp_points_torch(points, H, inverse=True):
+    """
+    Warp a list of points with the INVERSE of the given homography.
+    The inverse is used to be coherent with tf.contrib.image.transform
+    Arguments:
+        points: batched list of N points, shape (B, N, 2).
+        H: batched or not (shapes (B, 3, 3) and (3, 3) respectively).
+        inverse: Whether to multiply the points by H or the inverse of H
+    Returns: a Tensor of shape (B, N, 2) containing the new coordinates of the warped points.
+    """
+
+    # Get the points to the homogeneous format
+    points = to_homogeneous(points)
+
+    # Apply the homography
+    H_mat = (torch.inverse(H) if inverse else H).transpose(-2, -1)
+    warped_points = torch.einsum("...nj,...ji->...ni", points, H_mat)
+
+    warped_points = from_homogeneous(warped_points, eps=1e-5)
+    return warped_points
+
+
+# Line warping utils
+
+
+def seg_equation(segs):
+    # compute the homogeneous start and end points of each segment
+    start_points, end_points = to_homogeneous(segs[..., 0, :]), to_homogeneous(
+        segs[..., 1, :]
+    )
+    # Compute the line equations as ax + by + c = 0, normalized such that a^2 + b^2 = 1
+    lines = torch.cross(start_points, end_points, dim=-1)
+    lines_norm = torch.sqrt(lines[..., 0] ** 2 + lines[..., 1] ** 2)[..., None]
+    assert torch.all(
+        lines_norm > 0
+    ), "Error: trying to compute the equation of a line with a single point"
+    lines = lines / lines_norm
+    return lines
+
+
+def is_inside_img(pts: torch.Tensor, img_shape: Tuple[int, int]):
+    h, w = img_shape
+    return (
+        (pts >= 0).all(dim=-1)
+        & (pts[..., 0] < w)
+        & (pts[..., 1] < h)
+        & (~torch.isinf(pts).any(dim=-1))
+    )
+
+
+def shrink_segs_to_img(segs: torch.Tensor, img_shape: Tuple[int, int]) -> torch.Tensor:
+    """
+    Shrink an array of segments to fit inside the image.
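+    Endpoints that fall outside the image are moved to the intersection of the
+    segment's supporting line with the image border (when that intersection is valid).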
+ :param segs: The tensor of segments with shape (N, 2, 2) + :param img_shape: The image shape in format (H, W) + """ + EPS = 1e-4 + device = segs.device + w, h = img_shape[1], img_shape[0] + # Project the segments to the reference image + segs = segs.clone() + eqs = seg_equation(segs) + x0, y0 = torch.tensor([1.0, 0, 0.0], device=device), torch.tensor( + [0.0, 1, 0], device=device + ) + x0 = x0.repeat(eqs.shape[:-1] + (1,)) + y0 = y0.repeat(eqs.shape[:-1] + (1,)) + pt_x0s = torch.cross(eqs, x0, dim=-1) + pt_x0s = pt_x0s[..., :-1] / pt_x0s[..., None, -1] + pt_x0s_valid = is_inside_img(pt_x0s, img_shape) + pt_y0s = torch.cross(eqs, y0, dim=-1) + pt_y0s = pt_y0s[..., :-1] / pt_y0s[..., None, -1] + pt_y0s_valid = is_inside_img(pt_y0s, img_shape) + + xW = torch.tensor([1.0, 0, EPS - w], device=device) + yH = torch.tensor([0.0, 1, EPS - h], device=device) + xW = xW.repeat(eqs.shape[:-1] + (1,)) + yH = yH.repeat(eqs.shape[:-1] + (1,)) + pt_xWs = torch.cross(eqs, xW, dim=-1) + pt_xWs = pt_xWs[..., :-1] / pt_xWs[..., None, -1] + pt_xWs_valid = is_inside_img(pt_xWs, img_shape) + pt_yHs = torch.cross(eqs, yH, dim=-1) + pt_yHs = pt_yHs[..., :-1] / pt_yHs[..., None, -1] + pt_yHs_valid = is_inside_img(pt_yHs, img_shape) + + # If the X coordinate of the first endpoint is out + mask = (segs[..., 0, 0] < 0) & pt_x0s_valid + segs[mask, 0, :] = pt_x0s[mask] + mask = (segs[..., 0, 0] > (w - 1)) & pt_xWs_valid + segs[mask, 0, :] = pt_xWs[mask] + # If the X coordinate of the second endpoint is out + mask = (segs[..., 1, 0] < 0) & pt_x0s_valid + segs[mask, 1, :] = pt_x0s[mask] + mask = (segs[:, 1, 0] > (w - 1)) & pt_xWs_valid + segs[mask, 1, :] = pt_xWs[mask] + # If the Y coordinate of the first endpoint is out + mask = (segs[..., 0, 1] < 0) & pt_y0s_valid + segs[mask, 0, :] = pt_y0s[mask] + mask = (segs[..., 0, 1] > (h - 1)) & pt_yHs_valid + segs[mask, 0, :] = pt_yHs[mask] + # If the Y coordinate of the second endpoint is out + mask = (segs[..., 1, 1] < 0) & pt_y0s_valid + segs[mask, 1, :] = pt_y0s[mask] + mask = (segs[..., 1, 1] > (h - 1)) & pt_yHs_valid + segs[mask, 1, :] = pt_yHs[mask] + + assert ( + torch.all(segs >= 0) + and torch.all(segs[..., 0] < w) + and torch.all(segs[..., 1] < h) + ) + return segs + + +def warp_lines_torch( + lines, H, inverse=True, dst_shape: Tuple[int, int] = None +) -> Tuple[torch.Tensor, torch.Tensor]: + """ + :param lines: A tensor of shape (B, N, 2, 2) + where B is the batch size, N the number of lines. + :param H: The homography used to convert the lines. + batched or not (shapes (B, 3, 3) and (3, 3) respectively). 
+ :param inverse: Whether to apply H or the inverse of H + :param dst_shape:If provided, lines are trimmed to be inside the image + """ + device = lines.device + batch_size = len(lines) + lines = warp_points_torch(lines.reshape(batch_size, -1, 2), H, inverse).reshape( + lines.shape + ) + + if dst_shape is None: + return lines, torch.ones(lines.shape[:-2], dtype=torch.bool, device=device) + + out_img = torch.any( + (lines < 0) | (lines >= torch.tensor(dst_shape[::-1], device=device)), -1 + ) + valid = ~out_img.all(-1) + any_out_of_img = out_img.any(-1) + lines_to_trim = valid & any_out_of_img + + for b in range(batch_size): + lines_to_trim_mask_b = lines_to_trim[b] + lines_to_trim_b = lines[b][lines_to_trim_mask_b] + corrected_lines = shrink_segs_to_img(lines_to_trim_b, dst_shape) + lines[b][lines_to_trim_mask_b] = corrected_lines + + return lines, valid + + +# Homography evaluation utils + + +def sym_homography_error(kpts0, kpts1, T_0to1): + kpts0_1 = from_homogeneous(to_homogeneous(kpts0) @ T_0to1.transpose(-1, -2)) + dist0_1 = ((kpts0_1 - kpts1) ** 2).sum(-1).sqrt() + + kpts1_0 = from_homogeneous( + to_homogeneous(kpts1) @ torch.pinverse(T_0to1.transpose(-1, -2)) + ) + dist1_0 = ((kpts1_0 - kpts0) ** 2).sum(-1).sqrt() + + return (dist0_1 + dist1_0) / 2.0 + + +def sym_homography_error_all(kpts0, kpts1, H): + kp0_1 = warp_points_torch(kpts0, H, inverse=False) + kp1_0 = warp_points_torch(kpts1, H, inverse=True) + + # build a distance matrix of size [... x M x N] + dist0 = torch.sum((kp0_1.unsqueeze(-2) - kpts1.unsqueeze(-3)) ** 2, -1).sqrt() + dist1 = torch.sum((kpts0.unsqueeze(-2) - kp1_0.unsqueeze(-3)) ** 2, -1).sqrt() + return (dist0 + dist1) / 2.0 + + +def homography_corner_error(T, T_gt, image_size): + W, H = image_size[..., 0], image_size[..., 1] + corners0 = torch.Tensor([[0, 0], [W, 0], [W, H], [0, H]]).float().to(T) + corners1_gt = from_homogeneous(to_homogeneous(corners0) @ T_gt.transpose(-1, -2)) + corners1 = from_homogeneous(to_homogeneous(corners0) @ T.transpose(-1, -2)) + d = torch.sqrt(((corners1 - corners1_gt) ** 2).sum(-1)) + return d.mean(-1) diff --git a/imcui/third_party/MatchAnything/src/utils/metrics.py b/imcui/third_party/MatchAnything/src/utils/metrics.py new file mode 100644 index 0000000000000000000000000000000000000000..32703f64de82aa45996011195a59b5f493a82bf1 --- /dev/null +++ b/imcui/third_party/MatchAnything/src/utils/metrics.py @@ -0,0 +1,445 @@ +import torch +import cv2 +import numpy as np +from collections import OrderedDict +from loguru import logger +from .homography_utils import warp_points, warp_points_torch +from kornia.geometry.epipolar import numeric +from kornia.geometry.conversions import convert_points_to_homogeneous +import pprint + + +# --- METRICS --- + +def relative_pose_error(T_0to1, R, t, ignore_gt_t_thr=0.0): + # angle error between 2 vectors + t_gt = T_0to1[:3, 3] + n = np.linalg.norm(t) * np.linalg.norm(t_gt) + t_err = np.rad2deg(np.arccos(np.clip(np.dot(t, t_gt) / n, -1.0, 1.0))) + t_err = np.minimum(t_err, 180 - t_err) # handle E ambiguity + if np.linalg.norm(t_gt) < ignore_gt_t_thr: # pure rotation is challenging + t_err = 0 + + # angle error between 2 rotation matrices + R_gt = T_0to1[:3, :3] + cos = (np.trace(np.dot(R.T, R_gt)) - 1) / 2 + cos = np.clip(cos, -1., 1.) 
# handle numerical errors
+    R_err = np.rad2deg(np.abs(np.arccos(cos)))
+
+    return t_err, R_err
+
+def warp_pts_error(H_est, pts_coord, H_gt=None, pts_gt=None):
+    """
+    Args:
+        H_est: estimated homography / affine matrix (3, 3)
+        pts_coord: points to warp, (N, 2); either `H_gt` or `pts_gt` must be provided
+    Returns:
+        the mean distance between the warped points and the ground-truth correspondences
+    """
+    if H_gt is not None:
+        est_warp = warp_points(pts_coord, H_est, False)
+        est_gt = warp_points(pts_coord, H_gt, False)
+        diff = est_warp - est_gt
+    elif pts_gt is not None:
+        est_warp = warp_points(pts_coord, H_est, False)
+        diff = est_warp - pts_gt
+
+    return np.mean(np.linalg.norm(diff, axis=1))
+
+def homo_warp_match_distance(H_gt, kpts0, kpts1, hw):
+    """
+    Args:
+        H_gt: ground-truth homography (3, 3)
+        kpts0, kpts1: matched keypoints, (N, 2) each
+        hw: the (h, w) image size used to normalize the distances
+    Returns:
+        the per-match warp distance, normalized by the image size
+    """
+    if isinstance(H_gt, np.ndarray):
+        kpts_warped = warp_points(kpts0, H_gt)
+        normalized_distance = np.linalg.norm((kpts_warped - kpts1) / hw[None, [1, 0]], axis=1)
+    else:
+        kpts_warped = warp_points_torch(kpts0, H_gt)
+        normalized_distance = torch.linalg.norm((kpts_warped - kpts1) / hw[None, [1, 0]], axis=1)
+    return normalized_distance
+
+def symmetric_epipolar_distance(pts0, pts1, E, K0, K1):
+    """Squared symmetric epipolar distance.
+    This can be seen as a biased estimation of the reprojection error.
+    Args:
+        pts0 (torch.Tensor): [N, 2]
+        pts1 (torch.Tensor): [N, 2]
+        E (torch.Tensor): [3, 3]
+        K0 (torch.Tensor): [3, 3]
+        K1 (torch.Tensor): [3, 3]
+    """
+    pts0 = (pts0 - K0[[0, 1], [2, 2]][None]) / K0[[0, 1], [0, 1]][None]
+    pts1 = (pts1 - K1[[0, 1], [2, 2]][None]) / K1[[0, 1], [0, 1]][None]
+    pts0 = convert_points_to_homogeneous(pts0)
+    pts1 = convert_points_to_homogeneous(pts1)
+
+    Ep0 = pts0 @ E.T  # [N, 3]
+    p1Ep0 = torch.sum(pts1 * Ep0, -1)  # [N,]
+    Etp1 = pts1 @ E  # [N, 3]
+
+    d = p1Ep0**2 * (1.0 / (Ep0[:, 0]**2 + Ep0[:, 1]**2) + 1.0 / (Etp1[:, 0]**2 + Etp1[:, 1]**2))  # N
+    return d
+
+
+def compute_symmetrical_epipolar_errors(data, config):
+    """
+    Update:
+        data (dict):{"epi_errs": [M]}
+    """
+    Tx = numeric.cross_product_matrix(data['T_0to1'][:, :3, 3])
+    E_mat = Tx @ data['T_0to1'][:, :3, :3]
+
+    m_bids = data['m_bids']
+    pts0 = data['mkpts0_f']
+    pts1 = data['mkpts1_f'].clone().detach()
+
+    if config.LOFTR.FINE.MTD_SPVS:
+        m_bids = data['m_bids_f'] if 'm_bids_f' in data else data['m_bids']
+    epi_errs = []
+    for bs in range(Tx.size(0)):
+        mask = m_bids == bs
+        epi_errs.append(
+            symmetric_epipolar_distance(pts0[mask], pts1[mask], E_mat[bs], data['K0'][bs], data['K1'][bs]))
+    epi_errs = torch.cat(epi_errs, dim=0)
+
+    data.update({'epi_errs': epi_errs})
+
+def compute_homo_match_warp_errors(data, config):
+    """
+    Update:
+        data (dict):{"epi_errs": [M]}
+    """
+
+    homography_gt = data['homography']
+    m_bids = data['m_bids']
+    pts0 = data['mkpts0_f']
+    pts1 = data['mkpts1_f']
+    origin_img0_size = data['origin_img_size0']
+
+    if config.LOFTR.FINE.MTD_SPVS:
+        m_bids = data['m_bids_f'] if 'm_bids_f' in data else data['m_bids']
+    epi_errs = []
+    for bs in range(homography_gt.shape[0]):
+        mask = m_bids == bs
+        epi_errs.append(
+            homo_warp_match_distance(homography_gt[bs], pts0[mask], pts1[mask], origin_img0_size[bs]))
+    epi_errs = torch.cat(epi_errs, dim=0)
+
+    data.update({'epi_errs': epi_errs})
+
+
+def compute_symmetrical_epipolar_errors_gt(data, config):
+    """
+    Update:
+        data (dict):{"epi_errs": [M]}
+    """
+    Tx = numeric.cross_product_matrix(data['T_0to1'][:, :3, 3])
+    E_mat = Tx @ data['T_0to1'][:, :3, :3]
+
+    m_bids = data['m_bids']
+    pts0 = data['mkpts0_f_gt']
+    pts1 = data['mkpts1_f_gt']
+
+    epi_errs = []
+    for bs in range(Tx.size(0)):
+        # mask = m_bids == bs
+        assert bs == 0
+        mask = torch.tensor([True]*pts0.shape[0], device=pts0.device)
+        if config.LOFTR.FINE.MTD_SPVS:
+            epi_errs.append(
+                symmetric_epipolar_distance(pts0[mask], pts1[mask], E_mat[bs], data['K0'][bs], data['K1'][bs]))
+        else:
+            epi_errs.append(
+                symmetric_epipolar_distance(pts0[mask], pts1[mask], E_mat[bs], data['K0'][bs], data['K1'][bs]))
+    epi_errs = torch.cat(epi_errs, dim=0)
+
+    data.update({'epi_errs': epi_errs})
+
+
+def estimate_pose(kpts0, kpts1, K0, K1, thresh, conf=0.99999):
+    if len(kpts0) < 5:
+        return None
+    # normalize keypoints
+    kpts0 = (kpts0 - K0[[0, 1], [2, 2]][None]) / K0[[0, 1], [0, 1]][None]
+    kpts1 = (kpts1 - K1[[0, 1], [2, 2]][None]) / K1[[0, 1], [0, 1]][None]
+
+    # normalize the ransac threshold by the mean of the four focal lengths
+    ransac_thr = thresh / np.mean([K0[0, 0], K0[1, 1], K1[0, 0], K1[1, 1]])
+
+    # compute pose with cv2
+    E, mask = cv2.findEssentialMat(
+        kpts0, kpts1, np.eye(3), threshold=ransac_thr, prob=conf, method=cv2.RANSAC)
+    if E is None:
+        print("\nE is None while trying to recover pose.\n")
+        return None
+
+    # recover pose from E
+    best_num_inliers = 0
+    ret = None
+    for _E in np.split(E, len(E) // 3):
+        n, R, t, _ = cv2.recoverPose(_E, kpts0, kpts1, np.eye(3), 1e9, mask=mask)
+        if n > best_num_inliers:
+            ret = (R, t[:, 0], mask.ravel() > 0)
+            best_num_inliers = n
+
+    return ret
+
+def estimate_homo(kpts0, kpts1, thresh, conf=0.99999, mode='affine'):
+    if mode == 'affine':
+        H_est, inliers = cv2.estimateAffine2D(kpts0, kpts1, ransacReprojThreshold=thresh, confidence=conf, method=cv2.RANSAC)
+        if H_est is None:
+            return np.eye(3) * 0, np.empty((0))
+        H_est = np.concatenate([H_est, np.array([[0, 0, 1]])], axis=0)  # 3 * 3
+    elif mode == 'homo':
+        H_est, inliers = cv2.findHomography(kpts0, kpts1, method=cv2.LMEDS, ransacReprojThreshold=thresh)
+        if H_est is None:
+            return np.eye(3) * 0, np.empty((0))
+
+    return H_est, inliers
+
+def compute_homo_corner_warp_errors(data, config):
+    """
+    Update:
+        data (dict):{
+            "R_errs" List[float]: [N]  # Actually warp error
+            "t_errs" List[float]: [N]  # Zero, place holder
+            "inliers" List[np.ndarray]: [N]
+        }
+    """
+    pixel_thr = config.TRAINER.RANSAC_PIXEL_THR  # 0.5
+    conf = config.TRAINER.RANSAC_CONF  # 0.99999
+    data.update({'R_errs': [], 't_errs': [], 'inliers': []})
+
+    if config.LOFTR.FINE.MTD_SPVS:
+        m_bids = data['m_bids_f'].cpu().numpy() if 'm_bids_f' in data else data['m_bids'].cpu().numpy()
+
+    else:
+        m_bids = data['m_bids'].cpu().numpy()
+    pts0 = data['mkpts0_f'].cpu().numpy()
+    pts1 = data['mkpts1_f'].cpu().numpy()
+    homography_gt = data['homography'].cpu().numpy()
+    origin_size_0 = data['origin_img_size0'].cpu().numpy()
+
+    for bs in range(homography_gt.shape[0]):
+        mask = m_bids == bs
+        ret = estimate_homo(pts0[mask], pts1[mask], pixel_thr, conf=conf)
+
+        if ret is None:
+            data['R_errs'].append(np.inf)
+            data['t_errs'].append(np.inf)
+            data['inliers'].append(np.array([]).astype(bool))
+        else:
+            H_est, inliers = ret
+            corner_coord = np.array([[0, 0], [0, origin_size_0[bs][0]], [origin_size_0[bs][1], 0], [origin_size_0[bs][1], origin_size_0[bs][0]]])
+            corner_warp_distance = warp_pts_error(H_est, corner_coord, H_gt=homography_gt[bs])
+            data['R_errs'].append(corner_warp_distance)
+            data['t_errs'].append(0)
+            data['inliers'].append(inliers)
+
+def compute_warp_control_pts_errors(data, config):
+    """
+    Update:
+        data (dict):{
+            "R_errs" List[float]: [N]  # Actually warp error
+            "t_errs" List[float]: [N]  # Zero, place holder
+            "inliers" List[np.ndarray]: [N]
+        }
+    """
+    pixel_thr = config.TRAINER.RANSAC_PIXEL_THR  # 0.5
+    conf = config.TRAINER.RANSAC_CONF  # 0.99999
+    data.update({'R_errs': [], 't_errs': [], 'inliers': []})
+
+    if config.LOFTR.FINE.MTD_SPVS:
+        m_bids = data['m_bids_f'].cpu().numpy() if 'm_bids_f' in data else data['m_bids'].cpu().numpy()
+
+    else:
+        m_bids = data['m_bids'].cpu().numpy()
+    pts0 = data['mkpts0_f'].cpu().numpy()
+    pts1 = data['mkpts1_f'].cpu().numpy()
+    gt_2D_matches = data["gt_2D_matches"].cpu().numpy()
+
+    data.update({'epi_errs': torch.zeros(m_bids.shape[0])})
+    for bs in range(gt_2D_matches.shape[0]):
+        mask = m_bids == bs
+        ret = estimate_homo(pts0[mask], pts1[mask], pixel_thr, conf=conf, mode=config.TRAINER.WARP_ESTIMATOR_MODEL)
+
+        if ret is None:
+            data['R_errs'].append(np.inf)
+            data['t_errs'].append(np.inf)
+            data['inliers'].append(np.array([]).astype(bool))
+        else:
+            H_est, inliers = ret
+            img0_pts, img1_pts = gt_2D_matches[bs][:, :2], gt_2D_matches[bs][:, 2:]
+            pts_warp_distance = warp_pts_error(H_est, img0_pts, pts_gt=img1_pts)
+            print(pts_warp_distance)
+            data['R_errs'].append(pts_warp_distance)
+            data['t_errs'].append(0)
+            data['inliers'].append(inliers)
+
+def compute_pose_errors(data, config):
+    """
+    Update:
+        data (dict):{
+            "R_errs" List[float]: [N]
+            "t_errs" List[float]: [N]
+            "inliers" List[np.ndarray]: [N]
+        }
+    """
+    pixel_thr = config.TRAINER.RANSAC_PIXEL_THR  # 0.5
+    conf = config.TRAINER.RANSAC_CONF  # 0.99999
+    data.update({'R_errs': [], 't_errs': [], 'inliers': []})
+
+    if config.LOFTR.FINE.MTD_SPVS:
+        m_bids = data['m_bids_f'].cpu().numpy() if 'm_bids_f' in data else data['m_bids'].cpu().numpy()
+
+    else:
+        m_bids = data['m_bids'].cpu().numpy()
+    pts0 = data['mkpts0_f'].cpu().numpy()
+    pts1 = data['mkpts1_f'].cpu().numpy()
+    K0 = data['K0'].cpu().numpy()
+    K1 = data['K1'].cpu().numpy()
+    T_0to1 = data['T_0to1'].cpu().numpy()
+
+    for bs in range(K0.shape[0]):
+        mask = m_bids == bs
+        if config.LOFTR.EVAL_TIMES >= 1:
+            bpts0, bpts1 = pts0[mask], pts1[mask]
+            R_list, T_list, inliers_list = [], [], []
+            for _ in range(5):
+                shuffling = np.random.permutation(np.arange(len(bpts0)))
+                if _ >= config.LOFTR.EVAL_TIMES:
+                    continue
+                bpts0 = bpts0[shuffling]
+                bpts1 = bpts1[shuffling]
+
+                ret = estimate_pose(bpts0, bpts1, K0[bs], K1[bs], pixel_thr, conf=conf)
+                if ret is None:
+                    R_list.append(np.inf)
+                    T_list.append(np.inf)
+                    inliers_list.append(np.array([]).astype(bool))
+                    print('Pose error: inf')
+                else:
+                    R, t, inliers = ret
+                    t_err, R_err = relative_pose_error(T_0to1[bs], R, t, ignore_gt_t_thr=0.0)
+                    R_list.append(R_err)
+                    T_list.append(t_err)
+                    inliers_list.append(inliers)
+                    print(f'Pose error: {max(R_err, t_err)}')
+            R_err_mean = np.array(R_list).mean()
+            T_err_mean = np.array(T_list).mean()
+            # inliers_mean = np.array(inliers_list).mean()
+
+            data['R_errs'].append(R_list)
+            data['t_errs'].append(T_list)
+            data['inliers'].append(inliers_list[0])
+
+        else:
+            ret = estimate_pose(pts0[mask], pts1[mask], K0[bs], K1[bs], pixel_thr, conf=conf)
+
+            if ret is None:
+                data['R_errs'].append(np.inf)
+                data['t_errs'].append(np.inf)
+                data['inliers'].append(np.array([]).astype(bool))
+                print('Pose error: inf')
+            else:
+                R, t, inliers = ret
+                t_err, R_err = relative_pose_error(T_0to1[bs], R, t, ignore_gt_t_thr=0.0)
+                data['R_errs'].append(R_err)
+                data['t_errs'].append(t_err)
+                data['inliers'].append(inliers)
+                print(f'Pose error: {max(R_err, t_err)}')
+
+
+# --- METRIC AGGREGATION ---
+def error_rmse(error):
+    squared_errors = np.square(error)  # N * 2
+    mse = np.mean(np.sum(squared_errors, axis=1))
+    rmse = np.sqrt(mse)
+    return rmse
+
+def error_mae(error):
+    abs_diff = np.abs(error)  # N * 2
+    absolute_errors = np.sum(abs_diff, axis=1)
+
+    # Return the maximum absolute error (note: despite the name, this is a max, not a mean)
+    mae = np.max(absolute_errors)
+    return mae
+
+def error_auc(errors, thresholds, method='exact_auc'):
+    """
+    Args:
+        errors (list): [N,]
+        thresholds (list)
+    """
+    if method == 'exact_auc':
+        errors = [0] + sorted(list(errors))
+        recall = list(np.linspace(0, 1, len(errors)))
+
+        aucs = []
+        for thr in thresholds:
+            last_index = np.searchsorted(errors, thr)
+            y = recall[:last_index] + [recall[last_index-1]]
+            x = errors[:last_index] + [thr]
+            aucs.append(np.trapz(y, x) / thr)
+        return {f'auc@{t}': auc for t, auc in zip(thresholds, aucs)}
+    elif method == 'fire_paper':
+        aucs = []
+        for threshold in thresholds:
+            accum_error = 0
+            percent_error_below = np.zeros(threshold + 1)
+            for i in range(1, threshold + 1):
+                percent_error_below[i] = np.sum(errors < i) * 100 / len(errors)
+                accum_error += percent_error_below[i]
+
+            aucs.append(accum_error / (threshold * 100))
+
+        return {f'auc@{t}': auc for t, auc in zip(thresholds, aucs)}
+    elif method == 'success_rate':
+        aucs = []
+        for threshold in thresholds:
+            aucs.append((errors < threshold).astype(float).mean())
+        return {f'SR@{t}': auc for t, auc in zip(thresholds, aucs)}
+    else:
+        raise NotImplementedError
+
+
+def epidist_prec(errors, thresholds, ret_dict=False):
+    precs = []
+    for thr in thresholds:
+        prec_ = []
+        for errs in errors:
+            correct_mask = errs < thr
+            prec_.append(np.mean(correct_mask) if len(correct_mask) > 0 else 0)
+        precs.append(np.mean(prec_) if len(prec_) > 0 else 0)
+    if ret_dict:
+        return {f'prec@{t:.0e}': prec for t, prec in zip(thresholds, precs)}
+    else:
+        return precs
+
+
+def aggregate_metrics(metrics, epi_err_thr=5e-4, eval_n_time=1, threshold=[5, 10, 20], method='exact_auc'):
+    """ Aggregate metrics for the whole dataset:
+    (This method should be called once per dataset)
+    1. AUC of the pose error (angular) at the threshold [5, 10, 20]
+    2. Mean matching precision at the threshold 5e-4(ScanNet), 1e-4(MegaDepth)
+    """
+    # filter duplicates
+    unq_ids = OrderedDict((iden, id) for id, iden in enumerate(metrics['identifiers']))
+    unq_ids = list(unq_ids.values())
+    logger.info(f'Aggregating metrics over {len(unq_ids)} unique items...')
+
+    # pose auc
+    angular_thresholds = threshold
+    if eval_n_time >= 1:
+        pose_errors = np.max(np.stack([metrics['R_errs'], metrics['t_errs']]), axis=0).reshape(-1, eval_n_time)[unq_ids].reshape(-1)
+    else:
+        pose_errors = np.max(np.stack([metrics['R_errs'], metrics['t_errs']]), axis=0)[unq_ids]
+    logger.info('num of pose_errors: {}'.format(pose_errors.shape))
+    aucs = error_auc(pose_errors, angular_thresholds, method=method)  # (auc@5, auc@10, auc@20)
+
+    if eval_n_time >= 1:
+        for i in range(eval_n_time):
+            aucs_i = error_auc(pose_errors.reshape(-1, eval_n_time)[:, i], angular_thresholds, method=method)
+            logger.info('\n' + f'results of {i}-th RANSAC' + pprint.pformat(aucs_i))
+    # matching precision
+    dist_thresholds = [epi_err_thr]
+    precs = epidist_prec(np.array(metrics['epi_errs'], dtype=object)[unq_ids], dist_thresholds, True)  # (prec@err_thr)
+
+    u_num_matches = np.array(metrics['num_matches'], dtype=object)[unq_ids]
+    u_percent_inliers = np.array(metrics['percent_inliers'], dtype=object)[unq_ids]
+    num_matches = {'num_matches': u_num_matches.mean()}
+    percent_inliers = {'percent_inliers': u_percent_inliers.mean()}
+    return {**aucs, **precs, **num_matches, **percent_inliers}
diff --git a/imcui/third_party/MatchAnything/src/utils/misc.py b/imcui/third_party/MatchAnything/src/utils/misc.py
new file mode 100644
index 0000000000000000000000000000000000000000..9c8db04666519753ea2df43903ab6c47ec00a9a1
--- /dev/null
+++ b/imcui/third_party/MatchAnything/src/utils/misc.py
@@ -0,0 +1,101 @@
+import os
+import contextlib
+import joblib
+from typing import Union
+from loguru import _Logger, logger
+from itertools import chain
+
+import torch
+from yacs.config import CfgNode as CN
+from pytorch_lightning.utilities import rank_zero_only
+
+
+def lower_config(yacs_cfg):
+    if not isinstance(yacs_cfg, CN):
+        return yacs_cfg
+    return {k.lower(): lower_config(v) for k, v in yacs_cfg.items()}
+
+
+def upper_config(dict_cfg):
+    if not isinstance(dict_cfg, dict):
+        return dict_cfg
+    return {k.upper(): upper_config(v) for k, v in dict_cfg.items()}
+
+
+def log_on(condition, message, level):
+    if condition:
+        assert level in ['INFO', 'DEBUG', 'WARNING', 'ERROR', 'CRITICAL']
+        logger.log(level, message)
+
+
+def get_rank_zero_only_logger(logger: _Logger):
+    if rank_zero_only.rank == 0:
+        return logger
+    else:
+        for _level in logger._core.levels.keys():
+            level = _level.lower()
+            setattr(logger, level,
+                    lambda x: None)
+        logger._log = lambda x: None
+    return logger
+
+
+def setup_gpus(gpus: Union[str, int]) -> int:
+    """ A temporary fix for pytorch-lightning 1.3.x """
+    gpus = str(gpus)
+    gpu_ids = []
+
+    if ',' not in gpus:
+        n_gpus = int(gpus)
+        return n_gpus if n_gpus != -1 else torch.cuda.device_count()
+    else:
+        gpu_ids = [i.strip() for i in gpus.split(',') if i != '']
+
+    # setup environment variables
+    visible_devices = os.getenv('CUDA_VISIBLE_DEVICES')
+    if visible_devices is None:
+        os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
+        os.environ["CUDA_VISIBLE_DEVICES"] = ','.join(str(i) for i in gpu_ids)
+        visible_devices = os.getenv('CUDA_VISIBLE_DEVICES')
+        logger.warning(f'[Temporary Fix] manually set CUDA_VISIBLE_DEVICES when specifying gpus to use: {visible_devices}')
+    else:
+        logger.warning('[Temporary Fix] CUDA_VISIBLE_DEVICES already set by user or the main process.')
+    return len(gpu_ids)
+
+
+def flattenList(x):
+    return list(chain(*x))
+
+
+@contextlib.contextmanager
+def tqdm_joblib(tqdm_object):
+    """Context manager to patch joblib to report into the tqdm progress bar given as argument.
+
+    Usage:
+        with tqdm_joblib(tqdm(desc="My calculation", total=10)) as progress_bar:
+            Parallel(n_jobs=16)(delayed(sqrt)(i**2) for i in range(10))
+
+    When iterating over a generator, using tqdm directly is also a solution (but it monitors
+    task queuing instead of task completion):
+        ret_vals = Parallel(n_jobs=args.world_size)(
+                    delayed(lambda x: _compute_cov_score(pid, *x))(param)
+                        for param in tqdm(combinations(image_ids, 2),
+                                          desc=f'Computing cov_score of [{pid}]',
+                                          total=len(image_ids)*(len(image_ids)-1)/2))
+    Src: https://stackoverflow.com/a/58936697
+    """
+    class TqdmBatchCompletionCallback(joblib.parallel.BatchCompletionCallBack):
+        def __init__(self, *args, **kwargs):
+            super().__init__(*args, **kwargs)
+
+        def __call__(self, *args, **kwargs):
+            tqdm_object.update(n=self.batch_size)
+            return super().__call__(*args, **kwargs)
+
+    old_batch_callback = joblib.parallel.BatchCompletionCallBack
+    joblib.parallel.BatchCompletionCallBack = TqdmBatchCompletionCallback
+    try:
+        yield tqdm_object
+    finally:
+        joblib.parallel.BatchCompletionCallBack = old_batch_callback
+        tqdm_object.close()
+
diff --git a/imcui/third_party/MatchAnything/src/utils/plotting.py b/imcui/third_party/MatchAnything/src/utils/plotting.py
new file mode 100644
index 0000000000000000000000000000000000000000..87d39733f57a09e4db61b61e53f82ad80e4a839b
--- /dev/null
+++ b/imcui/third_party/MatchAnything/src/utils/plotting.py
@@ -0,0 +1,248 @@
+import bisect
+import numpy as np
+import matplotlib.pyplot as plt
+import matplotlib
+
+import torch
+
+def 
_compute_conf_thresh(data): + dataset_name = data['dataset_name'][0].lower() + if dataset_name == 'scannet': + thr = 5e-4 + elif dataset_name == 'megadepth': + thr = 1e-4 + else: + raise ValueError(f'Unknown dataset: {dataset_name}') + return thr + + +# --- VISUALIZATION --- # +def make_matching_figure( + img0, img1, mkpts0, mkpts1, color, + kpts0=None, kpts1=None, text=[], dpi=75, path=None): + # draw image pair + assert mkpts0.shape[0] == mkpts1.shape[0], f'mkpts0: {mkpts0.shape[0]} v.s. mkpts1: {mkpts1.shape[0]}' + fig, axes = plt.subplots(1, 2, figsize=(10, 6), dpi=dpi) + axes[0].imshow(img0, cmap='gray') + axes[1].imshow(img1, cmap='gray') + for i in range(2): # clear all frames + axes[i].get_yaxis().set_ticks([]) + axes[i].get_xaxis().set_ticks([]) + for spine in axes[i].spines.values(): + spine.set_visible(False) + plt.tight_layout(pad=1) + + if kpts0 is not None: + assert kpts1 is not None + axes[0].scatter(kpts0[:, 0], kpts0[:, 1], c='w', s=2) + axes[1].scatter(kpts1[:, 0], kpts1[:, 1], c='w', s=2) + + # draw matches + if mkpts0.shape[0] != 0 and mkpts1.shape[0] != 0: + fig.canvas.draw() + transFigure = fig.transFigure.inverted() + fkpts0 = transFigure.transform(axes[0].transData.transform(mkpts0)) + fkpts1 = transFigure.transform(axes[1].transData.transform(mkpts1)) + fig.lines = [matplotlib.lines.Line2D((fkpts0[i, 0], fkpts1[i, 0]), + (fkpts0[i, 1], fkpts1[i, 1]), + transform=fig.transFigure, c=color[i], linewidth=1) + for i in range(len(mkpts0))] + + axes[0].scatter(mkpts0[:, 0], mkpts0[:, 1], c=color, s=4) + axes[1].scatter(mkpts1[:, 0], mkpts1[:, 1], c=color, s=4) + + # put txts + txt_color = 'k' if img0[:100, :200].mean() > 200 else 'w' + fig.text( + 0.01, 0.99, '\n'.join(text), transform=fig.axes[0].transAxes, + fontsize=15, va='top', ha='left', color=txt_color) + + # save or return figure + if path: + plt.savefig(str(path), bbox_inches='tight', pad_inches=0) + plt.close() + else: + return fig + + +def _make_evaluation_figure(data, b_id, alpha='dynamic', use_m_bids_f=False): + if use_m_bids_f: + b_mask = (data['m_bids_f'] == b_id) if 'm_bids_f' in data else (data['m_bids'] == b_id) + else: + b_mask = data['m_bids'] == b_id + conf_thr = _compute_conf_thresh(data) + + img0 = (data['image0'][b_id][0].cpu().numpy() * 255).round().astype(np.int32) + img1 = (data['image1'][b_id][0].cpu().numpy() * 255).round().astype(np.int32) + kpts0 = data['mkpts0_f'][b_mask].cpu().numpy() + kpts1 = data['mkpts1_f'][b_mask].clone().detach().cpu().numpy() + + # for megadepth, we visualize matches on the resized image + if 'scale0' in data: + kpts0 = kpts0 / data['scale0'][b_id].cpu().numpy() + kpts1 = kpts1 / data['scale1'][b_id].cpu().numpy() + + epi_errs = data['epi_errs'][b_mask].cpu().numpy() + correct_mask = epi_errs < conf_thr + precision = np.mean(correct_mask) if len(correct_mask) > 0 else 0 + n_correct = np.sum(correct_mask) + n_gt_matches = int(data['conf_matrix_gt'][b_id].sum().cpu()) if 'conf_matrix_gt' in data else data['gt'][1]['gt_prob'].sum() + recall = 0 if n_gt_matches == 0 else n_correct / (n_gt_matches) + # recall might be larger than 1, since the calculation of conf_matrix_gt + # uses groundtruth depths and camera poses, but epipolar distance is used here. 
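+    # (put differently: the precision denominator comes from the predicted matches while the
+    #  recall denominator comes from a separately computed GT set, so recall here is a rough
+    #  proxy rather than a strict [0, 1] metric)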
+
+    # matching info
+    if alpha == 'dynamic':
+        alpha = dynamic_alpha(len(correct_mask))
+    color = error_colormap(epi_errs, conf_thr, alpha=alpha)
+
+    text = [
+        f'#Matches {len(kpts0)}',
+        f'Precision({conf_thr:.2e}) ({100 * precision:.1f}%): {n_correct}/{len(kpts0)}',
+        f'Recall({conf_thr:.2e}) ({100 * recall:.1f}%): {n_correct}/{n_gt_matches}'
+    ]
+
+    # make the figure
+    figure = make_matching_figure(img0, img1, kpts0, kpts1,
+                                  color, text=text)
+    return figure
+
+def _make_confidence_figure(data, b_id):
+    raise NotImplementedError()
+
+def _make_gt_figure(data, b_id, alpha='dynamic', use_m_bids_f=False, mode='gt_fine'):
+    if 'fine' in mode:
+        mkpts0_key, mkpts1_key = 'mkpts0_f_gt', 'mkpts1_f_gt'
+    else:
+        mkpts0_key, mkpts1_key = 'mkpts0_c_gt', 'mkpts1_c_gt'
+
+    if data['image0'].shape[0] == 1:
+        b_mask = torch.tensor([True]*data[mkpts0_key].shape[0], device=data[mkpts0_key].device)
+    else:
+        raise NotImplementedError
+
+    conf_thr = _compute_conf_thresh(data)
+
+    img0 = (data['image0'][b_id][0].cpu().numpy() * 255).round().astype(np.int32)
+    img1 = (data['image1'][b_id][0].cpu().numpy() * 255).round().astype(np.int32)
+    try:
+        kpts0 = data[mkpts0_key][b_mask].cpu().numpy()
+        kpts1 = data[mkpts1_key][b_mask].cpu().numpy()
+    except Exception:  # fall back to empty keypoint sets when the GT keys are missing
+        kpts0, kpts1 = np.ones((0, 2)), np.ones((0, 2))
+
+    # for megadepth, we visualize matches on the resized image
+    if 'scale0' in data:
+        kpts0 = kpts0 / data['scale0'][b_id].cpu().numpy()
+        kpts1 = kpts1 / data['scale1'][b_id].cpu().numpy()
+
+    # matching info
+    if alpha == 'dynamic':
+        alpha = dynamic_alpha(len(kpts0))
+    color = error_colormap(np.full((kpts0.shape[0]), conf_thr), conf_thr, alpha=0.1)
+
+    text = [
+        f'#Matches {len(kpts0)}',
+    ]
+
+    # make the figure
+    figure = make_matching_figure(img0, img1, kpts0, kpts1,
+                                  color, text=text)
+    return figure
+
+def make_matching_figures(data, config, mode='evaluation'):
+    """ Make matching figures for a batch.
+
+    Args:
+        data (Dict): a batch updated by PL_LoFTR.
+        config (Dict): matcher config
+    Returns:
+        figures (Dict[str, List[plt.figure]])
+    """
+    figures = {mode: []}
+    for b_id in range(data['image0'].size(0)):
+        if mode == 'evaluation':
+            fig = _make_evaluation_figure(
+                data, b_id,
+                alpha=config.TRAINER.PLOT_MATCHES_ALPHA,
+                use_m_bids_f=config.LOFTR.FINE.MTD_SPVS)
+        elif mode == 'confidence':
+            fig = _make_confidence_figure(data, b_id)
+        elif 'gt' in mode:
+            fig = _make_gt_figure(data, b_id, use_m_bids_f=config.LOFTR.FINE.MTD_SPVS, mode=mode)
+        else:
+            raise ValueError(f'Unknown plot mode: {mode}')
+        figures[mode].append(fig)
+    return figures
+
+def make_scores_figures(data, config, mode='evaluation'):
+    """ Make score histogram figures for a batch.
+
+    Args:
+        data (Dict): a batch updated by PL_LoFTR.
+        config (Dict): matcher config
+    Returns:
+        figures (Dict[str, List[plt.figure]])
+    """
+    assert mode in ['evaluation', 'confidence', 'gt']  # 'confidence'
+    figures = {mode: []}
+    for b_id in range(data['image0'].size(0)):
+        if mode == 'evaluation':
+            plots = []
+            if config.LOFTR.MATCH_COARSE.SKIP_SOFTMAX and config.LOFTR.MATCH_COARSE.PLOT_ORIGIN_SCORES:
+                plots = [data['histc_skipmn_in_softmax'][b_id].reshape(-1)]  # [-30, 70] scores
+                if 'histc_skipmn_in_softmax_gt' in data:
+                    plots.append(data['histc_skipmn_in_softmax_gt'][b_id].reshape(-1))
+            elif config.LOFTR.MATCH_COARSE.PLOT_ORIGIN_SCORES:
+                pass
+            else:
+                pass
+            if not plots:  # guard: `plots` is only populated in the first branch above
+                continue
+            print(plots[0], plots[-1])
+            group = len(plots)
+            start, end = 0, 100
+            bins = 100
+            width = (end//bins-1)/group
+            fig, ax = plt.subplots()
+            for i, hist in enumerate(plots):
+                ax.set_yscale('log')
+                x = range(start, end, end//bins)
+                x = [t + i*width for t in x]
+                ax.bar(x, hist.cpu(), align='edge', width=width)
+
+        elif mode == 'confidence':
+            raise NotImplementedError()
+        elif mode == 'gt':
+            raise NotImplementedError()
+        else:
+            raise ValueError(f'Unknown plot mode: {mode}')
+        figures[mode].append(fig)
+    return figures
+
+
+def dynamic_alpha(n_matches,
+                  milestones=[0, 300, 1000, 2000],
+                  alphas=[1.0, 0.8, 0.4, 0.2]):
+    if n_matches == 0:
+        return 1.0
+    ranges = list(zip(alphas, alphas[1:] + [None]))
+    loc = bisect.bisect_right(milestones, n_matches) - 1
+    _range = ranges[loc]
+    if _range[1] is None:
+        return _range[0]
+    return _range[1] + (milestones[loc + 1] - n_matches) / (
+        milestones[loc + 1] - milestones[loc]) * (_range[0] - _range[1])
+
+
+def error_colormap(err, thr, alpha=1.0):
+    assert alpha <= 1.0 and alpha > 0, f"Invalid alpha value: {alpha}"
+    if thr is not None:
+        large_error_mask = err > (thr * 2)
+        x = np.clip((err - thr) / (thr * 2), 0, 1)
+    else:
+        large_error_mask = np.zeros_like(err, dtype=bool)
+        x = np.clip(err, 0.1, 1)
+
+    cm_ = matplotlib.colormaps['RdYlGn_r']
+    color = cm_(x, bytes=False)
+    color[:, 3] = alpha
+    color[:, 3][large_error_mask] = alpha * 0.6
+    return color
\ No newline at end of file
diff --git a/imcui/third_party/MatchAnything/src/utils/profiler.py b/imcui/third_party/MatchAnything/src/utils/profiler.py
new file mode 100644
index 0000000000000000000000000000000000000000..6d21ed79fb506ef09c75483355402c48a195aaa9
--- /dev/null
+++ b/imcui/third_party/MatchAnything/src/utils/profiler.py
@@ -0,0 +1,39 @@
+import torch
+from pytorch_lightning.profiler import SimpleProfiler, PassThroughProfiler
+from contextlib import contextmanager
+from pytorch_lightning.utilities import rank_zero_only
+
+
+class InferenceProfiler(SimpleProfiler):
+    """
+    This profiler records the duration of actions with cuda.synchronize().
+    Use this at test time.
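+
+    Usage sketch (`model` and `batch` are placeholders, not part of this module):
+        profiler = InferenceProfiler()
+        with profiler.profile('inference'):
+            out = model(batch)
+        print(profiler.summary())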
+ """ + + def __init__(self): + super().__init__() + self.start = rank_zero_only(self.start) + self.stop = rank_zero_only(self.stop) + self.summary = rank_zero_only(self.summary) + + @contextmanager + def profile(self, action_name: str) -> None: + try: + torch.cuda.synchronize() + self.start(action_name) + yield action_name + finally: + torch.cuda.synchronize() + self.stop(action_name) + + +def build_profiler(name): + if name == 'inference': + return InferenceProfiler() + elif name == 'pytorch': + from pytorch_lightning.profiler import PyTorchProfiler + return PyTorchProfiler(use_cuda=True, profile_memory=True, row_limit=100) + elif name is None: + return PassThroughProfiler() + else: + raise ValueError(f'Invalid profiler: {name}') diff --git a/imcui/third_party/MatchAnything/src/utils/ray_utils.py b/imcui/third_party/MatchAnything/src/utils/ray_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..9d6c7bf3172f4d53513338b1b615efcfded1c4c9 --- /dev/null +++ b/imcui/third_party/MatchAnything/src/utils/ray_utils.py @@ -0,0 +1,134 @@ +from asyncio import Event +from typing import Tuple +import numpy as np +import random + +import ray +from ray.actor import ActorHandle +from tqdm import tqdm + + +@ray.remote +class ProgressBarActor: + counter: int + delta: int + event: Event + + def __init__(self) -> None: + self.counter = 0 + self.delta = 0 + self.event = Event() + + def update(self, num_items_completed: int) -> None: + """Updates the ProgressBar with the incremental + number of items that were just completed. + """ + self.counter += num_items_completed + self.delta += num_items_completed + self.event.set() + + async def wait_for_update(self) -> Tuple[int, int]: + """Blocking call. + + Waits until somebody calls `update`, then returns a tuple of + the number of updates since the last call to + `wait_for_update`, and the total number of completed items. + """ + await self.event.wait() + self.event.clear() + saved_delta = self.delta + self.delta = 0 + return saved_delta, self.counter + + def get_counter(self) -> int: + """ + Returns the total number of complete items. + """ + return self.counter + + +class ProgressBar: + progress_actor: ActorHandle + total: int + description: str + pbar: tqdm + + def __init__(self, total: int, description: str = ""): + # Ray actors don't seem to play nice with mypy, generating + # a spurious warning for the following line, + # which we need to suppress. The code is fine. + self.progress_actor = ProgressBarActor.remote() # type: ignore + self.total = total + self.description = description + + @property + def actor(self) -> ActorHandle: + """Returns a reference to the remote `ProgressBarActor`. + + When you complete tasks, call `update` on the actor. + """ + return self.progress_actor + + def print_until_done(self) -> None: + """Blocking call. + + Do this after starting a series of remote Ray tasks, to which you've + passed the actor handle. Each of them calls `update` on the actor. + When the progress meter reaches 100%, this method returns. 
+ """ + pbar = tqdm(desc=self.description, total=self.total) + while True: + delta, counter = ray.get(self.actor.wait_for_update.remote()) + pbar.update(delta) + if counter >= self.total: + pbar.close() + return + +# Ray data utils +def chunks(lst, n, length=None): + """Yield successive n-sized chunks from lst.""" + try: + _len = len(lst) + except TypeError as _: + assert length is not None + _len = length + + for i in range(0, _len, n): + yield lst[i : i + n] + +def chunks_balance(lst, n_split): + if n_split == 0: + # 0 is not allowed + n_split = 1 + splited_list = [[] for i in range(n_split)] + for id, obj in enumerate(lst): + assign_id = id % n_split + splited_list[assign_id].append(obj) + return splited_list + + +def chunk_index(total_len, sub_len, shuffle=True): + index_array = np.arange(total_len) + if shuffle: + random.shuffle(index_array) + + index_list = [] + for i in range(0, total_len, sub_len): + index_list.append(list(index_array[i : i + sub_len])) + + return index_list + +def chunk_index_balance(total_len, n_split, shuffle=True): + index_array = np.arange(total_len) + if shuffle: + random.shuffle(index_array) + + splited_list = [[] for i in range(n_split)] + for id, obj in enumerate(index_array): + assign_id = id % n_split + splited_list[assign_id].append(obj) + return splited_list + +def split_dict(_dict, n): + for _items in chunks(list(_dict.items()), n): + yield dict(_items) \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/src/utils/sample_homo.py b/imcui/third_party/MatchAnything/src/utils/sample_homo.py new file mode 100644 index 0000000000000000000000000000000000000000..d4fdac234e1058b5a1106a98b5c3f77426af4b91 --- /dev/null +++ b/imcui/third_party/MatchAnything/src/utils/sample_homo.py @@ -0,0 +1,58 @@ +import numpy as np + +# ----- Similarity-Affinity-Perspective (SAP) impl ----- # + +def similarity_mat(angle, tx, ty, s): + theta = np.deg2rad(angle) + return np.array([[s*np.cos(theta), -s*np.sin(theta), tx], [s*np.sin(theta), s*np.cos(theta), ty], [0, 0, 1]]) + + +def affinity_mat(k0, k1): + return np.array([[k0, k1, 0], [0, 1/k0, 0], [0, 0, 1]]) + + +def perspective_mat(v0, v1): + return np.array([[1, 0, 0], [0, 1, 0], [v0, v1, 1]]) + + +def compute_homography_sap(h, w, angle=0, tx=0, ty=0, scale=1, k0=1, k1=0, v0=0, v1=0): + """ + Args: + img_size: (h, w) + angle: in degree, goes clock-wise in image-coordinate-system + tx, ty: displacement + scale: factor to zoom in, by default 1 + k0: non-isotropic squeeze factor - 1 +(stretch x, squeeze y) [0.5, 1.5] + k1: non-isotropic skew factor, - 0 +(up-to-left, down-to-right) [-0.5, 0.5] + v0: left-right perspective factor, - 0 +(move left) [-1, 1] + v1: up-down perspective factor, - 0 +(move up) [-1, 1] + """ + # move image to its center + max_size = max(w/2, h/2) + M_norm = similarity_mat(0, 0, 0, 1/max_size).dot(similarity_mat(0, -w/2, -h/2, 1)) + M_denorm = similarity_mat(0, w/2, h/2, 1).dot(similarity_mat(0, 0, 0, max_size)) + + # compute HS, HA and HP accordingly + HS = similarity_mat(angle, tx, ty, scale) + HA = affinity_mat(k0, k1) + HP = perspective_mat(v0, v1) + + # final H + H = M_denorm.dot(HS).dot(HA).dot(HP).dot(M_norm) + return H + + +def sample_homography_sap(h, w, angle=180, tx=0.25, ty=0.25, scale=2.0, k1=0.1, v0=0.5, v1=0.5): + angle = np.random.uniform(-1 * angle, angle) + tx = np.random.uniform(-1 * tx, tx) + ty = np.random.uniform(-1 * ty, ty) + scale = np.random.uniform(1/scale, scale) + + k0 = 1 # similar effects as the ratio of xy-focal lengths + k1 = np.random.uniform(-1 * k1, 
k1) + + v0 = np.random.uniform(-1 * v0, v0) + v1 = np.random.uniform(-1 * v1, v1) + + H = compute_homography_sap(h, w, angle, tx, ty, scale, k0, k1, v0, v1) + return H \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/src/utils/utils.py b/imcui/third_party/MatchAnything/src/utils/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..6f2f3d6f96fbb800d290c83a174f93ead187dcf3 --- /dev/null +++ b/imcui/third_party/MatchAnything/src/utils/utils.py @@ -0,0 +1,600 @@ +from pathlib import Path +import time +from collections import OrderedDict +from threading import Thread +from loguru import logger +from PIL import Image + +import numpy as np +import cv2 +import torch +import matplotlib.pyplot as plt +import matplotlib +matplotlib.use('Agg') + +class AverageTimer: + """ Class to help manage printing simple timing of code execution. """ + + def __init__(self, smoothing=0.3, newline=False): + self.smoothing = smoothing + self.newline = newline + self.times = OrderedDict() + self.will_print = OrderedDict() + self.reset() + + def reset(self): + now = time.time() + self.start = now + self.last_time = now + for name in self.will_print: + self.will_print[name] = False + + def update(self, name='default'): + now = time.time() + dt = now - self.last_time + if name in self.times: + dt = self.smoothing * dt + (1 - self.smoothing) * self.times[name] + self.times[name] = dt + self.will_print[name] = True + self.last_time = now + + def print(self, text='Timer'): + total = 0. + print('[{}]'.format(text), end=' ') + for key in self.times: + val = self.times[key] + if self.will_print[key]: + print('%s=%.3f' % (key, val), end=' ') + total += val + print('total=%.3f sec {%.1f FPS}' % (total, 1./total), end=' ') + if self.newline: + print(flush=True) + else: + print(end='\r', flush=True) + self.reset() + + +class VideoStreamer: + """ Class to help process image streams. Four types of possible inputs:" + 1.) USB Webcam. + 2.) An IP camera + 3.) A directory of images (files in directory matching 'image_glob'). + 4.) A video file, such as an .mp4 or .avi file. 
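+
+    Example (illustrative sketch; the directory and glob are placeholders):
+        vs = VideoStreamer('assets/frames/', resize=[640, 480], skip=1, image_glob=['*.png'])
+        frame, ok = vs.next_frame()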
+ """ + + def __init__(self, basedir, resize, skip, image_glob, max_length=1000000): + self._ip_grabbed = False + self._ip_running = False + self._ip_camera = False + self._ip_image = None + self._ip_index = 0 + self.cap = [] + self.camera = True + self.video_file = False + self.listing = [] + self.resize = resize + self.interp = cv2.INTER_AREA + self.i = 0 + self.skip = skip + self.max_length = max_length + if isinstance(basedir, int) or basedir.isdigit(): + print('==> Processing USB webcam input: {}'.format(basedir)) + self.cap = cv2.VideoCapture(int(basedir)) + self.listing = range(0, self.max_length) + elif basedir.startswith(('http', 'rtsp')): + print('==> Processing IP camera input: {}'.format(basedir)) + self.cap = cv2.VideoCapture(basedir) + self.start_ip_camera_thread() + self._ip_camera = True + self.listing = range(0, self.max_length) + elif Path(basedir).is_dir(): + print('==> Processing image directory input: {}'.format(basedir)) + self.listing = list(Path(basedir).glob(image_glob[0])) + for j in range(1, len(image_glob)): + image_path = list(Path(basedir).glob(image_glob[j])) + self.listing = self.listing + image_path + self.listing.sort() + self.listing = self.listing[::self.skip] + self.max_length = np.min([self.max_length, len(self.listing)]) + if self.max_length == 0: + raise IOError('No images found (maybe bad \'image_glob\' ?)') + self.listing = self.listing[:self.max_length] + self.camera = False + elif Path(basedir).exists(): + print('==> Processing video input: {}'.format(basedir)) + self.cap = cv2.VideoCapture(basedir) + self.cap.set(cv2.CAP_PROP_BUFFERSIZE, 1) + num_frames = int(self.cap.get(cv2.CAP_PROP_FRAME_COUNT)) + self.listing = range(0, num_frames) + self.listing = self.listing[::self.skip] + self.video_file = True + self.max_length = np.min([self.max_length, len(self.listing)]) + self.listing = self.listing[:self.max_length] + else: + raise ValueError('VideoStreamer input \"{}\" not recognized.'.format(basedir)) + if self.camera and not self.cap.isOpened(): + raise IOError('Could not read camera') + + def load_image(self, impath): + """ Read image as grayscale and resize to img_size. + Inputs + impath: Path to input image. + Returns + grayim: uint8 numpy array sized H x W. + """ + grayim = cv2.imread(impath, 0) + if grayim is None: + raise Exception('Error reading image %s' % impath) + w, h = grayim.shape[1], grayim.shape[0] + w_new, h_new = process_resize(w, h, self.resize) + grayim = cv2.resize( + grayim, (w_new, h_new), interpolation=self.interp) + return grayim + + def next_frame(self): + """ Return the next frame, and increment internal counter. + Returns + image: Next H x W image. + status: True or False depending whether image was loaded. 
+ """ + + if self.i == self.max_length: + return (None, False) + if self.camera: + + if self._ip_camera: + # Wait for first image, making sure we haven't exited + while self._ip_grabbed is False and self._ip_exited is False: + time.sleep(.001) + + ret, image = self._ip_grabbed, self._ip_image.copy() + if ret is False: + self._ip_running = False + else: + ret, image = self.cap.read() + if ret is False: + print('VideoStreamer: Cannot get image from camera') + return (None, False) + w, h = image.shape[1], image.shape[0] + if self.video_file: + self.cap.set(cv2.CAP_PROP_POS_FRAMES, self.listing[self.i]) + + w_new, h_new = process_resize(w, h, self.resize) + image = cv2.resize(image, (w_new, h_new), + interpolation=self.interp) + image = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY) + else: + image_file = str(self.listing[self.i]) + image = self.load_image(image_file) + self.i = self.i + 1 + return (image, True) + + def start_ip_camera_thread(self): + self._ip_thread = Thread(target=self.update_ip_camera, args=()) + self._ip_running = True + self._ip_thread.start() + self._ip_exited = False + return self + + def update_ip_camera(self): + while self._ip_running: + ret, img = self.cap.read() + if ret is False: + self._ip_running = False + self._ip_exited = True + self._ip_grabbed = False + return + + self._ip_image = img + self._ip_grabbed = ret + self._ip_index += 1 + #print('IPCAMERA THREAD got frame {}'.format(self._ip_index)) + + def cleanup(self): + self._ip_running = False + +# --- PREPROCESSING --- + + +def process_resize(w, h, resize): + assert(len(resize) > 0 and len(resize) <= 2) + if len(resize) == 1 and resize[0] > -1: + scale = resize[0] / max(h, w) + w_new, h_new = int(round(w*scale)), int(round(h*scale)) + elif len(resize) == 1 and resize[0] == -1: + w_new, h_new = w, h + else: # len(resize) == 2: + w_new, h_new = resize[0], resize[1] + + # Issue warning if resolution is too small or too large. 
+    if max(w_new, h_new) < 160:
+        print('Warning: input resolution is very small, results may vary')
+    elif max(w_new, h_new) > 2000:
+        print('Warning: input resolution is very large, results may vary')
+
+    return w_new, h_new
+
+
+def frame2tensor(frame, device):
+    """ Convert an H x W grayscale frame (values in [0, 255]) to a normalized
+        1 x 1 x H x W float tensor.
+    """
+    return torch.from_numpy(frame/255.).float()[None, None].to(device)
+
+
+def read_image(path, device, resize, rotation, resize_float):
+    image = cv2.imread(str(path), cv2.IMREAD_GRAYSCALE)
+    if image is None:
+        return None, None, None
+    w, h = image.shape[1], image.shape[0]
+    w_new, h_new = process_resize(w, h, resize)
+    scales = (float(w) / float(w_new), float(h) / float(h_new))
+
+    if resize_float:
+        image = cv2.resize(image.astype('float32'), (w_new, h_new))
+    else:
+        image = cv2.resize(image, (w_new, h_new)).astype('float32')
+
+    if rotation != 0:
+        image = np.rot90(image, k=rotation)
+        if rotation % 2:
+            scales = scales[::-1]
+
+    inp = frame2tensor(image, device)
+    return image, inp, scales
+
+
+# --- GEOMETRY ---
+def estimate_pose(kpts0, kpts1, K0, K1, thresh, conf=0.99999):
+    if len(kpts0) < 5:
+        return None
+
+    f_mean = np.mean([K0[0, 0], K1[1, 1], K0[0, 0], K1[1, 1]])
+    norm_thresh = thresh / f_mean
+
+    kpts0 = (kpts0 - K0[[0, 1], [2, 2]][None]) / K0[[0, 1], [0, 1]][None]
+    kpts1 = (kpts1 - K1[[0, 1], [2, 2]][None]) / K1[[0, 1], [0, 1]][None]
+
+    E, mask = cv2.findEssentialMat(
+        kpts0, kpts1, np.eye(3), threshold=norm_thresh, prob=conf,
+        method=cv2.RANSAC)
+
+    # assert E is not None  # might cause an unexpected exception in the validation step
+    if E is None:
+        print("\nE is None while trying to recover pose.\n")
+        return None
+
+    best_num_inliers = 0
+    ret = None
+    for _E in np.split(E, len(E) // 3):
+        n, R, t, _ = cv2.recoverPose(
+            _E, kpts0, kpts1, np.eye(3), 1e9, mask=mask)
+        if n > best_num_inliers:
+            best_num_inliers = n
+            ret = (R, t[:, 0], mask.ravel() > 0)
+    return ret
+
+
+def estimate_pose_degensac(kpts0, kpts1, K0, K1, thresh, conf=0.9999, max_iters=1000, min_candidates=10):
+    import pydegensac
+    # TODO: Try different `min_candidates`?
+    if len(kpts0) < min_candidates:
+        return None
+
+    F, mask = pydegensac.findFundamentalMatrix(kpts0,
+                                               kpts1,
+                                               px_th=thresh,
+                                               conf=conf,
+                                               max_iters=max_iters)
+    mask = mask.astype(np.uint8)
+    E = (K1.T @ F @ K0).astype(np.float64)
+
+    kpts0 = (kpts0 - K0[[0, 1], [2, 2]][None]) / K0[[0, 1], [0, 1]][None]
+    kpts1 = (kpts1 - K1[[0, 1], [2, 2]][None]) / K1[[0, 1], [0, 1]][None]
+
+    # This might be optional (since DEGENSAC handles it internally?)
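+    # np.split below iterates over the 3x3 candidates stacked in E (a single
+    # matrix here, since it is derived from F); each is decomposed with
+    # cv2.recoverPose and the (R, t) that passes the cheirality check for the
+    # most points is kept.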
+ best_num_inliers = 0 + ret = None + for _E in np.split(E, len(E) / 3): + n, R, t, _ = cv2.recoverPose( + _E, kpts0, kpts1, np.eye(3), 1e9, mask=mask) + if n > best_num_inliers: + best_num_inliers = n + ret = (R, t[:, 0], mask.ravel() > 0) + + return ret + +def rotate_intrinsics(K, image_shape, rot): + """image_shape is the shape of the image after rotation""" + assert rot <= 3 + h, w = image_shape[:2][::-1 if (rot % 2) else 1] + fx, fy, cx, cy = K[0, 0], K[1, 1], K[0, 2], K[1, 2] + rot = rot % 4 + if rot == 1: + return np.array([[fy, 0., cy], + [0., fx, w-1-cx], + [0., 0., 1.]], dtype=K.dtype) + elif rot == 2: + return np.array([[fx, 0., w-1-cx], + [0., fy, h-1-cy], + [0., 0., 1.]], dtype=K.dtype) + else: # if rot == 3: + return np.array([[fy, 0., h-1-cy], + [0., fx, cx], + [0., 0., 1.]], dtype=K.dtype) + + +def rotate_pose_inplane(i_T_w, rot): + rotation_matrices = [ + np.array([[np.cos(r), -np.sin(r), 0., 0.], + [np.sin(r), np.cos(r), 0., 0.], + [0., 0., 1., 0.], + [0., 0., 0., 1.]], dtype=np.float32) + for r in [np.deg2rad(d) for d in (0, 270, 180, 90)] + ] + return np.dot(rotation_matrices[rot], i_T_w) + + +def scale_intrinsics(K, scales): + scales = np.diag([1./scales[0], 1./scales[1], 1.]) + return np.dot(scales, K) + + +def to_homogeneous(points): + return np.concatenate([points, np.ones_like(points[:, :1])], axis=-1) + + +def compute_epipolar_error(kpts0, kpts1, T_0to1, K0, K1, enable_MEinPC=False): + """ Comupute the squared symmetric epipolar distance (SED^2). + The essential matrix is calculated with the relative pose T_0to1. + SED can be seen as a biased estimation of the reprojection error. + Args: + enable_MEinPC: Mean Error in Pixel Coordinate + """ + kpts0 = (kpts0 - K0[[0, 1], [2, 2]][None]) / K0[[0, 1], [0, 1]][None] + kpts1 = (kpts1 - K1[[0, 1], [2, 2]][None]) / K1[[0, 1], [0, 1]][None] + kpts0 = to_homogeneous(kpts0) + kpts1 = to_homogeneous(kpts1) + + t0, t1, t2 = T_0to1[:3, 3] + t_skew = np.array([ + [0, -t2, t1], + [t2, 0, -t0], + [-t1, t0, 0] + ]) + E = t_skew @ T_0to1[:3, :3] + + Ep0 = kpts0 @ E.T # N x 3 + p1Ep0 = np.sum(kpts1 * Ep0, -1) # N + Etp1 = kpts1 @ E # N x 3 + if enable_MEinPC: + d = 0.5 * np.abs(p1Ep0) * (np.linalg.norm([K1[0, 0], K1[1, 1]]) / np.linalg.norm([Ep0[:, 0], Ep0[:, 1]], axis=0) + + np.linalg.norm([K0[0, 0], K0[1, 1]]) / np.linalg.norm([Etp1[:, 0], Etp1[:, 1]], axis=0)) # N + else: + d = p1Ep0**2 * (1.0 / (Ep0[:, 0]**2 + Ep0[:, 1]**2) + + 1.0 / (Etp1[:, 0]**2 + Etp1[:, 1]**2)) # N + return d + + +def compute_homogeneous_error(kpts0, kpts1, H): + """ warp kpts0 to img1, compute error with kpts1 + """ + kpts0 = to_homogeneous(kpts0) + + w_kpts0 = kpts0 @ H.T # N x 3 + w_kpts0 = w_kpts0[:, :2] / w_kpts0[:, [2]] + + d = np.linalg.norm(w_kpts0 - kpts1, axis=1) + return d + + +def angle_error_mat(R1, R2): + cos = (np.trace(np.dot(R1.T, R2)) - 1) / 2 + cos = np.clip(cos, -1., 1.) 
# numercial errors can make it out of bounds + return np.rad2deg(np.abs(np.arccos(cos))) + + +def angle_error_vec(v1, v2): + n = np.linalg.norm(v1) * np.linalg.norm(v2) + return np.rad2deg(np.arccos(np.clip(np.dot(v1, v2) / n, -1.0, 1.0))) + + +def compute_pose_error(T_0to1, R, t, ignore_gt_t_thr=0.0): + R_gt = T_0to1[:3, :3] + t_gt = T_0to1[:3, 3] + error_t = angle_error_vec(t, t_gt) + error_t = np.minimum(error_t, 180 - error_t) # ambiguity of E estimation + error_R = angle_error_mat(R, R_gt) + if np.linalg.norm(t_gt) < ignore_gt_t_thr: # NOTE: as a close-to-zero translation is not good for angle_error calculation + error_t = 0 + return error_t, error_R + +def convert_gt_T(T_0to1): + gt_R_degree = angle_error_mat(T_0to1[:, :3], np.eye(3)) + gt_t_dist = np.linalg.norm(T_0to1[:, 3]) + return gt_t_dist, gt_R_degree + +def pose_auc(errors, thresholds, ret_dict=False): + sort_idx = np.argsort(errors) + errors = np.array(errors.copy())[sort_idx] + recall = (np.arange(len(errors)) + 1) / len(errors) + errors = np.r_[0., errors] + recall = np.r_[0., recall] + aucs = [] + for t in thresholds: + last_index = np.searchsorted(errors, t) + r = np.r_[recall[:last_index], recall[last_index-1]] + e = np.r_[errors[:last_index], t] + aucs.append(np.trapz(r, x=e)/t) + if ret_dict: + return {f'auc@{t}': auc for t, auc in zip(thresholds, aucs)} + else: + return aucs + + +def epidist_prec(errors, thresholds, ret_dict=False): + precs = [] + for thr in thresholds: + prec_ = [] + for errs in errors: + correct_mask = errs < thr + prec_.append(np.mean(correct_mask) if len(correct_mask) > 0 else 0) + precs.append(np.mean(prec_) if len(prec_) > 0 else 0) + if ret_dict: + return {f'prec@{t:.0e}': prec for t, prec in zip(thresholds, precs)} + else: + return precs + +# --- VISUALIZATION --- +def plot_image_pair(imgs, dpi=100, size=6, pad=.5): + n = len(imgs) + assert n == 2, 'number of images must be two' + figsize = (size*n, size*3/4) if size is not None else None + _, ax = plt.subplots(1, n, figsize=figsize, dpi=dpi) + for i in range(n): + ax[i].imshow(imgs[i], cmap=plt.get_cmap('gray'), vmin=0, vmax=255) + ax[i].get_yaxis().set_ticks([]) + ax[i].get_xaxis().set_ticks([]) + for spine in ax[i].spines.values(): # remove frame + spine.set_visible(False) + plt.tight_layout(pad=pad) + + +def plot_keypoints(kpts0, kpts1, color='w', ps=2): + ax = plt.gcf().axes + ax[0].scatter(kpts0[:, 0], kpts0[:, 1], c=color, s=ps) + ax[1].scatter(kpts1[:, 0], kpts1[:, 1], c=color, s=ps) + + +def plot_matches(kpts0, kpts1, color, lw=1.5, ps=4): + fig = plt.gcf() + ax = fig.axes + fig.canvas.draw() + + transFigure = fig.transFigure.inverted() + fkpts0 = transFigure.transform(ax[0].transData.transform(kpts0)) + fkpts1 = transFigure.transform(ax[1].transData.transform(kpts1)) + + fig.lines = [matplotlib.lines.Line2D( + (fkpts0[i, 0], fkpts1[i, 0]), (fkpts0[i, 1], fkpts1[i, 1]), zorder=1, + transform=fig.transFigure, c=color[i], linewidth=lw) + for i in range(len(kpts0))] + ax[0].scatter(kpts0[:, 0], kpts0[:, 1], c=color, s=ps) + ax[1].scatter(kpts1[:, 0], kpts1[:, 1], c=color, s=ps) + + +def make_matching_plot(image0, image1, kpts0, kpts1, mkpts0, mkpts1, + color, text, path=None, show_keypoints=False, + fast_viz=False, opencv_display=False, + opencv_title='matches', small_text=[]): + + if fast_viz: + make_matching_plot_fast(image0, image1, kpts0, kpts1, mkpts0, mkpts1, + color, text, path, show_keypoints, 10, + opencv_display, opencv_title, small_text) + return + + plot_image_pair([image0, image1]) # will create a new figure + if 
show_keypoints: + plot_keypoints(kpts0, kpts1, color='k', ps=4) + plot_keypoints(kpts0, kpts1, color='w', ps=2) + plot_matches(mkpts0, mkpts1, color) + + fig = plt.gcf() + txt_color = 'k' if image0[:100, :150].mean() > 200 else 'w' + fig.text( + 0.01, 0.99, '\n'.join(text), transform=fig.axes[0].transAxes, + fontsize=15, va='top', ha='left', color=txt_color) + + txt_color = 'k' if image0[-100:, :150].mean() > 200 else 'w' + fig.text( + 0.01, 0.01, '\n'.join(small_text), transform=fig.axes[0].transAxes, + fontsize=5, va='bottom', ha='left', color=txt_color) + if path: + plt.savefig(str(path), bbox_inches='tight', pad_inches=0) + plt.close() + else: + # TODO: Would it leads to any issue without current figure opened? + return fig + + +def make_matching_plot_fast(image0, image1, kpts0, kpts1, mkpts0, + mkpts1, color, text, path=None, + show_keypoints=False, margin=10, + opencv_display=False, opencv_title='', + small_text=[]): + H0, W0 = image0.shape + H1, W1 = image1.shape + H, W = max(H0, H1), W0 + W1 + margin + + out = 255*np.ones((H, W), np.uint8) + out[:H0, :W0] = image0 + out[:H1, W0+margin:] = image1 + out = np.stack([out]*3, -1) + + if show_keypoints: + kpts0, kpts1 = np.round(kpts0).astype(int), np.round(kpts1).astype(int) + white = (255, 255, 255) + black = (0, 0, 0) + for x, y in kpts0: + cv2.circle(out, (x, y), 2, black, -1, lineType=cv2.LINE_AA) + cv2.circle(out, (x, y), 1, white, -1, lineType=cv2.LINE_AA) + for x, y in kpts1: + cv2.circle(out, (x + margin + W0, y), 2, black, -1, + lineType=cv2.LINE_AA) + cv2.circle(out, (x + margin + W0, y), 1, white, -1, + lineType=cv2.LINE_AA) + + mkpts0, mkpts1 = np.round(mkpts0).astype(int), np.round(mkpts1).astype(int) + color = (np.array(color[:, :3])*255).astype(int)[:, ::-1] + for (x0, y0), (x1, y1), c in zip(mkpts0, mkpts1, color): + c = c.tolist() + cv2.line(out, (x0, y0), (x1 + margin + W0, y1), + color=c, thickness=1, lineType=cv2.LINE_AA) + # display line end-points as circles + cv2.circle(out, (x0, y0), 2, c, -1, lineType=cv2.LINE_AA) + cv2.circle(out, (x1 + margin + W0, y1), 2, c, -1, + lineType=cv2.LINE_AA) + + # Scale factor for consistent visualization across scales. + sc = min(H / 640., 2.0) + + # Big text. + Ht = int(30 * sc) # text height + txt_color_fg = (255, 255, 255) + txt_color_bg = (0, 0, 0) + for i, t in enumerate(text): + cv2.putText(out, t, (int(8*sc), Ht*(i+1)), cv2.FONT_HERSHEY_DUPLEX, + 1.0*sc, txt_color_bg, 2, cv2.LINE_AA) + cv2.putText(out, t, (int(8*sc), Ht*(i+1)), cv2.FONT_HERSHEY_DUPLEX, + 1.0*sc, txt_color_fg, 1, cv2.LINE_AA) + + # Small text. 
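+    # (Small text is drawn bottom-up from the lower-left corner, mirroring the
+    # top-down placement of the big text above.)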
+ Ht = int(18 * sc) # text height + for i, t in enumerate(reversed(small_text)): + cv2.putText(out, t, (int(8*sc), int(H-Ht*(i+.6))), cv2.FONT_HERSHEY_DUPLEX, + 0.5*sc, txt_color_bg, 2, cv2.LINE_AA) + cv2.putText(out, t, (int(8*sc), int(H-Ht*(i+.6))), cv2.FONT_HERSHEY_DUPLEX, + 0.5*sc, txt_color_fg, 1, cv2.LINE_AA) + + if path is not None: + cv2.imwrite(str(path), out) + + if opencv_display: + cv2.imshow(opencv_title, out) + cv2.waitKey(1) + + return out + + +def error_colormap(x, alpha=1.0): + assert alpha <= 1.0 and alpha > 0, f"Invaid alpha value: {alpha}" + return np.clip( + np.stack([2-x*2, x*2, np.zeros_like(x), np.ones_like(x)*alpha], -1), 0, 1) + +def check_img_ok(img_path): + img_ok = True + try: + Image.open(str(img_path)).convert('RGB') + except: + img_ok = False + return img_ok \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/.gitignore b/imcui/third_party/MatchAnything/third_party/ROMA/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..ff4633046059da69ab4b6e222909614ccda82ac4 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/.gitignore @@ -0,0 +1,5 @@ +*.egg-info* +*.vscode* +*__pycache__* +vis* +workspace* \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/LICENSE b/imcui/third_party/MatchAnything/third_party/ROMA/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..ca95157052a76debc473afb395bffae0c1329e63 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 Johan Edstedt + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/README.md b/imcui/third_party/MatchAnything/third_party/ROMA/README.md new file mode 100644 index 0000000000000000000000000000000000000000..284d8f0bea84d7f67a416bc933067a3acfe23740 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/README.md @@ -0,0 +1,82 @@ +# +

+

+# RoMa 🏛️: Robust Dense Feature Matching
+
+Johan Edstedt · Qiyu Sun · Georg Bökman · Mårten Wadenbäck · Michael Felsberg
+
+Paper | Project Page
+
+*(example teaser figure)*
+
+RoMa is the robust dense feature matcher capable of estimating pixel-dense warps and reliable certainties for almost any image pair.
+ +## Setup/Install +In your python environment (tested on Linux python 3.10), run: +```bash +pip install -e . +``` +## Demo / How to Use +We provide two demos in the [demos folder](demo). +Here's the gist of it: +```python +from roma import roma_outdoor +roma_model = roma_outdoor(device=device) +# Match +warp, certainty = roma_model.match(imA_path, imB_path, device=device) +# Sample matches for estimation +matches, certainty = roma_model.sample(warp, certainty) +# Convert to pixel coordinates (RoMa produces matches in [-1,1]x[-1,1]) +kptsA, kptsB = roma_model.to_pixel_coordinates(matches, H_A, W_A, H_B, W_B) +# Find a fundamental matrix (or anything else of interest) +F, mask = cv2.findFundamentalMat( + kptsA.cpu().numpy(), kptsB.cpu().numpy(), ransacReprojThreshold=0.2, method=cv2.USAC_MAGSAC, confidence=0.999999, maxIters=10000 +) +``` + +**New**: You can also match arbitrary keypoints with RoMa. A demo for this will be added soon. + +## Reproducing Results +The experiments in the paper are provided in the [experiments folder](experiments). + +### Training +1. First follow the instructions provided here: https://github.com/Parskatt/DKM for downloading and preprocessing datasets. +2. Run the relevant experiment, e.g., +```bash +torchrun --nproc_per_node=4 --nnodes=1 --rdzv_backend=c10d experiments/roma_outdoor.py +``` +### Testing +```bash +python experiments/roma_outdoor.py --only_test --benchmark mega-1500 +``` +## License +All our code except DINOv2 is MIT license. +DINOv2 has an Apache 2 license [DINOv2](https://github.com/facebookresearch/dinov2/blob/main/LICENSE). + +## Acknowledgement +Our codebase builds on the code in [DKM](https://github.com/Parskatt/DKM). + +## BibTeX +If you find our models useful, please consider citing our paper! +``` +@article{edstedt2023roma, +title={{RoMa: Robust Dense Feature Matching}}, +author={Edstedt, Johan and Sun, Qiyu and Bökman, Georg and Wadenbäck, Mårten and Felsberg, Michael}, +journal={arXiv preprint arXiv:2305.15404}, +year={2023} +} +``` diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/data/.gitignore b/imcui/third_party/MatchAnything/third_party/ROMA/data/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..c96a04f008ee21e260b28f7701595ed59e2839e3 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/data/.gitignore @@ -0,0 +1,2 @@ +* +!.gitignore \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/demo/demo_3D_effect.py b/imcui/third_party/MatchAnything/third_party/ROMA/demo/demo_3D_effect.py new file mode 100644 index 0000000000000000000000000000000000000000..5afd6e5ce0fdd32788160e8c24df0b26a27f34dd --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/demo/demo_3D_effect.py @@ -0,0 +1,46 @@ +from PIL import Image +import torch +import torch.nn.functional as F +import numpy as np +from roma.utils.utils import tensor_to_pil + +from roma import roma_outdoor + +device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') + + +if __name__ == "__main__": + from argparse import ArgumentParser + parser = ArgumentParser() + parser.add_argument("--im_A_path", default="assets/toronto_A.jpg", type=str) + parser.add_argument("--im_B_path", default="assets/toronto_B.jpg", type=str) + parser.add_argument("--save_path", default="demo/gif/roma_warp_toronto", type=str) + + args, _ = parser.parse_known_args() + im1_path = args.im_A_path + im2_path = args.im_B_path + save_path = args.save_path + + # Create model + roma_model = 
roma_outdoor(device=device, coarse_res=560, upsample_res=(864, 1152)) + roma_model.symmetric = False + + H, W = roma_model.get_output_resolution() + + im1 = Image.open(im1_path).resize((W, H)) + im2 = Image.open(im2_path).resize((W, H)) + + # Match + warp, certainty = roma_model.match(im1_path, im2_path, device=device) + # Sampling not needed, but can be done with model.sample(warp, certainty) + x1 = (torch.tensor(np.array(im1)) / 255).to(device).permute(2, 0, 1) + x2 = (torch.tensor(np.array(im2)) / 255).to(device).permute(2, 0, 1) + + coords_A, coords_B = warp[...,:2], warp[...,2:] + for i, x in enumerate(np.linspace(0,2*np.pi,200)): + t = (1 + np.cos(x))/2 + interp_warp = (1-t)*coords_A + t*coords_B + im2_transfer_rgb = F.grid_sample( + x2[None], interp_warp[None], mode="bilinear", align_corners=False + )[0] + tensor_to_pil(im2_transfer_rgb, unnormalize=False).save(f"{save_path}_{i:03d}.jpg") \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/demo/demo_fundamental.py b/imcui/third_party/MatchAnything/third_party/ROMA/demo/demo_fundamental.py new file mode 100644 index 0000000000000000000000000000000000000000..fd89df18664446fbc5ca299e7c966663e8f30aed --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/demo/demo_fundamental.py @@ -0,0 +1,32 @@ +from PIL import Image +import torch +import cv2 +from roma import roma_outdoor + +device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') + + +if __name__ == "__main__": + from argparse import ArgumentParser + parser = ArgumentParser() + parser.add_argument("--im_A_path", default="assets/sacre_coeur_A.jpg", type=str) + parser.add_argument("--im_B_path", default="assets/sacre_coeur_B.jpg", type=str) + + args, _ = parser.parse_known_args() + im1_path = args.im_A_path + im2_path = args.im_B_path + # Create model + roma_model = roma_outdoor(device=device) + + + W_A, H_A = Image.open(im1_path).size + W_B, H_B = Image.open(im2_path).size + + # Match + warp, certainty = roma_model.match(im1_path, im2_path, device=device) + # Sample matches for estimation + matches, certainty = roma_model.sample(warp, certainty) + kpts1, kpts2 = roma_model.to_pixel_coordinates(matches, H_A, W_A, H_B, W_B) + F, mask = cv2.findFundamentalMat( + kpts1.cpu().numpy(), kpts2.cpu().numpy(), ransacReprojThreshold=0.2, method=cv2.USAC_MAGSAC, confidence=0.999999, maxIters=10000 + ) \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/demo/demo_fundamental_model_warpper.py b/imcui/third_party/MatchAnything/third_party/ROMA/demo/demo_fundamental_model_warpper.py new file mode 100644 index 0000000000000000000000000000000000000000..c5cb86202867dad000f7357d18e3bf1e8b4955a5 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/demo/demo_fundamental_model_warpper.py @@ -0,0 +1,34 @@ +from PIL import Image +import torch +import cv2 +import sys +from pathlib import Path +sys.path.append(str(Path(__file__).parent.parent.resolve())) +from roma.roma_adpat_model import ROMA_Model + +device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') + + +if __name__ == "__main__": + from argparse import ArgumentParser + parser = ArgumentParser() + parser.add_argument("--im_A_path", default="assets/sacre_coeur_A.jpg", type=str) + parser.add_argument("--im_B_path", default="assets/sacre_coeur_B.jpg", type=str) + + args, _ = parser.parse_known_args() + im1_path = args.im_A_path + im2_path = args.im_B_path + # Create model + model = ROMA_Model({"n_sample": 5000}) + + + W_A, 
H_A = Image.open(im1_path).size + W_B, H_B = Image.open(im2_path).size + + # Match + match_results = model({"image0_path": im1_path, "image1_path": im2_path}) + kpts1, kpts2 = match_results['mkpts0_f'], match_results['mkpts1_f'] + # Sample matches for estimation + F, mask = cv2.findFundamentalMat( + kpts1.cpu().numpy(), kpts2.cpu().numpy(), ransacReprojThreshold=0.2, method=cv2.USAC_MAGSAC, confidence=0.999999, maxIters=10000 + ) \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/demo/demo_match.py b/imcui/third_party/MatchAnything/third_party/ROMA/demo/demo_match.py new file mode 100644 index 0000000000000000000000000000000000000000..0b49ad510c02f9dd022e077667c13ee2bcb7eca8 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/demo/demo_match.py @@ -0,0 +1,50 @@ +from PIL import Image +import torch +import torch.nn.functional as F +import numpy as np +import sys +from pathlib import Path +sys.path.append(str(Path(__file__).parent.parent.resolve())) +from roma.utils.utils import tensor_to_pil + +from roma import roma_outdoor + +device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') + + +if __name__ == "__main__": + from argparse import ArgumentParser + parser = ArgumentParser() + parser.add_argument("--im_A_path", default="assets/toronto_A.jpg", type=str) + parser.add_argument("--im_B_path", default="assets/toronto_B.jpg", type=str) + parser.add_argument("--save_path", default="demo/roma_warp_toronto.jpg", type=str) + + args, _ = parser.parse_known_args() + im1_path = args.im_A_path + im2_path = args.im_B_path + save_path = args.save_path + + # Create model + roma_model = roma_outdoor(device=device, coarse_res=560, upsample_res=(864, 1152)) + + H, W = roma_model.get_output_resolution() + + im1 = Image.open(im1_path).resize((W, H)) + im2 = Image.open(im2_path).resize((W, H)) + + # Match + warp, certainty = roma_model.match(im1_path, im2_path, device=device) + # Sampling not needed, but can be done with model.sample(warp, certainty) + x1 = (torch.tensor(np.array(im1)) / 255).to(device).permute(2, 0, 1) + x2 = (torch.tensor(np.array(im2)) / 255).to(device).permute(2, 0, 1) + + im2_transfer_rgb = F.grid_sample( + x2[None], warp[:,:W, 2:][None], mode="bilinear", align_corners=False + )[0] + im1_transfer_rgb = F.grid_sample( + x1[None], warp[:, W:, :2][None], mode="bilinear", align_corners=False + )[0] + warp_im = torch.cat((im2_transfer_rgb,im1_transfer_rgb),dim=2) + white_im = torch.ones((H,2*W),device=device) + vis_im = certainty * warp_im + (1 - certainty) * white_im + tensor_to_pil(vis_im, unnormalize=False).save(save_path) \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/demo/demo_match_opencv_sift.py b/imcui/third_party/MatchAnything/third_party/ROMA/demo/demo_match_opencv_sift.py new file mode 100644 index 0000000000000000000000000000000000000000..3196fcfaab248f6c4c6247a0afb4db745206aee8 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/demo/demo_match_opencv_sift.py @@ -0,0 +1,43 @@ +from PIL import Image +import numpy as np + +import numpy as np +import cv2 as cv +import matplotlib.pyplot as plt + + + +if __name__ == "__main__": + from argparse import ArgumentParser + parser = ArgumentParser() + parser.add_argument("--im_A_path", default="assets/toronto_A.jpg", type=str) + parser.add_argument("--im_B_path", default="assets/toronto_B.jpg", type=str) + parser.add_argument("--save_path", default="demo/roma_warp_toronto.jpg", type=str) + + args, _ = 
parser.parse_known_args() + im1_path = args.im_A_path + im2_path = args.im_B_path + save_path = args.save_path + + img1 = cv.imread(im1_path,cv.IMREAD_GRAYSCALE) # queryImage + img2 = cv.imread(im2_path,cv.IMREAD_GRAYSCALE) # trainImage + # Initiate SIFT detector + sift = cv.SIFT_create() + # find the keypoints and descriptors with SIFT + kp1, des1 = sift.detectAndCompute(img1,None) + kp2, des2 = sift.detectAndCompute(img2,None) + # BFMatcher with default params + bf = cv.BFMatcher() + matches = bf.knnMatch(des1,des2,k=2) + # Apply ratio test + good = [] + for m,n in matches: + if m.distance < 0.75*n.distance: + good.append([m]) + # cv.drawMatchesKnn expects list of lists as matches. + draw_params = dict(matchColor = (255,0,0), # draw matches in red color + singlePointColor = None, + flags = 2) + + img3 = cv.drawMatchesKnn(img1,kp1,img2,kp2,good,None,**draw_params) + Image.fromarray(img3).save("demo/sift_matches.png") diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/demo/gif/.gitignore b/imcui/third_party/MatchAnything/third_party/ROMA/demo/gif/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..c96a04f008ee21e260b28f7701595ed59e2839e3 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/demo/gif/.gitignore @@ -0,0 +1,2 @@ +* +!.gitignore \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/demo_single_pair.py b/imcui/third_party/MatchAnything/third_party/ROMA/demo_single_pair.py new file mode 100644 index 0000000000000000000000000000000000000000..d2e11bce5a7197967697f7ebb2a76a9c7250f09a --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/demo_single_pair.py @@ -0,0 +1,329 @@ +import os +#os.chdir("..") +import torch +import cv2 +from time import time +from loguru import logger +import numpy as np +import matplotlib.cm as cm +import matplotlib.pyplot as plt +from notebooks.notebooks_utils import make_matching_figure, show_image_pair +import PIL +import torch.nn.functional as F +import pydegensac +from roma.roma_adpat_model import ROMA_Model + +def extract_geo_model_inliers(mkpts0, mkpts1, mconfs, + geo_model, ransac_method, pixel_thr, max_iters, conf_thr, + K0=None, K1=None): + if geo_model == 'E': + f_mean = np.mean([K0[0, 0], K1[1, 1], K0[0, 0], K1[1, 1]]) + pixel_thr = pixel_thr / f_mean + + mkpts0, mkpts1 = map(lambda x: normalize_ketpoints(*x), [(mkpts0, K0), (mkpts1, K1)]) + + if ransac_method == 'RANSAC': + if geo_model == 'E': + E, mask = cv2.findEssentialMat(mkpts0, + mkpts1, + np.eye(3), + threshold=pixel_thr, + prob=conf_thr, + method=cv2.RANSAC) + elif geo_model == 'F': + F, mask = cv2.findFundamentalMat(mkpts0, + mkpts1, + method=cv2.FM_RANSAC, + ransacReprojThreshold=pixel_thr, + confidence=conf_thr, + maxIters=max_iters) + elif ransac_method == 'DEGENSAC': + assert geo_model == 'F' + F, mask = pydegensac.findFundamentalMatrix(mkpts0, + mkpts1, + px_th=pixel_thr, + conf=conf_thr, + max_iters=max_iters) + elif ransac_method == 'MAGSAC': + params = cv2.UsacParams() + # params.threshold = pixel_thr + # params.confidence = conf_thr + # params.maxIterations = max_iters + # params.randomGeneratorState = 0 + # params. 
+ # F, mask = cv2.findFundamentalMat(mkpts0, + # mkpts1, + # method=cv2.USAC_MAGSAC, + # ) + F, mask = cv2.findFundamentalMat(mkpts0, + mkpts1, + method=cv2.USAC_MAGSAC, + ransacReprojThreshold=pixel_thr, + confidence=conf_thr, + maxIters=max_iters) + else: + raise ValueError() + + if mask is not None: + mask = mask.astype(bool).flatten() + else: + mask = np.full_like(mconfs, True, dtype=np.bool) + return mask + +def extract_inliers(data, args): + """extract inlier matches assume bs==1. + NOTE: If no inliers found, keep all matches. + """ + mkpts0, mkpts1, mconfs= extract_preds(data) + K0 = data['K0'][0].cpu().numpy() if args.geo_model == 'E' else None + K1 = data['K1'][0].cpu().numpy() if args.geo_model == 'E' else None + if len(mkpts0) >=8 : + inliers = extract_geo_model_inliers(mkpts0, mkpts1, mconfs, + args.geo_model, args.ransac_method, args.pixel_thr, args.max_iters, args.conf_thr, + K0=K0, K1=K1) + mkpts0, mkpts1, mconfs = map(lambda x: x[inliers], [mkpts0, mkpts1, mconfs, detector_kpts_mask]) + +# The default config uses dual-softmax. +# The outdoor and indoor models share the same config. +# You can change the default values like thr and coarse_match_type. +if __name__ == "__main__": + # matching_method = 'SuperPoint+SuperGlue' + matching_method = 'ROMA' + # enable_geometric_verify = False + enable_geometric_verify = True + loftr_cfg_path = "configs/loftr/matchanything/exps/loftr_ds_dense_PAN_M2D_noalign_repvgg_fpn_fp16_nf_conly_inter_clip0_dense_skipsoft_match_sparse_spv.py" + loftr_model_path = "logs/tb_logs/megadepth_trainval_1024_with_depth_modal_with_glddepthwarp_with_thermaltest@-@loftr_ds_dense_PAN_M2D_noalign_repvgg_fpn_fp16_nf_conly_inter_clip0_dense_skipsoft_match_sparse_spv-bs12/version_0/checkpoints/last.ckpt" + pixel_thr = 2.0 + img_resize = 840 + img_warp_back = True + if matching_method == 'SuperPoint+SuperGlue': + matcher = SPPSPG() + matcher = matcher.eval().cuda() + elif matching_method == 'LoFTR': + config = get_cfg_defaults() + config.merge_from_file(loftr_cfg_path) + config = lower_config(config) + matcher = LoFTR(config=config['loftr']) + # matcher = LoFTR(config=default_ot_cfg) + ckpt = torch.load( + loftr_model_path, map_location="cpu" + )["state_dict"] + for k in list(ckpt.keys()): + if 'matcher' in k: + newk = k[k.find("matcher")+len('matcher')+1:] + ckpt[newk] = ckpt[k] + ckpt.pop(k) + matcher.load_state_dict(ckpt) + matcher = matcher.eval().cuda() + elif matching_method == 'ROMA': + # matcher = ROMA_Model({"n_sample": 5000, "load_img_in_model": False}) + matcher = ROMA_Model({"n_sample": 5000, "load_img_in_model": True}) + + # rotation_degree = -90 + # rotation_degree = 30 + # rotation_degree = -90 + # rotation_degree = -45 + # rotation_degree = 45 + # rotation_degree = 15 + + # scene_name = 'thermal' + # # img0_pth = "assets/rgb_daytime.jpg" + # # img0_pth = "/data/hexingyi/code/LoFTR/assets/rgb_daytime_6446.jpg" + # img0_pth = "/data/hexingyi/code/LoFTR/assets/rgb_daytime.jpg" + # img1_pth = "/data/hexingyi/code/LoFTR/assets/thermal_daytime1.jpg" + # rotation_degree = 0 + # pixel_thr = 1.0 + + # scene_name = 'satellite' + # img0_pth = "/data/hexingyi/code/LoFTR/assets/satellite.jpg" + # img1_pth = "/data/hexingyi/code/LoFTR/assets/airplane.jpg" + # rotation_degree = 60 + # pixel_thr = 4.0 + + # scene_name = 'satellite4' + # img0_pth = "/data/common_dataset/uva_localization_data/cropped_map_images/214_115.9440317_115.9540317_40.367160000000005_40.37716.png" + # img1_pth = "/data/hexingyi/code/UAV_Loc/0.png" + # # img1_pth = 
"/data/hexingyi/code/LoFTR/assets/airplane2.png" + # rotation_degree = -30 + # pixel_thr = 4.0 + + # scene_name = 'satellite2' + # img0_pth = "/data/hexingyi/code/LoFTR/assets/satellite.jpg" + # img1_pth = "/data/hexingyi/code/LoFTR/assets/airplane2_cropped.jpeg" + # # img1_pth = "/data/hexingyi/code/LoFTR/assets/airplane2.png" + # rotation_degree = 0 + # pixel_thr = 2.0 + + # scene_name = 'satellite3' + # # img0_pth = "/data/hexingyi/code/LoFTR/assets/airplane3_cropped.jpeg" + # img0_pth = "/data/hexingyi/code/LoFTR/assets/airplane3_squere.jpg" + # img1_pth = "/data/hexingyi/code/LoFTR/assets/satellite_squere.jpg" + # # img1_pth = "/data/hexingyi/code/LoFTR/assets/airplane2.png" + # pixel_thr = 2.0 + + # scene_name = 'yanshen_demo' + # # img0_pth = "/data/hexingyi/code/LoFTR/assets/airplane3_cropped.jpeg" + # img0_pth = "/data/hexingyi/code/LoFTR/assets/view3_new.png" + # img1_pth = "/data/hexingyi/code/LoFTR/assets/view1.png" + # # img1_pth = "/data/hexingyi/code/LoFTR/assets/airplane2.png" + # rotation_degree = 0 + # pixel_thr = 2.0 + + # scene_name = 'map' + # img0_pth = "/data/hexingyi/code/LoFTR/assets/pair76_1.jpg" + # img1_pth = "/data/hexingyi/code/LoFTR/assets/pair76_2.jpg" + # pixel_thr = 4.0 + # rotation_degree = 0 + + scene_name = 'sar' + img0_pth = "/data/hexingyi/code/LoFTR/assets/rgb_pair_24_1.jpg" + img0_pth_ = "/data/hexingyi/code/LoFTR/assets/rgb_pair_24_1_edited.jpg" + img1_pth = "/data/hexingyi/code/LoFTR/assets/sar_pair24_2.jpg" + img1_pth_ = "/data/hexingyi/code/LoFTR/assets/sar_pair24_2_edited.jpg" + pixel_thr = 4.0 + rotation_degree = 0 + + + # scene_name = 'sar2' + # img0_pth = "/data/hexingyi/code/LoFTR/assets/pair183_1.jpg" + # img1_pth = "/data/hexingyi/code/LoFTR/assets/pair183_2.jpg" + # img1_pth_ = "/data/hexingyi/code/LoFTR/assets/pair183_2_edited.jpg" + # pixel_thr = 4.0 + # rotation_degree = 0 + + # scene_name = 'medacine' + # img0_pth = "/data/hexingyi/code/LoFTR/assets/ct.png" + # img1_pth = "/data/hexingyi/code/LoFTR/assets/mri.png" + # rotation_degree = 0 + # pixel_thr = 0.8 + + # scene_name = 'medacine2' + # img0_pth = "/data/hexingyi/code/LoFTR/assets/ct2.png" + # img1_pth = "/data/hexingyi/code/LoFTR/assets/mri2.png" + # rotation_degree = 0 + # # pixel_thr = 0.8 + + # scene_name = 'deepsea' + # img0_pth = "/data/hexingyi/code/LoFTR/assets/deepsea540.png" + # # img1_pth = "/data/hexingyi/code/LoFTR/assets/deepsea700.png" + # img1_pth = "/data/hexingyi/code/LoFTR/assets/deepsea789.png" + # rotation_degree = 0 + + img0_raw = cv2.imread(img0_pth, cv2.IMREAD_GRAYSCALE) + img1_raw = cv2.imread(img1_pth, cv2.IMREAD_GRAYSCALE) + + try: + img0_origin = cv2.cvtColor(cv2.imread(img0_pth_), cv2.COLOR_BGR2RGB) + except: + img0_origin = cv2.cvtColor(cv2.imread(img0_pth), cv2.COLOR_BGR2RGB) + # img0_origin = cv2.rotate(img0_origin, cv2.cv2.ROTATE_90_CLOCKWISE) + if not img_warp_back: + img0_origin, warp_matrix = rotate_image(img0_origin, rotation_degree, preserve_full_img=False) + + try: + img1_origin = cv2.cvtColor(cv2.imread(img1_pth_),cv2.COLOR_BGR2RGB) + except: + img1_origin = cv2.cvtColor(cv2.imread(img1_pth),cv2.COLOR_BGR2RGB) + + # Inference with LoFTR and get prediction + with torch.no_grad(): + batch = matcher({"image0_path": img0_pth, "image1_path": img1_pth}) + mkpts0 = batch['mkpts0_f'].cpu().numpy() + mkpts1 = batch['mkpts1_f'].cpu().numpy() + mconf = batch['mconf'].cpu().numpy() + + kpts0 = batch['keypoints0'][0].cpu().numpy() if "keypoints0" in batch else None + kpts1 = batch['keypoints1'][0].cpu().numpy() if "keypoints1" in batch else None + + if 
enable_geometric_verify and mkpts0.shape[0] >= 8: + t0 = time() + # inliers = extract_geo_model_inliers(mkpts0, mkpts1, mconf, + # geo_model="F", ransac_method='MAGSAC', pixel_thr=1.0, max_iters=10000, conf_thr=0.99999, + # K0=None, K1=None) + + inliers = extract_geo_model_inliers(mkpts0, mkpts1, mconf, + # geo_model="F", ransac_method='MAGSAC', pixel_thr=pixel_thr, max_iters=10000, conf_thr=0.99999, + geo_model="F", ransac_method='DEGENSAC', pixel_thr=pixel_thr, max_iters=10000, conf_thr=0.99999, + K0=None, K1=None) + t1 = time() + mkpts0, mkpts1, mconf = map(lambda x: x[inliers], [mkpts0, mkpts1, mconf]) + print(f"Ransac takes:{t1-t0}, num inlier:{mkpts0.shape[0]}") + else: + logger.info("Geometry Verify is not Performed.") + + # Draw + alpha = 0.5 if matching_method == 'SuperPoint+SuperGlue' else 0.15 + color = cm.jet(mconf, alpha=alpha) + text = [ + matching_method, + 'Number of Matches: {}'.format(len(mkpts0)), + ] + + vertical = True + #fig = make_matching_figure(img0_raw, img1_raw, mkpts0, mkpts1, color, text) + # fig = make_matching_figure(img1_raw, img1_raw, mkpts0, mkpts1, color, text, path="/home/hexingyi/code/LoFTR/matching_vertical.jpg", vertical=False) + if kpts0 is not None and kpts1 is not None: + text=[] + fig = make_matching_figure(img0_origin, img1_origin, mkpts0, mkpts1, color, kpts0=kpts0, kpts1=kpts1,text=text, draw_detection=True, draw_match_type=None, path=f"matching_horizontal_{matching_method}_{scene_name}_detection.jpg", vertical=vertical, plot_size_factor=3 if matching_method == 'SuperPoint+SuperGlue' else 1) + # fig = make_matching_figure(img0_origin, img1_origin, mkpts0, mkpts1, color, text=text, path=f"matching_horizontal_{matching_method}_{scene_name}.jpg", vertical=False) + + text=[] + # draw_match_type = "color" + draw_match_type = "corres" + # fig = make_matching_figure(img0_origin, img1_origin, mkpts0, mkpts1, color, text=text, path=f"{scene_name}_{matching_method}_matching{'_ransac' if enable_geometric_verify else ''}.jpg", vertical=vertical, plot_size_factor= 3 if matching_method == 'SuperPoint+SuperGlue' else 1) + # fig = make_matching_figure(img0_origin, img1_origin, mkpts0, mkpts1, color, text=text, path=f"{scene_name}_{matching_method}_matching{'_ransac' if enable_geometric_verify else ''}_{draw_match_type}.jpg", vertical=False, plot_size_factor= 3 if matching_method == 'SuperPoint+SuperGlue' else 1, draw_match_type=draw_match_type, r_normalize_factor=0.4) + fig = make_matching_figure(img0_origin, img1_origin, mkpts0, mkpts1, color, text=text, path=f"{scene_name}_{matching_method}_matching{'_ransac' if enable_geometric_verify else ''}_{draw_match_type}.jpg", vertical=True, plot_size_factor= 3 if matching_method == 'SuperPoint+SuperGlue' else 1, draw_match_type=draw_match_type, r_normalize_factor=0.4) + # fig = make_matching_figure(img0_origin, img1_origin, mkpts0, mkpts1, color, text=text, path=f"{scene_name}_{matching_method}_matching{'_ransac' if enable_geometric_verify else ''}_{draw_match_type}.jpg", vertical=False, plot_size_factor= 3 if matching_method == 'SuperPoint+SuperGlue' else 1, draw_match_type=draw_match_type, r_normalize_factor=0.4, use_position_color=True) + + # # visualize pca + # from sklearn.decomposition import PCA + # pca = PCA(n_components=3 ,svd_solver='arpack') + + # # visualize pca for backbone feature + # # feat: h*w*c + # feat0 = feat_c0 + # feat1 = feat_c1 + + # h,w,c = feat0.shape + # feat = np.concatenate([feat0.reshape(-1,c), feat1.reshape(-1, c)], axis=0) + # test_pca = np.random.rand(*feat.shape) + # feat_pca = 
pca.fit_transform(feat) + + # feat_pca0, feat_pca1 = feat_pca[:h*w].reshape(h,w,3), feat_pca[h*w:].reshape(h,w,3) + # feat_pca_cv2 = cv2.normalize(np.concatenate([feat_pca0,feat_pca1], axis=1), None, alpha=0, beta=255, norm_type=cv2.NORM_MINMAX,dtype=cv2.CV_8UC3) + # feat_pca_cv2_resize = cv2.resize(feat_pca_cv2,(w*2*8, h*8), interpolation=cv2.INTER_LINEAR) + + # feat_pca_resize0, feat_pca_resize1 = feat_pca_cv2_resize[:,:w*8,:], feat_pca_cv2_resize[:,w*8:,:] + # feat_map_gapped = np.hstack((feat_pca_resize0, np.ones((h*8, 10, 3),dtype=np.uint8)*255, feat_pca_resize1)) + + # # draw backbone feature pca + # fig, axes = plt.subplots(1,1,dpi=100) + # axes.imshow(feat_map_gapped) + # axes.get_yaxis().set_ticks([]) + # axes.get_xaxis().set_ticks([]) + # plt.tight_layout(pad=.5) + # plt.savefig('/home/hexingyi/code/LoFTR/backbone_feature.jpg') + + # # visualize pca for loftr coarse feature + # # feat: hw*c + # feat0 = loftr_c0 + # feat1 = loftr_c1 + + # h,w = feat_c0.shape[:2] + # c = loftr_c0.shape[-1] + # feat = np.concatenate([feat0, feat1], axis=0) + # feat_pca = pca.fit_transform(feat) + # feat_pca0, feat_pca1 = feat_pca[:h*w].reshape(h,w,3), feat_pca[h*w:].reshape(h,w,3) + # feat_pca_cv2 = cv2.normalize(np.concatenate([feat_pca0,feat_pca1], axis=1), None, alpha=0, beta=255, norm_type=cv2.NORM_MINMAX,dtype=cv2.CV_8UC3) + # feat_pca_cv2_resize = cv2.resize(feat_pca_cv2,(w*2*8, h*8), interpolation=cv2.INTER_LINEAR) + + # feat_pca_resize0, feat_pca_resize1 = feat_pca_cv2_resize[:,:w*8,:], feat_pca_cv2_resize[:,w*8:,:] + # feat_map_gapped = np.hstack((feat_pca_resize0, np.ones((h*8, 10, 3),dtype=np.uint8)*255, feat_pca_resize1)) + + # # draw patches + # fig, axes = plt.subplots(1,dpi=100) + # axes.imshow(feat_map_gapped) + # axes.get_yaxis().set_ticks([]) + # axes.get_xaxis().set_ticks([]) + # plt.tight_layout(pad=.5) + # plt.savefig('/home/hexingyi/code/LoFTR/loftr_coarse_feature.jpg') \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/experiments/roma_indoor.py b/imcui/third_party/MatchAnything/third_party/ROMA/experiments/roma_indoor.py new file mode 100644 index 0000000000000000000000000000000000000000..61734f2d452f47c448f4eb1f115bf391c92d16ab --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/experiments/roma_indoor.py @@ -0,0 +1,320 @@ +import os +import torch +from argparse import ArgumentParser + +from torch import nn +from torch.utils.data import ConcatDataset +import torch.distributed as dist +from torch.nn.parallel import DistributedDataParallel as DDP + +import json +import wandb +from tqdm import tqdm + +from roma.benchmarks import MegadepthDenseBenchmark +from roma.datasets.megadepth import MegadepthBuilder +from roma.datasets.scannet import ScanNetBuilder +from roma.losses.robust_loss import RobustLosses +from roma.benchmarks import MegadepthDenseBenchmark, ScanNetBenchmark +from roma.train.train import train_k_steps +from roma.models.matcher import * +from roma.models.transformer import Block, TransformerDecoder, MemEffAttention +from roma.models.encoders import * +from roma.checkpointing import CheckPoint + +resolutions = {"low":(448, 448), "medium":(14*8*5, 14*8*5), "high":(14*8*6, 14*8*6)} + +def get_model(pretrained_backbone=True, resolution = "medium", **kwargs): + gp_dim = 512 + feat_dim = 512 + decoder_dim = gp_dim + feat_dim + cls_to_coord_res = 64 + coordinate_decoder = TransformerDecoder( + nn.Sequential(*[Block(decoder_dim, 8, attn_class=MemEffAttention) for _ in range(5)]), + decoder_dim, + cls_to_coord_res**2 
+ 1, + is_classifier=True, + amp = True, + pos_enc = False,) + dw = True + hidden_blocks = 8 + kernel_size = 5 + displacement_emb = "linear" + disable_local_corr_grad = True + + conv_refiner = nn.ModuleDict( + { + "16": ConvRefiner( + 2 * 512+128+(2*7+1)**2, + 2 * 512+128+(2*7+1)**2, + 2 + 1, + kernel_size=kernel_size, + dw=dw, + hidden_blocks=hidden_blocks, + displacement_emb=displacement_emb, + displacement_emb_dim=128, + local_corr_radius = 7, + corr_in_other = True, + amp = True, + disable_local_corr_grad = disable_local_corr_grad, + bn_momentum = 0.01, + ), + "8": ConvRefiner( + 2 * 512+64+(2*3+1)**2, + 2 * 512+64+(2*3+1)**2, + 2 + 1, + kernel_size=kernel_size, + dw=dw, + hidden_blocks=hidden_blocks, + displacement_emb=displacement_emb, + displacement_emb_dim=64, + local_corr_radius = 3, + corr_in_other = True, + amp = True, + disable_local_corr_grad = disable_local_corr_grad, + bn_momentum = 0.01, + ), + "4": ConvRefiner( + 2 * 256+32+(2*2+1)**2, + 2 * 256+32+(2*2+1)**2, + 2 + 1, + kernel_size=kernel_size, + dw=dw, + hidden_blocks=hidden_blocks, + displacement_emb=displacement_emb, + displacement_emb_dim=32, + local_corr_radius = 2, + corr_in_other = True, + amp = True, + disable_local_corr_grad = disable_local_corr_grad, + bn_momentum = 0.01, + ), + "2": ConvRefiner( + 2 * 64+16, + 128+16, + 2 + 1, + kernel_size=kernel_size, + dw=dw, + hidden_blocks=hidden_blocks, + displacement_emb=displacement_emb, + displacement_emb_dim=16, + amp = True, + disable_local_corr_grad = disable_local_corr_grad, + bn_momentum = 0.01, + ), + "1": ConvRefiner( + 2 * 9 + 6, + 24, + 2 + 1, + kernel_size=kernel_size, + dw=dw, + hidden_blocks = hidden_blocks, + displacement_emb = displacement_emb, + displacement_emb_dim = 6, + amp = True, + disable_local_corr_grad = disable_local_corr_grad, + bn_momentum = 0.01, + ), + } + ) + kernel_temperature = 0.2 + learn_temperature = False + no_cov = True + kernel = CosKernel + only_attention = False + basis = "fourier" + gp16 = GP( + kernel, + T=kernel_temperature, + learn_temperature=learn_temperature, + only_attention=only_attention, + gp_dim=gp_dim, + basis=basis, + no_cov=no_cov, + ) + gps = nn.ModuleDict({"16": gp16}) + proj16 = nn.Sequential(nn.Conv2d(1024, 512, 1, 1), nn.BatchNorm2d(512)) + proj8 = nn.Sequential(nn.Conv2d(512, 512, 1, 1), nn.BatchNorm2d(512)) + proj4 = nn.Sequential(nn.Conv2d(256, 256, 1, 1), nn.BatchNorm2d(256)) + proj2 = nn.Sequential(nn.Conv2d(128, 64, 1, 1), nn.BatchNorm2d(64)) + proj1 = nn.Sequential(nn.Conv2d(64, 9, 1, 1), nn.BatchNorm2d(9)) + proj = nn.ModuleDict({ + "16": proj16, + "8": proj8, + "4": proj4, + "2": proj2, + "1": proj1, + }) + displacement_dropout_p = 0.0 + gm_warp_dropout_p = 0.0 + decoder = Decoder(coordinate_decoder, + gps, + proj, + conv_refiner, + detach=True, + scales=["16", "8", "4", "2", "1"], + displacement_dropout_p = displacement_dropout_p, + gm_warp_dropout_p = gm_warp_dropout_p) + h,w = resolutions[resolution] + encoder = CNNandDinov2( + cnn_kwargs = dict( + pretrained=pretrained_backbone, + amp = True), + amp = True, + use_vgg = True, + ) + matcher = RegressionMatcher(encoder, decoder, h=h, w=w, alpha=1, beta=0,**kwargs) + return matcher + +def train(args): + dist.init_process_group('nccl') + #torch._dynamo.config.verbose=True + gpus = int(os.environ['WORLD_SIZE']) + # create model and move it to GPU with id rank + rank = dist.get_rank() + print(f"Start running DDP on rank {rank}") + device_id = rank % torch.cuda.device_count() + roma.LOCAL_RANK = device_id + torch.cuda.set_device(device_id) + + resolution = 
args.train_resolution + wandb_log = not args.dont_log_wandb + experiment_name = os.path.splitext(os.path.basename(__file__))[0] + wandb_mode = "online" if wandb_log and rank == 0 and False else "disabled" + wandb.init(project="roma", entity=args.wandb_entity, name=experiment_name, reinit=False, mode = wandb_mode) + checkpoint_dir = "workspace/checkpoints/" + h,w = resolutions[resolution] + model = get_model(pretrained_backbone=True, resolution=resolution, attenuate_cert = False).to(device_id) + # Num steps + global_step = 0 + batch_size = args.gpu_batch_size + step_size = gpus*batch_size + roma.STEP_SIZE = step_size + + N = (32 * 250000) # 250k steps of batch size 32 + # checkpoint every + k = 25000 // roma.STEP_SIZE + + # Data + mega = MegadepthBuilder(data_root="data/megadepth", loftr_ignore=True, imc21_ignore = True) + use_horizontal_flip_aug = True + rot_prob = 0 + depth_interpolation_mode = "bilinear" + megadepth_train1 = mega.build_scenes( + split="train_loftr", min_overlap=0.01, shake_t=32, use_horizontal_flip_aug = use_horizontal_flip_aug, rot_prob = rot_prob, + ht=h,wt=w, + ) + megadepth_train2 = mega.build_scenes( + split="train_loftr", min_overlap=0.35, shake_t=32, use_horizontal_flip_aug = use_horizontal_flip_aug, rot_prob = rot_prob, + ht=h,wt=w, + ) + megadepth_train = ConcatDataset(megadepth_train1 + megadepth_train2) + mega_ws = mega.weight_scenes(megadepth_train, alpha=0.75) + + scannet = ScanNetBuilder(data_root="data/scannet") + scannet_train = scannet.build_scenes(split="train", ht=h, wt=w, use_horizontal_flip_aug = use_horizontal_flip_aug) + scannet_train = ConcatDataset(scannet_train) + scannet_ws = scannet.weight_scenes(scannet_train, alpha=0.75) + + # Loss and optimizer + depth_loss_scannet = RobustLosses( + ce_weight=0.0, + local_dist={1:4, 2:4, 4:8, 8:8}, + local_largest_scale=8, + depth_interpolation_mode=depth_interpolation_mode, + alpha = 0.5, + c = 1e-4,) + # Loss and optimizer + depth_loss_mega = RobustLosses( + ce_weight=0.01, + local_dist={1:4, 2:4, 4:8, 8:8}, + local_largest_scale=8, + depth_interpolation_mode=depth_interpolation_mode, + alpha = 0.5, + c = 1e-4,) + parameters = [ + {"params": model.encoder.parameters(), "lr": roma.STEP_SIZE * 5e-6 / 8}, + {"params": model.decoder.parameters(), "lr": roma.STEP_SIZE * 1e-4 / 8}, + ] + optimizer = torch.optim.AdamW(parameters, weight_decay=0.01) + lr_scheduler = torch.optim.lr_scheduler.MultiStepLR( + optimizer, milestones=[(9*N/roma.STEP_SIZE)//10]) + megadense_benchmark = MegadepthDenseBenchmark("data/megadepth", num_samples = 1000, h=h,w=w) + checkpointer = CheckPoint(checkpoint_dir, experiment_name) + model, optimizer, lr_scheduler, global_step = checkpointer.load(model, optimizer, lr_scheduler, global_step) + roma.GLOBAL_STEP = global_step + ddp_model = DDP(model, device_ids=[device_id], find_unused_parameters = False, gradient_as_bucket_view=True) + grad_scaler = torch.cuda.amp.GradScaler(growth_interval=1_000_000) + grad_clip_norm = 0.01 + for n in range(roma.GLOBAL_STEP, N, k * roma.STEP_SIZE): + mega_sampler = torch.utils.data.WeightedRandomSampler( + mega_ws, num_samples = batch_size * k, replacement=False + ) + mega_dataloader = iter( + torch.utils.data.DataLoader( + megadepth_train, + batch_size = batch_size, + sampler = mega_sampler, + num_workers = 8, + ) + ) + scannet_ws_sampler = torch.utils.data.WeightedRandomSampler( + scannet_ws, num_samples=batch_size * k, replacement=False + ) + scannet_dataloader = iter( + torch.utils.data.DataLoader( + scannet_train, + batch_size=batch_size, + 
sampler=scannet_ws_sampler, + num_workers=gpus * 8, + ) + ) + for n_k in tqdm(range(n, n + 2 * k, 2),disable = roma.RANK > 0): + train_k_steps( + n_k, 1, mega_dataloader, ddp_model, depth_loss_mega, optimizer, lr_scheduler, grad_scaler, grad_clip_norm = grad_clip_norm, progress_bar=False + ) + train_k_steps( + n_k + 1, 1, scannet_dataloader, ddp_model, depth_loss_scannet, optimizer, lr_scheduler, grad_scaler, grad_clip_norm = grad_clip_norm, progress_bar=False + ) + checkpointer.save(model, optimizer, lr_scheduler, roma.GLOBAL_STEP) + wandb.log(megadense_benchmark.benchmark(model), step = roma.GLOBAL_STEP) + +def test_scannet(model, name, resolution, sample_mode): + scannet_benchmark = ScanNetBenchmark("data/scannet") + scannet_results = scannet_benchmark.benchmark(model) + json.dump(scannet_results, open(f"results/scannet_{name}.json", "w")) + +if __name__ == "__main__": + import warnings + warnings.filterwarnings('ignore', category=UserWarning, message='TypedStorage is deprecated') + warnings.filterwarnings('ignore')#, category=UserWarning)#, message='WARNING batched routines are designed for small sizes.') + os.environ["TORCH_CUDNN_V8_API_ENABLED"] = "1" # For BF16 computations + os.environ["OMP_NUM_THREADS"] = "16" + + import roma + parser = ArgumentParser() + parser.add_argument("--test", action='store_true') + parser.add_argument("--debug_mode", action='store_true') + parser.add_argument("--dont_log_wandb", action='store_true') + parser.add_argument("--train_resolution", default='medium') + parser.add_argument("--gpu_batch_size", default=4, type=int) + parser.add_argument("--wandb_entity", required = False) + + args, _ = parser.parse_known_args() + roma.DEBUG_MODE = args.debug_mode + if not args.test: + train(args) + experiment_name = os.path.splitext(os.path.basename(__file__))[0] + checkpoint_dir = "workspace/" + checkpoint_name = checkpoint_dir + experiment_name + ".pth" + test_resolution = "medium" + sample_mode = "threshold_balanced" + symmetric = True + upsample_preds = False + attenuate_cert = True + + model = get_model(pretrained_backbone=False, resolution = test_resolution, sample_mode = sample_mode, upsample_preds = upsample_preds, symmetric=symmetric, name=experiment_name, attenuate_cert = attenuate_cert) + model = model.cuda() + states = torch.load(checkpoint_name) + model.load_state_dict(states["model"]) + test_scannet(model, experiment_name, resolution = test_resolution, sample_mode = sample_mode) diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/experiments/roma_outdoor.py b/imcui/third_party/MatchAnything/third_party/ROMA/experiments/roma_outdoor.py new file mode 100644 index 0000000000000000000000000000000000000000..2d58b3d8c3c5d8c13228bf3463885eae80990934 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/experiments/roma_outdoor.py @@ -0,0 +1,327 @@ +import os +import torch +from argparse import ArgumentParser + +from torch import nn +from torch.utils.data import ConcatDataset +import torch.distributed as dist +from torch.nn.parallel import DistributedDataParallel as DDP +import json +# import wandb + +from roma.benchmarks import MegadepthDenseBenchmark +from roma.datasets.megadepth import MegadepthBuilder +from roma.losses.robust_loss import RobustLosses +from roma.benchmarks import MegaDepthPoseEstimationBenchmark, MegadepthDenseBenchmark, HpatchesHomogBenchmark + +from roma.train.train import train_k_steps +from roma.models.matcher import * +from roma.models.transformer import Block, TransformerDecoder, MemEffAttention +from 
roma.models.encoders import * +from roma.checkpointing import CheckPoint + +resolutions = {"low":(448, 448), "medium":(14*8*5, 14*8*5), "high":(14*8*6, 14*8*6)} + +def get_model(pretrained_backbone=True, amp=True, coarse_resolution = (560, 560), coarse_backbone_type='DINOv2', coarse_feat_dim=1024, medium_feat_dim=512, coarse_patch_size=14, upsample_preds = False, symmetric=False, attenuate_cert=False, **kwargs): + import warnings + warnings.filterwarnings('ignore', category=UserWarning, message='TypedStorage is deprecated') + gp_dim = medium_feat_dim + feat_dim = medium_feat_dim + decoder_dim = gp_dim + feat_dim + cls_to_coord_res = 64 + coordinate_decoder = TransformerDecoder( + nn.Sequential(*[Block(decoder_dim, 8, attn_class=MemEffAttention) for _ in range(5)]), + decoder_dim, + cls_to_coord_res**2 + 1, + is_classifier=True, + amp = amp, + pos_enc = False,) + dw = True + hidden_blocks = 8 + kernel_size = 5 + displacement_emb = "linear" + disable_local_corr_grad = True + + conv_refiner = nn.ModuleDict( + { + "16": ConvRefiner( + 2 * medium_feat_dim+128+(2*7+1)**2, + 2 * medium_feat_dim+128+(2*7+1)**2, + 2 + 1, + kernel_size=kernel_size, + dw=dw, + hidden_blocks=hidden_blocks, + displacement_emb=displacement_emb, + displacement_emb_dim=128, + local_corr_radius = 7, + corr_in_other = True, + amp = amp, + disable_local_corr_grad = disable_local_corr_grad, + bn_momentum = 0.01, + ), + "8": ConvRefiner( + 2 * medium_feat_dim+64+(2*3+1)**2, + 2 * medium_feat_dim+64+(2*3+1)**2, + 2 + 1, + kernel_size=kernel_size, + dw=dw, + hidden_blocks=hidden_blocks, + displacement_emb=displacement_emb, + displacement_emb_dim=64, + local_corr_radius = 3, + corr_in_other = True, + amp = amp, + disable_local_corr_grad = disable_local_corr_grad, + bn_momentum = 0.01, + ), + "4": ConvRefiner( + 2 * int(medium_feat_dim/2)+32+(2*2+1)**2, + 2 * int(medium_feat_dim/2)+32+(2*2+1)**2, + 2 + 1, + kernel_size=kernel_size, + dw=dw, + hidden_blocks=hidden_blocks, + displacement_emb=displacement_emb, + displacement_emb_dim=32, + local_corr_radius = 2, + corr_in_other = True, + amp = amp, + disable_local_corr_grad = disable_local_corr_grad, + bn_momentum = 0.01, + ), + "2": ConvRefiner( + 2 * 64+16, + 128+16, + 2 + 1, + kernel_size=kernel_size, + dw=dw, + hidden_blocks=hidden_blocks, + displacement_emb=displacement_emb, + displacement_emb_dim=16, + amp = amp, + disable_local_corr_grad = disable_local_corr_grad, + bn_momentum = 0.01, + ), + "1": ConvRefiner( + 2 * 9 + 6, + 24, + 2 + 1, + kernel_size=kernel_size, + dw=dw, + hidden_blocks = hidden_blocks, + displacement_emb = displacement_emb, + displacement_emb_dim = 6, + amp = amp, + disable_local_corr_grad = disable_local_corr_grad, + bn_momentum = 0.01, + ), + } + ) + kernel_temperature = 0.2 + learn_temperature = False + no_cov = True + kernel = CosKernel + only_attention = False + basis = "fourier" + gp16 = GP( + kernel, + T=kernel_temperature, + learn_temperature=learn_temperature, + only_attention=only_attention, + gp_dim=gp_dim, + basis=basis, + no_cov=no_cov, + ) + gps = nn.ModuleDict({"16": gp16}) + proj16 = nn.Sequential(nn.Conv2d(coarse_feat_dim, medium_feat_dim, 1, 1), nn.BatchNorm2d(medium_feat_dim)) + proj8 = nn.Sequential(nn.Conv2d(512, medium_feat_dim, 1, 1), nn.BatchNorm2d(medium_feat_dim)) + proj4 = nn.Sequential(nn.Conv2d(256, int(medium_feat_dim/2), 1, 1), nn.BatchNorm2d(int(medium_feat_dim/2))) + proj2 = nn.Sequential(nn.Conv2d(128, 64, 1, 1), nn.BatchNorm2d(64)) + proj1 = nn.Sequential(nn.Conv2d(64, 9, 1, 1), nn.BatchNorm2d(9)) + proj = nn.ModuleDict({ + 
"16": proj16, + "8": proj8, + "4": proj4, + "2": proj2, + "1": proj1, + }) + displacement_dropout_p = 0.0 + gm_warp_dropout_p = 0.0 + decoder = Decoder(coordinate_decoder, + gps, + proj, + conv_refiner, + amp = amp, + detach=True, + scales=["16", "8", "4", "2", "1"], + displacement_dropout_p = displacement_dropout_p, + gm_warp_dropout_p = gm_warp_dropout_p) + h,w = coarse_resolution + encoder = CNNandDinov2( + cnn_kwargs = dict( + pretrained=pretrained_backbone, + amp = amp), + amp = amp, + use_vgg = True, + coarse_backbone=coarse_backbone_type, + coarse_patch_size=coarse_patch_size, + coarse_feat_dim=coarse_feat_dim, + ) + matcher = RegressionMatcher(encoder, decoder, h=h, w=w, upsample_preds=upsample_preds, symmetric=symmetric, attenuate_cert=attenuate_cert, **kwargs) + return matcher + +def train(args): + dist.init_process_group('nccl') + #torch._dynamo.config.verbose=True + gpus = int(os.environ['WORLD_SIZE']) + # create model and move it to GPU with id rank + rank = dist.get_rank() + print(f"Start running DDP on rank {rank}") + device_id = rank % torch.cuda.device_count() + roma.LOCAL_RANK = device_id + torch.cuda.set_device(device_id) + + resolution = args.train_resolution + wandb_log = not args.dont_log_wandb + experiment_name = os.path.splitext(os.path.basename(__file__))[0] + wandb_mode = "online" if wandb_log and rank == 0 else "disabled" + wandb.init(project="roma", entity=args.wandb_entity, name=experiment_name, reinit=False, mode = wandb_mode) + checkpoint_dir = "workspace/checkpoints/" + h,w = resolutions[resolution] + model = get_model(pretrained_backbone=True, resolution=resolution, attenuate_cert = False).to(device_id) + # Num steps + global_step = 0 + batch_size = args.gpu_batch_size + step_size = gpus*batch_size + roma.STEP_SIZE = step_size + + N = (32 * 250000) # 250k steps of batch size 32 + # checkpoint every + k = 25000 // roma.STEP_SIZE + + # Data + mega = MegadepthBuilder(data_root="data/megadepth", loftr_ignore=True, imc21_ignore = True) + use_horizontal_flip_aug = True + rot_prob = 0 + depth_interpolation_mode = "bilinear" + megadepth_train1 = mega.build_scenes( + split="train_loftr", min_overlap=0.01, shake_t=32, use_horizontal_flip_aug = use_horizontal_flip_aug, rot_prob = rot_prob, + ht=h,wt=w, + ) + megadepth_train2 = mega.build_scenes( + split="train_loftr", min_overlap=0.35, shake_t=32, use_horizontal_flip_aug = use_horizontal_flip_aug, rot_prob = rot_prob, + ht=h,wt=w, + ) + megadepth_train = ConcatDataset(megadepth_train1 + megadepth_train2) + mega_ws = mega.weight_scenes(megadepth_train, alpha=0.75) + # Loss and optimizer + depth_loss = RobustLosses( + ce_weight=0.01, + local_dist={1:4, 2:4, 4:8, 8:8}, + local_largest_scale=8, + depth_interpolation_mode=depth_interpolation_mode, + alpha = 0.5, + c = 1e-4,) + parameters = [ + {"params": model.encoder.parameters(), "lr": roma.STEP_SIZE * 5e-6 / 8}, + {"params": model.decoder.parameters(), "lr": roma.STEP_SIZE * 1e-4 / 8}, + ] + optimizer = torch.optim.AdamW(parameters, weight_decay=0.01) + lr_scheduler = torch.optim.lr_scheduler.MultiStepLR( + optimizer, milestones=[(9*N/roma.STEP_SIZE)//10]) + megadense_benchmark = MegadepthDenseBenchmark("data/megadepth", num_samples = 1000, h=h,w=w) + checkpointer = CheckPoint(checkpoint_dir, experiment_name) + model, optimizer, lr_scheduler, global_step = checkpointer.load(model, optimizer, lr_scheduler, global_step) + roma.GLOBAL_STEP = global_step + ddp_model = DDP(model, device_ids=[device_id], find_unused_parameters = False, gradient_as_bucket_view=True) + 
+    grad_scaler = torch.cuda.amp.GradScaler(growth_interval=1_000_000)
+    grad_clip_norm = 0.01
+    for n in range(roma.GLOBAL_STEP, N, k * roma.STEP_SIZE):
+        mega_sampler = torch.utils.data.WeightedRandomSampler(
+            mega_ws, num_samples = batch_size * k, replacement=False
+        )
+        mega_dataloader = iter(
+            torch.utils.data.DataLoader(
+                megadepth_train,
+                batch_size = batch_size,
+                sampler = mega_sampler,
+                num_workers = 8,
+            )
+        )
+        train_k_steps(
+            n, k, mega_dataloader, ddp_model, depth_loss, optimizer, lr_scheduler, grad_scaler, grad_clip_norm = grad_clip_norm,
+        )
+        checkpointer.save(model, optimizer, lr_scheduler, roma.GLOBAL_STEP)
+        wandb.log(megadense_benchmark.benchmark(model), step = roma.GLOBAL_STEP)
+
+def test_mega_8_scenes(model, name, resolution, sample_mode):
+    mega_8_scenes_benchmark = MegaDepthPoseEstimationBenchmark("data/megadepth",
+        scene_names=['mega_8_scenes_0019_0.1_0.3.npz',
+                     'mega_8_scenes_0025_0.1_0.3.npz',
+                     'mega_8_scenes_0021_0.1_0.3.npz',
+                     'mega_8_scenes_0008_0.1_0.3.npz',
+                     'mega_8_scenes_0032_0.1_0.3.npz',
+                     'mega_8_scenes_1589_0.1_0.3.npz',
+                     'mega_8_scenes_0063_0.1_0.3.npz',
+                     'mega_8_scenes_0024_0.1_0.3.npz',
+                     'mega_8_scenes_0019_0.3_0.5.npz',
+                     'mega_8_scenes_0025_0.3_0.5.npz',
+                     'mega_8_scenes_0021_0.3_0.5.npz',
+                     'mega_8_scenes_0008_0.3_0.5.npz',
+                     'mega_8_scenes_0032_0.3_0.5.npz',
+                     'mega_8_scenes_1589_0.3_0.5.npz',
+                     'mega_8_scenes_0063_0.3_0.5.npz',
+                     'mega_8_scenes_0024_0.3_0.5.npz'])
+    mega_8_scenes_results = mega_8_scenes_benchmark.benchmark(model, model_name=name, scale_intrinsics = False)
+    print(mega_8_scenes_results)
+    json.dump(mega_8_scenes_results, open(f"results/mega_8_scenes_{name}.json", "w"))
+
+def test_mega1500(model, name, resolution, sample_mode):
+    mega1500_benchmark = MegaDepthPoseEstimationBenchmark("data/megadepth")
+    mega1500_results = mega1500_benchmark.benchmark(model, model_name=name)
+    json.dump(mega1500_results, open(f"results/mega1500_{name}.json", "w"))
+
+def test_mega_dense(model, name, resolution, sample_mode):
+    megadense_benchmark = MegadepthDenseBenchmark("data/megadepth", num_samples = 1000)
+    megadense_results = megadense_benchmark.benchmark(model)
+    json.dump(megadense_results, open(f"results/mega_dense_{name}.json", "w"))
+
+def test_hpatches(model, name, resolution, sample_mode):
+    hpatches_benchmark = HpatchesHomogBenchmark("data/hpatches")
+    hpatches_results = hpatches_benchmark.benchmark(model)
+    json.dump(hpatches_results, open(f"results/hpatches_{name}.json", "w"))
+
+
+if __name__ == "__main__":
+    os.environ["TORCH_CUDNN_V8_API_ENABLED"] = "1" # For BF16 computations
+    os.environ["OMP_NUM_THREADS"] = "16"
+    torch.backends.cuda.matmul.allow_tf32 = True # allow tf32 on matmul
+    torch.backends.cudnn.allow_tf32 = True # allow tf32 on cudnn
+    import roma
+    parser = ArgumentParser()
+    parser.add_argument("--only_test", action='store_true')
+    parser.add_argument("--debug_mode", action='store_true')
+    parser.add_argument("--dont_log_wandb", action='store_true')
+    parser.add_argument("--train_resolution", default='medium')
+    parser.add_argument("--gpu_batch_size", default=4, type=int)
+    parser.add_argument("--wandb_entity", required = False)
+
+    args, _ = parser.parse_known_args()
+    roma.DEBUG_MODE = args.debug_mode
+    if not args.only_test:
+        train(args)
+    experiment_name = os.path.splitext(os.path.basename(__file__))[0]
+    checkpoint_dir = "workspace/checkpoints/"
+    checkpoint_name = checkpoint_dir + experiment_name + ".pth"
+
+    test_resolution = "high"
+    sample_mode = "threshold_balanced"
+    symmetric = True
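+    # Evaluation settings: symmetric high-resolution matching with threshold-balanced sampling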
+    upsample_preds = True  # assumed value: the source line read "upsample_preds = upsample_preds", assigning an undefined name to itself
+    attenuate_cert = True
+
+    model = get_model(pretrained_backbone=False, resolution = test_resolution, sample_mode = sample_mode, upsample_preds = upsample_preds, symmetric=symmetric, name=experiment_name, attenuate_cert = attenuate_cert)
+    model = model.cuda()
+    weights = torch.load(checkpoint_name)
+    model.load_state_dict(weights)
+    test_mega1500(model, experiment_name, resolution = test_resolution, sample_mode = sample_mode)
diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/notebooks/demo_single_pair.ipynb b/imcui/third_party/MatchAnything/third_party/ROMA/notebooks/demo_single_pair.ipynb
new file mode 100644
index 0000000000000000000000000000000000000000..0b67c712a2edc2a8ab509711c246b4a4a474c7ad
--- /dev/null
+++ b/imcui/third_party/MatchAnything/third_party/ROMA/notebooks/demo_single_pair.ipynb
@@ -0,0 +1,247 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "source": [
+    "# Demo LoFTR-DS on a single pair of images\n",
+    "\n",
+    "This notebook shows how to use the LoFTR matcher with the default config (dual-softmax) and the pretrained weights."
+   ],
+   "metadata": {}
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 20,
+   "source": [
+    "import os\n",
+    "os.chdir(\"..\")\n",
+    "from copy import deepcopy\n",
+    "\n",
+    "import torch\n",
+    "import cv2\n",
+    "import numpy as np\n",
+    "import matplotlib.cm as cm\n",
+    "from src.utils.plotting import make_matching_figure"
+   ],
+   "outputs": [],
+   "metadata": {}
+  },
+  {
+   "cell_type": "markdown",
+   "source": [
+    "## Indoor Example"
+   ],
+   "metadata": {}
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "source": [
+    "from src.loftr import LoFTR, default_cfg\n",
+    "\n",
+    "# The default config uses dual-softmax.\n",
+    "# The outdoor and indoor models share the same config.\n",
+    "# You can change the default values like thr and coarse_match_type.\n",
+    "_default_cfg = deepcopy(default_cfg)\n",
+    "_default_cfg['coarse']['temp_bug_fix'] = True # set to False when using the old ckpt\n",
+    "matcher = LoFTR(config=_default_cfg)\n",
+    "matcher.load_state_dict(torch.load(\"weights/indoor_ds_new.ckpt\")['state_dict'])\n",
+    "matcher = matcher.eval().cuda()"
+   ],
+   "outputs": [],
+   "metadata": {}
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "source": [
+    "# Load example images\n",
+    "img0_pth = \"assets/scannet_sample_images/scene0711_00_frame-001680.jpg\"\n",
+    "img1_pth = \"assets/scannet_sample_images/scene0711_00_frame-001995.jpg\"\n",
+    "img0_raw = cv2.imread(img0_pth, cv2.IMREAD_GRAYSCALE)\n",
+    "img1_raw = cv2.imread(img1_pth, cv2.IMREAD_GRAYSCALE)\n",
+    "img0_raw = cv2.resize(img0_raw, (640, 480))\n",
+    "img1_raw = cv2.resize(img1_raw, (640, 480))\n",
+    "\n",
+    "img0 = torch.from_numpy(img0_raw)[None][None].cuda() / 255.\n",
+    "img1 = torch.from_numpy(img1_raw)[None][None].cuda() / 255.\n",
+    "batch = {'image0': img0, 'image1': img1}\n",
+    "\n",
+    "# Inference with LoFTR and get prediction\n",
+    "with torch.no_grad():\n",
+    "    matcher(batch)\n",
+    "    mkpts0 = batch['mkpts0_f'].cpu().numpy()\n",
+    "    mkpts1 = batch['mkpts1_f'].cpu().numpy()\n",
+    "    mconf = batch['mconf'].cpu().numpy()"
+   ],
+   "outputs": [],
+   "metadata": {}
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "source": [
+    "# Draw\n",
+    "color = cm.jet(mconf)\n",
+    "text = [\n",
+    "    'LoFTR',\n",
+    "    'Matches: {}'.format(len(mkpts0)),\n",
+    "]\n",
+    "fig = make_matching_figure(img0_raw, img1_raw, mkpts0, mkpts1, color, text=text)"
+   ],
+   "outputs": [
+    {
+     "output_type": "display_data",
+     "data": {
"text/plain": [ + "
" + ], + "image/svg+xml": "\n\n\n\n \n \n \n \n 2021-08-18T00:38:02.543658\n image/svg+xml\n \n \n Matplotlib v3.3.4, https://matplotlib.org/\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n 
\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n\n", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAugAAAEcCAYAAACVsUECAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAuJAAALiQE3ycutAAEAAElEQVR4nOz9ebxtyVXfCX4jYg9nuve+++ac50mZUioloSElKCEQM4UAA8YUpqvoKkMZN+5q292uNl1uF+4ylN1tu2yXqzxQZYONBQYKGZlJCCVCSKQEUkrKVCrHly/z5ZvvdKY9RET9ETv2jRNvn3NviqFwf976fO49e4gd8/BbK9ZaIay1XKfrdJ2u03W6TtfpOl2n63Sd/mSQ/D86A9fpOl2n63SdrtN1uk7X6Tpdp326DtCv03W6TtfpOl2n63SdrtN1+hNE1wH6dbpO1+k6XafrdJ2u03W6Tn+C6DpAv07X6Tpdp+t0na7TdbpO1+lPEF0H6NfpOl2n63SdrtN1uk7X6Tr9CaLrAP06XafrdJ2u03W6TtfpOl2nP0GUrHoppbTvfOc7OXXqFFVVoZRCSokQAiklSikAtra28O4arbUopdqw/n5ra4vLly8jhMBaixCiTcffx79bW1torduw8XfLaJnryDCeoijY3t5eGvZPKm1ubpJlWXsvhGj/wjbwbeTD+OfhPYAxpv0+vvdx+me+7bXWWGvbe2stWmuklG2aYVsZYzDGkCQJQoj2XinV3odl0VoDtPGF9z4+nz7Q9hHf3+LyhGF9Hfiw/p2/jt+F9RDXcxznQX3Th19G/vvD9PE/bFqVt7j/vPDCC1RV9ceSrz8sOnr0KDfeeOMfWfynT59mOBxe89yPjaqqKMuS2WzG3t4eu7u7C31OCEGapuR5jhCi/f3Kr/xKvvqrv5o8z9na2uLKlSt89KMf5ad+6qcwxhyqoyil7Pd93/dx//33UxRF+9y3q1IKrTVnzpyhLMuFd358+n5/8eJFzp8/DyyOxy7y7y9fvrw0TDj/HrbfV1XF3t4eRVG089N/aHT69Om2jf//haSUC20RzinGmAP7y2tNJ56zuuJ9rWnGZeh6dtA8/sdJXXjqsPTSSy/9BzePb2xscOrUqc53vo3+IO1z8uRJBoNBJy7sqmMhBI888gibm5sLz+J57cEHH2R9fR3Yn/N+6Zd+iR//8R9fOY+vBOh+8huNRm2hkyQhSZIFEH7zzTe3YEop1QIx/41/FlZcCAI9uAsrQCnF9vZ2C766QFIXyPeALK7MsNKMMRRFwc7ODlpriqJgb2+P+XxOr9fj2LFjpGnahi3Lkrquqeu6BfchONRat/VirSVN04U8hI3lFzu4FiSGgNozPz6cn8w3NjbavPlF3de3f+bTTNN0gamq65qiKMiyrI1je3ubJEnaNp5MJtR1TZZl9Pt9qqpiMpkgpWQ0GpEkSbs4rq2tkec5s9mMyWRCmqYMh0OstdR13V5PJhMmkwnD4ZDhcMh4PGY2m3HkyBEAZrMZeZ6TZRlSSqbTKcYYBoMBeZ4zmUzQWjMYDJBSMpvNKMuyHUg7Ozvkec5wOKQoCpRS9Ho9hBAURcF8Pm/L4vtwmqbXxDufzwEYDodtv+j1eiRJQlVV7Xe+HdM0bdvaM6IxgPf9uyzLdhzEfdaPFSllG2cXc+XHYzx5LJucwzEWhw2fx79h2iHj4+ncuXPUdd2GiSfE+L4rra58d02scT10Pe8a6/H9xsYGJ0+evCb+Pyw6duwYvV7vmudVVTGbzdjd3eXixYu89NJLfP7zn+dzn/sczz77LJPJBKUUaZq2Yy7P8/b3y77sy3jjG99IlmXs7u6ys7PD2bNnX1PerLVUVcXa2hpra2tLmdlTp05d01YhAx0+W8bEd83POzs7CyA/Zp67+mF4HTIxWmsuXLjAb/zGb/D000+38YTj4qD+EI+fcP71Y1hKSZIk7VyZJAl5nre/foxmWbbA6Ph29POZXxf8HO3jO3XqFIPBoF1H/ZwfCj3i/P9BgEfXWhk+P4xQa1m9LgOIcdyxgOOwgo2u/MXCvJC6wHTIDPs04zW6ay4Jw3aVzaexrA5W5dPHs6p/vlbg7fuPT7MrXaUU1lrOnz9PXdfX9I1ldNA60/V8VRt3rU9dbRLS2toaR48evSZPXe0TMmfxdddYEkKwubnZzuNhHR5UzmX5jSlk9o4fP74yLBwA0H2EHjj5Scl3Aj+5ePDtJxgPQuKJpWsQLON6rLUcOXJkAdCGUttwwg/z6is0HBiewgo3xnD8+HHqumY+n7Ozs8N4PKbf73P8+HF6vV5bmWVZUhRFK8kty5LpdIqUsgXufmL2k3SYX/9N3IBpmrbA2YPKcLL28QDkeb5QRz4uv2iEUmqlFMYY5vP5wmJijGFvbw+t9QLQ9+B6bW2NwWDA7u5uC7Zhf0Cvr6+3+SjLkrW1NXq9HoPBoAXTeZ63EsN+v9/2i16vR7/fb0F+lmUcOXKENE3Z2tpCSsnGxgbW2pbRWVtba8sDMBgMWmBdVVWbv16v16a9u7vbArIkSZhOpwBkWdaCJc8ozOdzxuMxg8GA0WjUvu/3+6RpSlmWLRPk+1fMSIVtEUrzwn7oGTitddu+/vtwUfe/frz5ND34iCfcZQv5QWB2FSCPf0NG0ffnm266qTPu8FkMxFeBsK57/yyuhzD+8P1h4/njJGMMdV2jtaaua2azGePxmMlkwnQ6XZBeeRDn+5e1lo2NDW655RbSNG3Hdzi/vhYKQXlYp/FOUQzKuxanw+w6hnFtbm5eA1RCUB/v4q3qx55x9uDWx+fnvHCNiPMT5n0Z4PLlNsa0aYXg2Y/NJEnauS8E8L7Ni6JomW0/hvxc7eeiXq/X/nlA79eAXq/Xxhuut3HZDgtyY5C4LHzXehnW/2uhgyTqq+I9SOK9CgyFmKEL/HYBbk+HBcShFL9rXX4t+e3qq+Gu8rJ8+X4Vlzcc6+E7/96Pm9OnT18j9OzqT/F11xrUlb+w/X2aIVgO+/KyNjmozrrWwK41IO7Hy+p12diK26Sr3MvqoOvZYXd0DpzpPWjwE0WSJLz3ve/lPe95Dz/yIz+yANp9h/H3Xo0hBubGGB599FEeffTRa9IzxvChD32I9773vQdm/md/9mcRQvDt3/7t7TOtNZPJhGeeeYaPf/zjLaj25Cs6lO5nWdZKX0Mp5TJO3INGv13sF46qqhbAlS+Pn9j99rGftMM6jevQA7dwAfPfeqm0lLKVFFtrmc/nJEnCcDhECNFKv48cOUKe52355/M5UkrW19eRUlJVFUVRtIAaaMGDz5NnMDzgDAd/16LhF6p+v0+SJG2aSZLQ7/ep63oB8HoGyEurtdaMRqN2UfJt49vL11FZlm0+0jRlMBi0Engv3ZrNZi3D4dNOkoSNjY1Wyu/TCQF0lmVtu4Zl8wM3ltz5Rd339xBU+boP2zmUuIf1GE4EXX2xa+IIgfRhaNkEu2rC8WO5a2KK2/+wC91h6T9ENQY/VvwOildxmU6n7O3tLexCAAtzkjGGY8eOtWO0KAqqqkIIwcsvv3wokBxTvB
OyrN27pLixhDwG22Ec8aIfPvfjIQbKXZKqkJkI5xsvTFBKtRLAGGB35SksdxeFafi0w7Hs51RgQaUvDO8BUCi0Chnw4XDY7pT43Tn/58G+EKJdc7MsYzAYLHzjgXy/32/LEgJ5TweNydc6Tg/LDHiKx+yyMfyHoZrg6bWUaRm4PGw5/XwYfrMsna7xEqax7HnX93Heu/IZCzPC8dUFaMP7uPxdYyguX0hdwDxc48J8hfk9DGjueh7nuatNw3J2vYu/Oeh+Vb7Cftw1j3p67rnnDpzHDwXQvQqAL6SX4HiJsZ+UwgaIty/jzuUnvZ/5mZ9ZaExjDLu7u7z//e9vJY7Hjh3jq77qq/jwhz/MpUuX2rxdvXq13bb+0Ic+xNWrV0nTlDvuuIM3v/nNpGnKY4891oaPpUUelPkO1ev1qKqqlXr5csYTh5SSfr/fgnRfJ+FWZp7nbXp+ou/3+9eo7PjJ3P/Wdd0uxD6PQAtoPYMgpWRnZ4e6rtnc3Gzrdjwet2obSZIwm83Y2dlhc3PzGtUjgH6/3070IRj04CIc4KEkdz6fMxgMAFqpuW/DoiiYzWZtel69xJfX69p6lZFer9fWuV945vP5AvD1cfuFy4NuL7n3ajn+e98mXjrl77MsW9jW8/q3ZVm2jIEveyx98H3oILAYLtJhnw+BuR8vXkrmyxUCjVDCGaYf98llW6xdu1I+L/73oMnPfxdO8nEf6lpYusZ7HO8qOmy4ZYvUHzaD8KWQZ9B8/5rP5+zt7bGzs9OqaYVjP2TKe71eqw8phGil8EIIXnrppS8pP378+XaM+7fvR11CjWUAI/zWlzkGyz4OH2+YdthfYrC+bOH0Qgg/X4b5ixfqMP04na4+chAAitc138ZhHfj5O47Xz6VeUOHns/DPqzb5ucwDez+HeeGOfyalU3/0tkn+u1A9x8878Xoc5q2rTeNnXfNh3DaHefZHSavS6Srjqj7gKd69CwUvcX9ZxvTGeYjDr+qfh/ld1q7+ecyc+7Bd2g2r2vkw83i8hsVCqjD9g0DzqjS61povJa6ucbGsXyxLM6TYPiJMP5wnn3322aV58rQSoIegIpwcfMeMAWyYsRCYhgWOO4o3OPLf+IJ7fWDY38rZ3t7m4sWLCxO5j/fq1atcuHABgLNnz7K5uckDDzywANCXARKlFHmeMxgM2Nvbu0Za5NUT6rpuJbN+IfUSba/H7dVhPGD0304mE4B2C1NKp0s9m83aNHx5/GJuraXX62GMaVVqfPq+XTwY9pJqIUQrmfOTswee/r2vT99GXroedioPWMNtVg9ee70e0+m0TScE51I6XXVrbav+U1VV22dCfX1f7n6/jzGmXZg2NzeZTqf0+/32fjwet3kM9e79YuWZCaCVilvrdG/95OqlXzHI9hNuKAEPdwfCvhNPBF338cQYxhMyhSFY79omjbc5V/Xh8LuYDvr+MItu/H08+caAKl5o/Df+faimsWxy/VIW9bj+D1owvxRaNTmHc5PvS16Nzttq7O7uMp/P23EYjn//7alTp7jvvvvaHTI/htfX178kFZcwf12LcdiGYd11qZ90SYR8mGVqDPFuUbiId0n2uvLrrz1AX7Vr1NVGMbPb9U1Ylq7nXeG7wnWlFdZHaIwb1mm4k+fnSa+m6IUPMQjP87ydR8L52qvg+F8P/mPVm3ge6qrXZSBwVT12zR2raJngo0sl4LXMgV1zwDKguIriHZMwL8vmmRCYhf0jnjO7+tiq+MK2CPO0aq6L5+eD5t14Dj1oHvXtFM8Pq1RKlpWvK0xc78v617L2P8zaskzY1RVf+E0XMxKmE7dbuFO/ig6c6UNpn+9YYcShWovPwGg04t3vfje33347SZJw+fJlPvaxj/HCCy9cU7i4QF2F89QlHenaFlNKcenSJW699Vb6/T7T6fSaCo3j8xPgfD6/ZvvSb0+HOsQe+JZlyXA4bA0LlFLtYry+vt7WX5Zlrf631532Kijz+ZyjR4/S6/XQWjOfz1s1DQ+MPcPi9VHDtvGLu9a6fe8l9R6kevUbb/DqdyviOk7TlNFo1IJrXz9hnQvhJHw+zXCht9aSZRnr6+sLkmq/IPlFwNdBkiQLKjhesu6BiVcP8hKksB8aY1oA7iXePo+htL9LUhhPPqGBb1f/i7fmli3aMUgJJ40QrPvfcGx1gUtP8eS7LA8HSRPCMPEiE6YZLhzLFmbPmHnGNQanftfEt4Hvt0mScPTo0VY6HJYpzE+c9y5mKa6zmA56/6XQQXH5Mvt68Lrn0+m0Bemh56GQOQfXd9fX1zlx4gRpmjKfz1t1Oq/29aXkWYh9g/xVgMBf+8W/C0x01Wk8zkLVq3jR7mJklgEHP9b9c8+k+7ku3N0N89E1luJ+7+kwIG3VmO8Kt+z7ML1VID4MF85Z4U5iuPvmJfAexHuw7oVJYdjw2zzPF8C/B/FhvH63PBYmrKK47g8C9svmqj+IelsIGlfNk4cF6V2qGF3zZNdYissU12M4vroYgTDNOP0w7q416iCQumo9OYjCeTkWSIXxLAPQcfnicHE9HxTHYcLE75eNRx9u2fsQF3RRLAB5rXSgF5fLly+3Ehu/LRdOlDG4Vkrxp//0nyZNUz7ykY8wnU55+OGH+dZv/VZ+7ud+rt2eDeMIGyLUW/edLZTSdElgwrz4MGtra610WYh9FZwwztCFoxCCwWDQ6kLH6ezu7raGPWtra616RVVVLQPgVSuklIzH4xY8egmxlLI1IPKA3gP96XR6jZGYV7fxk6KXzns9bV8e7yEEaKXevjPked56MvGeWrzLzFiv2Ru7eiMlv4UaA6FQ4uL7RQjWvVTeg24vufEGm37yDxfX4XC4II31jEkIvFcB8LAvxAs1sLC4hBNJ12QQDvBli/GywR+DSF+3XYtsHKZLOtDV5+OBvoxZ7VoEwrzEdRQupL49Qz173x7+Otwh8X3TM7LhvWcQQ884b3nLW3jooYcYDofXMP2r6jWkGIR1tc0fJjA/LPl5zBiz4Frx8uXLbG1tMZ1OF9ostt3J85wbb7yR4XDYzg3eBiP0GnJYstZy9uzZTpDt89s1B8eCDJ/HcMyF/bqLGfZhwv4dMvPxe2MM2sDnt2/jWG/CLaOrnWXyetlhGfVtD1GLhOS5T/2B2r1rLvijoGXgq2v+CevPz9Xxd7Dfh8KdOmBBEu+9BA2Hw5bR8eo0HojDvmMCL333kvvQIYBfL0JhUVe9LStTVxm66uSwFIfv2oX8UtLpAtPL4onHVjh/HwQ2D1pvVjF/4by+qt8uG5/x+xh3rSq3T9OvOV1C0FVlDdOImfWucbIsz1113VX2MMxhxvhB636ILbsoXt8P268PlKDv7OwsSI3jyTyuyNe//vVsbm7ykz/5k1y6dAljDC+++CLf933fx6OPPtouFOA6/Q//8A8vpPeJT3yCj3/84wudZNmWQZwXa536xW233cZdd93F7/zO77TphB0lBEchuFNKtSA9rGwvhfALa6g36iVcZVm2nl98GkArRQ69D4Q6/R5Qh4DTq6Z4gOP13UMmw0/SvhyhDrVXg/FGV
D4ffgfAg3NjTCsh8ZL1EBT7hdRP4OG2q6//cIt1Mplcw8T5uvBuEMOB65mLkEI1k/AvBABxXwivQ5sIn04MzMO+G1/7PIRxdk1oywb2sokoBKFd46gL3KyaSOMBD7RuQD2FOr8+Pc+chrsOHkiG/rr9ro3vV3Vdt6pXHnD7sP53Pp+3efDfeADv81XXdevy8Pbbb2/79muhZZNvV7g/bvJzSlh2v6M2Ho+5ePFiu0sHtIAmnJd6vR6nTp1qbTOm0ynz+ZzhcNgyva+Vrly5ssAAh9QlFYoZNz8fdjGDoYSya20I34X9138LoLVhpjN2i4wPnH0rn756B1j4fzzy77h97fICAGnnrf4QMzyCGW6gH34vk+/5UbCW3v/yl+h95KeoTn0Vszf9bdSVxxk8/oMIu6hbH7dbDBDC+vnj6kurwEIXUAmfhbrRsR1BGIevx1idJvTI5ed8/+fXuhCIhzryoSTeu9/1AicvyYdFQdofRp0etm26gOJBwGyZUKCLVgHoLpDdNQ5jMH9QmjGDuyqeeCzH/TsME+9WxWpuIcV1FAtb4rx15Tlk8LsEB6uA+rL7VYA6xpBdz0Na1sdC4XGXmlFXuZflaRkdSpkxBDYxJ+8z569vueUWrl69yrlz51qwprXm6aef5u1vf/tCIYwxvP/9719omMlkck0jLctTmIdv+7ZvW3j/1FNP8dnPfnYBkPnfcJIKO6CfTEJDTqCVKE8mE4QQC/rPsfcPr4seglVPHviPRqOF+L1qiiefD19HfkEKAbi1+1vD3nWgdw0YtktYfv9trGLht9m9ay8/EXvf34PBoAVtXuXEG1t6ZiLUgQ87rZe0huA6Nhbr6shdfSBmrsLJPgwXPo8nl3C7fhnQPkhaEn4b0ypJb0jxGIpp2YAO9eVDkO3Btd+18QDR6z/7vuOZMN8ffH/xYNyDSg8s/TMP4n2b+TR9nmLmyZch7MOevJqGb88/DAD0fxSg6qK4jrwU3btync/nGGMWgEv47ebmJseOHWvnJj9+QteCXwqtWtBiqZV/5sN1Lejhbwy65pVgXA0Z1z3GVY+9yv3uFhm7VY/tqs9u82xa5cyqDAvkaU0lFXogETcZ/s2JtzHIC6YmY6ZzZiZjrlLm9yjMXX8LZhXMCjASvpBBBeUN/xX1O/9j6hNfDkkfk59GH/sJksu/s7LNDvPsj5q6wJbPy2HmCn/fJciImffYA87u7i6wf26Jn2NDVZcQtHuvMt52KNR/j/XkQw80Xvoeuhb112F+/fWXMra7AHlMB8UT7uZ3URcQ7arrGIzHwHNVnMvSjNeuVaC+K32fx3gdXFbfy9a6OK6D1rWusoRhu4Q1IYY5qC0Oy9x4Ogj0L6MQnHsK7eDiNJbV70H0mqyNugofGh+CU2fwQDZcuEO3gKEB6IULF9oJ3hcmLFQXsOkq4Ic//GGuXLlCnuc8/PDDPPDAA1y4cIEnn3xyoQN6YBvraYVAPTRihH2Avr6+3roy9FLkUMfN14mXRHgPL9badmvRq934xdarvXgABSz40fVl9sDL16fPm/cE4dVIBoNBC8LCidXH5V0peoDgJ1LvdSV08xXWkwfkoQRWCNGm5fMZ65eG7dW1aMQTTazjGHtCCSkG512TXtzOyyQUywZqHGf4bQj6l8UVD+IQ2ML+QVe+rsIdBF/PoWehsK/4HZaiKFq/2v7eS629wbJnsEKGKfQWFDJPPv9hPrvG3TIw5+/DevAgP9xeD6XHh520DhNu1aL3R0Wx4MK3nWeSiqJgPB63bkE9KAkN9TzDcvr0aY4dO9bugPm467rmlVdeaQ3OXyt5xixJkra9w7YOmaWwTWoN4zpnu+xzuR6xrUdslX126z7jBZCdUpQpRZlgtESmBpkaRAo2BZsIdCIgkYjUoIRGYVDWMrBz0BahQVaGeZnCK/DC2ZNoo5DCkKmKXNWsqxlHzYTJlVeYbJ1D1GOgpHrTe7CpJP3c/4rcOoMVCn3iXWBL5N5zr7k94/s/yn60bI57LQx8uMYto4PK5ef0cDcunFtDRizcUY2Bu1+P/F/ojcbf+2ehBN4Dd88chJJ7vzbF/fOPipEK1QC71PC66i6eE0NwGQuLYoDdRWGbxmtlV/ssA9mhYKqL4h2zrjm5C5TGa2O85sdlWXUdr53LxkLXWr9sDV82TrowQZjGKvJtGtZHrC4c5yMe34fts68JoK+SNPoCz+dzTp482S44vuG9jrFfoMI4Q3DeNVHFC58HcWGn2tra4uLFiwC88sorfMd3fAdve9vbeOaZZxbcXllrrzFcCivcL4qhqkpoHOr974LTpe73+xRFsaDT7iXLflH15fGg3j/zxj4hqA0nQA+2PFgOAbqXEscTslef8TQajVpw7gF7r9drddn9LoBvHx+fj9ODu3CyCsOF7R9SPPn4evd12jXAYjWQWJ8t7g+rQHbXgA2/X9bXlk0Ksd5sGNZ7h4knCw+uQwbGg+RQnSQ0Qo6vvSTbq0X4ew/UQ+m5Ty8cX+FCG0snfLjwuc+3b4OwnUIwF05O8QQX1pnPg/da4seYX4i7JnPf/7t2MmKm7LC6oX9cFI6fWM3Ft1uso+mliJ7yPG9VhsKzGa5evcre3h5Xrlw5dH7EBxHkfer1E1zKjzIXOTu2z7YeslMM2Kv6jHWfSZkzrTJmRUZRplRFQl1KdCWhFiAspAKURSiLFBYhml8swoK1YKRAZBY0WCFQwpLbmlxX9GzJwJQMVUFflfRkSU+VpKLEItjs7dFTJbkoyGXJMNPkomCQahKhgf21Ymtriw9/8cP83u/93j6A+oxY2EFMX/7fMMfegpycQRb77nm/1D6yCuB0PT/sIrwMZMTz3Kp56g9C8Tzd9c5fh2tueJDT3t7ewlzhx7kXboWqMkqp9pC70BjVHxAXusX1AD5UqZFSLoB5/94zDF0qNK+1vnw/C+e6rt3arjUhXtt83YWMRRfYjNsivA/jW6V2sqyc4bzc1Z+W6U6HFK4rsWbCKjpMHpcB5jD/PtxBoDz8pquOwvjCssTPl1FYD+EOS8zgdI1lf727u8vW1taBaR0KoMcgtQtU+UyfPXuW++67j5MnT3L58uU23L333sv58+dbA8UQkIbgz4dfNlF5UNq1vRBuSf3O7/wO3/RN38SDDz7Ipz/96YXK9OWJKxScxNZ7TIjdAXqw0+XiK/bG4YFWOFF5f+NeZ9vvKIR62KHOXjgpeF/jvr6SxB20472phPUTM07hgRaePGj30tm4vsO0PKgI2yhsO1/vYfnjycyXpcurQzxRhDsZcZiwr4XUNUgO4u7D+5jpiNU3Qil3qFbipaQhWPb1WhQFRVEsqCrExpPhb6jnHeqHw74EOmyDUPc3rLtlQGLZ5F/VNa/c+1fYO/4oNz/79zhy5bEFRjKcjP23xhjsfTfBX/8eOHsJ+9f+Jbaseek//a/Zu//N3PyTf5uNJz62AOLD8wX8GFg1wXdJG+IF7aBv/zgoZlp9P/AnFO/t7bX2JydOnGgZbiH2fYN7ht6rhW1v7zCziiLtMUs32ZOCsZG8VOecf/3Xw+3vRnyQ
H6Fkk11uZMJJZhxjzgYlI0p6VORUKP6zKU9Whr/+UQHNpoiQgLD4arKAtQIQKKVJlGaUFPTykmE6Z5SUDGTBIKnoq4q8Add9VdNPKgZJTU9V9Jv3mShQtkDKxTk7XITdM8vffvJ9vDA9xQPrL/FD936QvSLlF868k54q+Y9v+xhJ1A/8/OKFEVpr6uQoe4/+LDbdZPDx70FtfwbbO0J986OoK+vIV35jZRuGfTsWPujRHcwe+VHUzhfoPfGjCL60bfGDaNWC3gX6uuigsfFaQGtXfrrAfBjOz91+vhPCuUwO51//vW/HUELuPaINh8PWGNUD91AXPpSme1Dv1S1Dl5Thu1AgtQy4hRQDsbj8y+poGcV4o2tNWgaUYw2DOP/LwGgXaOxiKpbFG6bftQO+LI44XFzeg9JaFucycN9Fq9o3rq+wHbpwYZw3j2fCZ115XcZ4TafT1nX0KjoUQPegNl4c77333gUpm7XOW8D29jbve9/7+OhHP9p6cTl69Cg///M/31nwUAe8awvFU1eDxkDfA8uzZ89y/vx53vCGN/C5z32uXQj9ougXwxj0hJJpDxKtte0k4oGUB8aeYw9dyvmJIgRuflvPezIJuf0Q5EopWx3d8Xi8MCllWbYQvwc6/mTMsBxdE4iXsIYgK6a4DXwZPYUGl0Ls6xB36XaHbRV6qggBfRcHvAyYh4xD+CzM6zIJUNjnZrNZ216xdDuUenopdqgi4tVHfDjPiIVgO3zvmT3PBPm28tdduxGxXn48kfuxErahfxe2X9d48s/DupsO7uLyTd8OKuO5h36M4c5nERiErRGmBqvba4lBoJ3B3bvuZ5OS/A23Y//Fg5ipoTe6ld3iJC//5b/F+dkYoTXUFVQVoq64omsuJymf3VhnY6/HcCLoC0EfSx8YAkMJIwHryrAuBGsI1qVlLREME8soESQKUgVZAqmERMIhhEB/aOTrLmTQAKpac35nzCtbY168eJVXdyZcmuZcHNzF1Ttvo7gnp0x6jI1irPsUckgh+hg5pLB9tO7z7+uM+hMJFCBLi6yACqjB1qCTh2FDwE/zN9oMOdRoEFQICgS7wB6ww8sf/LIjA8ObXncLG+mUUTpvgXZPlfSTmkzMGaSaTJQo2S19XLbI+/HvwgmmdcZW0We3PMKk7rNb5ozrHntV7lRi6h67ZY+9use47FOaDCw8tXU7v3L2YT516V5emR5DCgsSHth8hbnJmOmUwvSY6ZTxXPDK+tvYfst/gU7W0aM7oH8ahGD6pv8PvTP/nPkjfxmzfjeYmuGvfAdy7wyoPlb19n+TQfPbb59Z1cPKHiQ9rOpT3fyN2N4p6tPvIbn8CdJzv3ro/nEQLWM4lwGew8Z5mPQPAvzL3sWgMnzWlW5XOuGOpBdGgNsZ8YKcUMXFrwv+ZFUP4L2fd68y48P5OEJQPhgMuOGGGzh16tSCzVdMIUju0kGPx0CXECisv1X12LWGdYHfOO4u6gKWXUznKiB9GArX+sMwjcvyGFJXHa+699904cFYQLlK0NP1fFWaoVAlFKDGOGMVOD8Mc+jpUADdA6owMSkl73vf+64J+/GPf5yf/umf5t3vfjfvfve7SZKES5cu8YEPfICXX375mo4WN7SvgLAQyxo11guLt6Eef/xxvvmbv5n77ruPp556qk23SxosxKKOmJ80hBALwHgymbQHA3nPJGVZtkd3ew5+fX0dYEG/zk8KHpT7ScUbYYbc/fr6eusnOtRTDcmXJ3RtZe3iNmQI/P27LsmlB8VhfCEQjzn7mJGJJ+suzjNkxOKdlzieZZNdqNazTK+ua/EI62s+n/PSSy+1rjBDcB7qcIeeSUIVknDXwQPxEOiH/t+78rIM8MQM47J69eWGRcOU8Docs2F9hnXu6zMrLyLNHAMMdp/i5Av/FJlkaCuRKkVbCSoFkWCFAplihITdHuXF26i2Evj4y1gLkzfeTy+boMoJ2d5lTJpiE4VVEttPIO1RZCmvZinnlMUI4yC/ENgG/hsExkqsdr/GSqxxvyAAi8A06hW6UbHwqhYGYUEa90wakBaEAWVANr9Ku9/ECBLt/qTxfxJhBdIKhJEIFzNaQGVgri1FDWUNZS0oq5S6Tqlqga4FVOuI0kBhEIWFUkAFthZgBFZLsK4cePUQaJ5BogzDpGaQGgaZZpRphv2CI72K3Gzx0V97P1tnPw3v+YUvQ3IGyZb9J+z73YtIiG+0tz38MN/xdT+wFDSE4yzso7VV7BU5e3WPvTJnXPcZ1z0mDch2wLsB3OWAae3AdiZrlDBIXOGsFRgr0EairUQbz2S7OqBp2l989Z1OnaYHJhV8tHiIj118EISrHitcW1ghMCcEnBRgDRjb9A0ww7cxvfVtgAIjoFJMvvz9iHoCeo7Qs/ZX6DnUzbWZQz0D/7zcReqLyMlZdLYJAuT8wlKw0HUdjr8/KjoIjC97f1ggH3+zKp0u4LUM6HSFj+2sQpWl8Fu/VnqVl9BdpJe6hyez5nnO0aNHOXbs2IHl6BJq+LJ0lSMGdcv6QheOWYaD4nTDuT/8btX60JWvVeP/MHXStWPeBXxX9bk4Tz7ernysYm7ieFettWHeltFBTHG47nZpfayKN/x9LWNOHMCR2dtuu43v/d7vbcGiB0ZeChgPMp+JEAR69ZgwjB+A8XNfgLAi4oKF4HHZAIj1tMOOE3PFsX6xP4Z7OBy20mxjDOfOneOVV15hbW2N48eP0+/3AZhMJuzt7ZFlWetNBVg4mMjryxljKIqi3dLr0mvzdRZK/eO68eWKmZSwzGFd+XDxr/8mrt9Qwr1MihB3/i7GJ8yXf9clPV81eLqAehenGoZfRbu7u3z+85/ni1/8Iru7uy1zFQJ1fx2qmYTXYTox1/5apQrLJuWwvJ66tkBXTWgxkxW3jX9f58eYD+5guP1pJy0P+ka4nefjaiemN94Jl3cR5646BmbjOPMbbmP0xU8jrWkP6vE2DxsbG7z5zW/me7/3e7nvvvtaX8txX2jHN5YKSyWgsIaphLG17FrLBNjScLkSXKxgq4StWrBbw14tmNaCSS2YzQWzmaCcS6pCUM8FuhSYQmArsKWASkBlnQpIHfxVOB3ssrnHOrGGpEH+DcPg5NdgwFaCFi6nFpEZRG6QmUbmBtnTyEyjcotUFiEtUjbw0kioFXaeoOeSulDoSlJbSW2uafswt9WS65qLn3kgSxUnjx8jkQJrBNa639pI6lpRa0VtJForKqPQRmK0ACtd+fBcBC3g9tfuecNwSJb/KUCJoO5oWB+LxCLRJMKQoDFGkoiajWRKJit6oqQna3qypK9KMjtl+/wLPPPEJxhvvYrQc0Q6dHnb+aID2v0TFK//c6hLn6T39L9cOmbCZ37tCJ+JpEd5y/uQ4+dJLj9+zXchHQaYxM8PC5Rf67xyEH0pca2aa0IgGVO8xnTFuUySuarcsfDIz8X+zwP348eP8853vpOHHnqIm2++eUEgdVA9xBgkXq9jcBn++nAhQ7JMjSUMu+p9V/hwPTwMwF3WRsuYkzBMHEdX3XSlFff1OO9h+K644nqM4+4
S8nWlH1MXtjmo7LH0fFmeu/JvjOHChQt893d/N1/4whew7WR6LR0oQfcH98QVGAOVcFEN9dC6wHQILpcN2K53ceeOgX8MzGPwGlM8YFZ1MKUUN9xwA0IIrly5QlVVrY7b2traApj1W2zHjh1rJeMetIdbdvGgiifqLr23kEmK4wgnp5BCP/Zx3D7euB09dekfx3Xj87fMqDOkUALeBfxD6X/XwA4Bfih9D9sxnvBj6WBYrrqu2dvba/XBVnHQsW/hVQxJ2Dfja/9tCHxDLxpxGcLrLvWVuC/E+Yu/tQjGx95BVl2lP/6ie6Zz9HyERZGoRZDi4yk2b2R24z2MnvoY0tSoNGHQL9A9Q+HbZusiazuXm3RYYHp8uw2HQ0gytrRiOhWMrWCvluxp2NWCvVqwVUquziRX57A1E2zPYK8QTOYwKQSzAuYl1KVA1pbEgKpBNLDUVAJdQdWYd+QZ9HPYyGGYQz+FXgI91dg+KkAIag2FgJmFSW3ZrWF7apmVgrW+ZX1oGA01/b4mH9TkA0M2MqRDSEaWUs3Y0zvs1DvYgaCWirkVVEKiVYpWKValmCTFSnetRYIRCm0VRitMpdCVgFo1ENsgjUFpjagtlBpbGpgnmjGCiUzsjNQWQjAX2EoI6mZbQiNQr6esBS+f940SdFQvybfhO+Gk2AJQDnRbKUAKB6ylRUrTMBYWqQxKGpRsgLYwDnR7A1I8GAeBwViBFdLtSliJRmKswCDRVlHZFK0EhgHbZt0xPoBEo4RBNXstdvQw1Zu/FlOXYGonSbcG9Bx0hbBOPcuu38H4jveB1WBqhKnAaqe+1Vxjaqe2ZSqMGoFIEdOXkLZGWAO2xo6OUp98Y6P2VSOMBls3cTb6R6bC6mohPkztdnqMC0v7fRPmmjjqa/Tc/0OgcO7Rg9Mgc9T4TPssZnxWMSVdc/ey+3j+9YIUIURrC+LtFcLd5q41bZn6ZFc5/Rp0EDCMr7vi6ip7F0B9rd8uA7pdz/161CXECcPAPn5YBrjDtJbVwypmZFlZ47W8K724nCG9lvqIKbRxjDHSQczJQc+W0aG9uPjOGHPCsTS0y7NDXFHW2lYCD/vuv0KA6A05Y2m6D+PTCY0Zw7wuk7CH3/r8xSohPp2iKNoj6f3gPnnyZAuyvfTPD3qvh9pWbmNkGqvexHkLO2oX8xLWeRf4jaXwXYNgFagP6yJmslZR10QU1uGy8PGzcDLuytNhJvYuA5YQ1HvygNH3mZBhCk/nWzbol6UZ643HeQzBvZ/cQilOl2TCX3dNkLExaFyHcf5ChuzCXT/E5dv+LKA4/czfIZtf5KWjfxNMQla/wsnJTyNtiTUl0pZIU2J7fV78M/8NDCXpO1/hxCd/gfV330LylmMgYfdDLzG/qql1n9oMqBlRixFaDKhsDyP6WDngednnUyLn//frElVblAbZgGrrsAx16SS0aWrJMxjkMMgtoxxu6MFG37I5tBw7AccGgkEKWJwKhYaiduB9WsLeHLYncHkMl/fg8ja8OoU8gRNrcDz+G+1fHxviPIkkNUpUTGclO5OSvVnNpDBMS0uhFbVNMTJjvis4d6nkmeevcPGVi1Q2YbB+jOOjYxRaMqtcnqalpiJxyjlSYUSKtk6FyImXHdAVws+lYK0DsU7TPAUEWDJ8P2twtvsg/Gv6kpdgJyBSC5mFHOgDA4sYCMwAGAhnANATkHrJv0HmBpUZlGxcI6KbPyfxTqwmMZoETWo0qalJrSbVNanR5Eaj6oqsrsl0RaZrsqoirUryqiarSvKyIqtKXt06zj9/8pupK8P/+a4Pcu/GK/zy9lv5wvRWvmbzk9w9uEhRGV69cInHfu9jnLtwhXr0OqZv+XsgErLn/h7pxd8EmYBMQXrVrOZeKKxMQeZYr3sunR667t1MfePXg7Word9B7D6BlVnzrcKKpLlOmmsXHzLBygSE/3WqYPvXLqz7RjXfJdjgGhmoMHrwbw3OT6VFVNtgKsdgeNuQheuI4VhgPvaZg8VrDbZqvtUN49AdxwJT4ePxjIqpEFZjTUV95D4mb/9xQDB67AfpnfnFhXnNr+ld81dI4TwYhu9ab+Kw/nv/TehuOIwjBlbh+t8FLOPfeN7uEjCtWktWAcz43oftwg0xLZOor2KKfFli4U9sEBkD+FVtGJdhWVt1hV0WV5dArgv3dGHQLvwQ5ye8j+uiq4xhfpa9fy2gPKTX7GYxBnKwHFDEqhahhDdsKCnlgv4s7PtjjoF7l2QvlNSHafk8+7Ah+A3jD4G7/6uqqg3nT/r0W2abm5vXdAo/+MOJJKyHmMI6CCcEX68hoIsngS4QG74Pwy0Dz7G0+TDXXZxmV+cL2yncBYjz3TVRx2HjCWnVfZckO3zvvwnDeSMiD+R9W7wWBqVr4Vg2gYd5iMdTHM7TMslGDN7DMi/UJ6CTHNMbYfvrTG57MxzPQFkuHv9+J61LcqgkpbiNl9VfdoCvkXm2wO8VQAuq6nbOHfuLnPs9EI/bxnhRgBYI5QCdymuSvCbtVQz6JUmvIu3tkPYukeYVaa8iyWpkqp2aR2IRyoCFusjQ84RqmlOOc6pJRjnNuDBNeflKSj1LqQtFXTpJs7WQJJZE+T9BIi2JFCRKoJSTkKsUThyHU40KuLGwY+DqDjy5BbWBSkOprdMv1wJISKQilRmp7JNK6+IWFiUsSoCSAikExlhmZcLeZESR3oMRGaLOYc/p8xsrsFZCYl3iXi3GilZK7CvbNKol4ITZbVgASqinkG++hBQ7wDaCK0guI7iA4FXgFQRnEbzIP5WXb7zxRv78n//z7W5f146OrCV2x2J3BVollDJhniQUWUqRpMyUokhSqjRhrhLKJKFUCYVKqKS7r6SikgmVzJjKhFIqKpNQFgnVLKWaKfRUoWcKPZbYscSMBUwFzIVTJdKNqsxJy/9svxku4XYSLPzE7tciruB0+iuBfT3wun0mRK5p7Ff9OcrpfwKVwNYCW0usVqAV1iRYq4DUfWAN2BJhS4QpHYAuXb8XRUrywm8jPKj3NhiiAd0erDfvEAkWuXDfhpUuvA2eWRGA9/CdB+8iwQxux2bHAE320r+hd+4nF5iNaxmP/TisVB3XIXOxf21lAkmvjdPGcV4Tf/h9ul8+qTC9kyAzAKqb3rMA0OM1fRmwiue5rnkxft/1LBTohecuhLQKpHelsQoUd8W3TKi0Kky8vi9LPwahYZhl4HzVmr1Mgt7lRGPVmtVVrlXtGIP8GGscpj4OYlziHeWYDhL8HZTnrn67iiE6DB0I0EPQGG9rhBxjF0gJwVcsxfQgKDw9zH8bS2C7nsdS8BBghemE1AUIQ4AcSlu9u0V/wFKYTldDhGBpFfcac2dhPmIj0C7dt7iDLgOGcXrx89AIcRnYXxb/Ko6wi4EK0112H0+GoaTbp7Xs+OplcXXF2ZWHeHKO66Nr4Mdxx5KGsH8um5xXMR7QCECzAbo3wvbXML0ROh+ie2uY/jFM/xg6PYLONrDpOjpdx6ghVg4wso8RPSyZk7hq6awaK+s0lF8FtMSYE0BjPSksyuyh7C5ST5F6Ql
KPUXoXWe4wv/0+ilM3cewLH2B47rPkpxWn/08PUp7f5fxPPOF0m+WAKl3HZEfQ2Tpztck0O4JN1yFbR+XrpL1TIHrUWlJXkroWGG9gaQVIp5eNACGsMwxsXAA6tQuNSi1yvaaXV6hck+Y1SepAv/urUKlGSA1ITCNhr43EaImpJUYrdKmoqwRdKnSZUNcJppJYmaC0xBhJrQW1lsy0cK0ifOOIfUm1AbQAm4Feo23Ayu5f2+YilGzji9VIzK3zkoOtEdRIapQypIml35P0M8Wr516imI1h7cufRzgVfQQKwTFgA7jLRdLoqL/rH7PV7/ELlx+gP85QojGklQafC2Et0Mz1+HlOtAad2kq0FVQ6YV5nTOuMwqQUOqXSitokrZ68RrYqLAgWddD9tWBfZcZnIgPSgHGZCpjQbCE4JsbWAs+pWGtZ2C1AYKYJ5sIa2DVfMFp1nTZd/2ebPDivLVY2jIGyCAn61FuZPfQv2/4omjHidjY0QhiEcFUsvKcjW4GtkbpCmAJ0BbpA1CWinkM1R5QzKMaIcoYtZoi6wJYFVAWUpbsu59iyoNp4B8Xt/6WrnksfQe08c81c8odFBwkkYlomHND9U+y+9/3YZED/yf9xaVqrJMc+ri8lX/7bGFPAvleWwwDuZXmO8x2W5bVIS7uwUpxWXKZV+e1Ke1lZlzEl0C0lj591pROXrSvdMP0urBbjmi7hXVfZDlP2ZXmN83UYitNd9t1B9XQQHQjQPZjzFRqC2C7VEk9dHOgqri0+8jqOz1PccaxdPHgobOiwU4VSVh9vl6pJyIR44zavyhLmK5R6x0B4GThfpuseTiZxGVZxpr4MYfm6Jr1l0oAw78s6zjKJ9Spu2FOoswWLXkbCdA8yKu3KU3wdxreMO/Z/3gDXmH1f5mFdxHYNvpxhfXYxIm3ZhMD0RjDYQA/WMPmIOh9iepuYwXFM7yg6O4JON9DpOjYZYeQQowZY0cOSO2BtUwemK9P8OSk1RuGAYtUs/AXSzhBmhtQzlN5G1FNkPUPWE6SeI6sZEo3zxNfoBTsEDCrFiAwjexiZO7Ci+hiZo+WIKjuOyXPMVg+7k3Fp+J9w4d4MSHnq3+K2u+8y+1LhxuwPq1x6tgRToGYFoqhYkzWnjmmOb0iOjiybA8PRoeXEmuHYCDb6giwRGOOk2to47x9FDbPS6aBfmQguT1KuTFKuNjrquxMYb8GkhFkNhXbfCmmdVkEDzCwWY0Srsox1AFX4bmMstoG31jR1jkU0YK/VD7Y4ryxGYI1EKI1KNaqnUWmNTAyy2RkQLchzH3rPMNY03mq0wGpnmOmuFcYkWC2ojKA0gr2pwE4E9N7g1FPm9t0uI2If5MYkgNv/C2bA45eaez9mLewXOhwstO3o4ggBrmhBLBIniM5YBN5B3ezvEtD0D+uk5La5BpS1CJxuuRCNXrswzZ/TZbcSjJIMegWJNA4km5K93S3Gu1cwunBScCqsKRrVjBrZqmfo1mVoq65htVPrEMYBdSVBCadvrySkCaKfQ55CniJ6mTNm6GWQpZBl7l2WYXP3zOYZpCkmzSAbYNMUm/i/BKtSrPKqLc1aQ+O+VBikbP4USKHJMOTsIo1GmL/jdOt1hdAlonJjX1QlVAWinEMxRxQzmBeI+QTmM5hOEbMpdjJBTKcwHWMmU6cHVlTYosLOC+y8grLCzitsUWKLRZXNZWRlRnXsEZKdLyCKbdTsApu/+B9dG26JQKZLUgrLVQpCWgbe4nj93Bx7MFuGScJ7H24ZKHst4NDH5deiEEd0CYTiPK6Ks0uAt4wJ6lrLluG6g2gZuO7K7yoB4kF5jmlZvSxL+zDM02GZq2UC02VxvVZg7ulQKi7hQHnnO9/JO9/5TmazGf/oH/2ja4DUV37lV/KmN72JnZ0d/tk/+2fXgFgfTwx4rbXcf//9AN6y9RrA37XF78P9wA/8AJ/4xCf4zGc+01bIsu2JEBjGnFoI0NbW1tq4wvchIDuoIz/wwAPcfvvtrTeYyWTCiy++yOc///kFKesjjzzCTTfd1HrLGY/HPP/88zzzzDMLeez3+zz44IPccMMN5HnOdDrlpZde4umnn+4sbzyphFtUYZmEENcwYjGD0KWSE8YdphHaCITUxWDFE5YPtwxs++/CtonjisOF91prLskRv/z1fxNbzrjlf/1hrL3C9t3/Vya3fQdrF/41w50PofMhpr+J7h/H9I6he5uYdAOTbWDSNUyyhkk20NlpsAKhK6xJGmBtoTBQWwfwdINeRGPJaCSUCjGfIOtdhCmRdoIw227xNQ2QMBqEBCExakSZ3wlokuoiVmZY5QB1LdcgzdyINgXCFEhTInSJVmtY2SMpL5BWVzEypVi/DWFK8q3nGzd/CdBsd4sUkwyo+8fc8etVjRUZWAnWIvWMpLqCSAzi9BpCF6iXziLqkmLtdup0nXz8LGm5TW0VRiRYmSFUD/oj9uo+4ys5T191UlltnQaDx3HWg0gPDDv/GvCnmw+NBUOrDiIb6bbyKiQWtHbBsKKNQnkPLDhVGWOceokUTpUlVYYs0eRKkyWGXGl6iaaXaoYZDHqS0SChl8DeeIeXXz3P2YsXEGnG+rHjCJFhZULaGyCzHIN0cnHjNDm0te5agzbGMSO1QGsBtUTXogHwyqlpAEJppDIIVSPb3QbHHBnj+po1AqMd+LdGuP7mpf9tvYrGs4p1WhdegixpWTiARGpSVZOpkl7SHFJERWZqMlOT24rMVPRsTa6dUaW1kloItFVo6TzE1EJyudzgUrXBsWSHnizRRjnJu5UY04RvJPbexaa/rm3Crk2dQWmjMqT1jU43XwRLmcSNsdatjtm/zvugEqimiHrmZ4X9jgNOH0oIMCXMcVbDwjZ90jNDzbVsOqR091YIxwkK2fGb7DMwAscsCIvFoG21n0cMCM84uPIIKRAqA5k3DFKTphIIKbBKIJTAqiY95fJhlYR1CUdEwxT4vDbMpjTuTznmSCQNI5k4RgmhkUI37kzr5jyECkGFMY5RsPlxhMqR6zOyVz8CszGycIwBM8ccMK+x0xoxvghFhQmZg6LEzkoo3XM7L917vShcQwiO/vd/jvxdr2f7R36C+a9/CoDygTdBMSN97snOdUII0R5+tOpwtC5hY7h+LLOtWiZ4W0VduGOZYO4gyXksOIvjPShPXdgqjh+udfPcVY4wz6vu43Itq4vw/bJ8xfddv12Cxq44umjZ7sFBcawC5d4980F0KIDeBdSUUtx9990899xzC6Dsvvvuaw9ngUVwHwO5sKDGGB544AGstTz99NNtOiFgi5mB2PNFV6Us48xikBlylB4oxjrtYYePgXrcATylacqZM2fY3t5Ga83x48d5wxvewJEjR3jssccWDAafeeYZJpMJ1lpOnz7Nm970JobDIU888UQb96OPPspoNOJzn/sc4/GY48eP8+CDD5JlGU888UQnQI3zHbsIWlZXy9ouLGMXUO5SE+miWO0pftbVnqEkvGsSi42U47rwfx+/8Z3sHLsDhOCp/9svOkAiB/AY7Kz9JXaq/8qBaiuaxbJCU
CJMgZjOkWaK1GNML4E0cdJWXSNsAaJu9DIb9S07d39m7tzB2ZoquwvmAmvXMDqc9CMkKkWjKiCd5HwiwEDNiEDU6X6FboCGwjJAi4EDEdot2LW4iTq7yQWfCqzsM19/ZF/9wEtJsW4RXxOQWQfePZ+VSEw1wNQD0CClxoo+9tajTmJqBEhLceQBirAorjNQYpk2xRTNcfEOKzq9bmFAauefXGqwtUBasNZJ1K31YNYBW+MBeZiU3RfQGo+55P5hRkqCktbhq+bX16Ft/tsGPBsLMxRTrRq/7KIRCjftAo16Cki7hjU3YPIarOXStpfYNVUbHgAkHEOw70TFx+twiWnKDLK5MY1RnsXOjWuOJK8QSOewHdn4KrSNmxPvDlIgjETaNq8S4/68lBrj2kE299J5o5fSS3X3K1ZISyksFbLRM86wdn9PBn9t3Q6FNgpjFcZIB9pNgrWCMlkno17AxsK6ehc09dI4sJfSsksfLRWp0KzLMSinw1PWNfOqxsgM8k1X5mwGdoJIFCQKm0jcyVa5sw4uBfx2hjj7lAPjzfaKVQm2N4KNE27n6vIFRLHTgPIGtNOAbSHcoBG4Z9K/a8aqB/AifN70UCGbDtEYmJbK7SpgaXuyD4v0o7vtne2fje6DkPt/jSdOmTVxWkR1AVFvN0AdN8dI2XrqsVIi/EDxIF8qUBnIXvNdM5ikxKocEMhHamT6tdCXsJZiM4VJEmyaQpJALqDvVLhk8yeaPiikQapm96RhDgTaedHRtRNgoBmv5UyVxf7o3czt/0z5+rcy/4bvAWPZ+JE/S/74b3ZKZvv9PkKI1k5s2brU5eQivl8mpT4IqK6SKh8khY/X5lhivGwXoit8V3m76CCAvSzssvuDnsUC3FCA2kVd9bMMd3S9P4xUe5UdYRdWWhVv+O6VV17hxRdfPDD91+zFxWfq2Wef5f777+fZZ59tn995551kWcbzzz/P6dOn28zHlRiC2y5JrafQoBSu1W+ODe18mFBvOS5DF1d1kJrOMolyzHDE3wMtuPZhLly4QJqmPPjgg6Rp2nJRjz/++EKZz58/T7/f5/bbb+eJJ57AGMPa2honTpzg8ccf54UXXgDg8uXLjEYjbr311haghxRLouNOFZc7HCBxh+vqeF1hDuqgMTcaqpjE6XRJBrr6Q/hd1wQVxmWM4YbtF/j0qbdgEdz4734MPvkrvPLGf4tVJ0imZzj9mf8MUe8h6kmjGtJd7r2bvoMrD/63gODo53+E/tbjDoR7MG6KBpztM0RG5Lz0xt/EqA2y2TOceOH/CdRYXTVSq9zpkqcb1GqETTbQao3Z2tuZr70NlCApXwKZYhKnd441SD1BmpnTfbUlAuNA8fAut6jaEmn2MPkI0l6zYO9L6K/R1y2tAw5C0CKnVvpqIBfYnkD0NDKrkT0DPcA4Y1ErLKIGWwpE0fgdLyQ0v+5eOPG5lg5LtIi1+YVrMYi49s/4cA4pOxWMxDo/5ApsYtEKtLTUHl+xL0yUjdWoMK6sHixKa1s1aeH1x0MxqJVYK9AGqtqitcDaHIREo7FoEukwcyJt6wbcV6UH4zXC7TY0et9O77lGSN2qxlgpMI13kCYjBZYxVuwAW1i2Mexi2cOwS8Uuml0++gN/50hf8bVf9R8xarxSLY7LZgESEq0NTuHEdQIrpNvhsAmlUMxJmdgeU3pMbc5c5MxtQmEySpNSmcT5VNcKawRCWWRqEJlrD60kKKg3hkxqnITft70WtAazJmh73/8sVGPJFbPpalAEnBk4aXduIeuD6GNrnA/7tqBBxT+QYh54S7vz0qrkeCH2NjA9je2f8gOefVBMmx9amwKf2aCfts/DObm5tgFQt9ByyCICAta68pua7OLPkOx9smEWGqNPVKPvL/efiYY7k45bE0JSjd6EXn8ELGSv/jvU5CkQqfuTCYh039BVJM27xgC0fR4+U+4bmWL6pzHDW7FP72BnlxfiEiIFtYZVI5CQzH6LjTP/JTbvYbMcm/Ug77vrvAdZD5v3qPsDGA6xgyEMBtAfwPoa+de9A4uiulRRfOX7qO95CJIUdE19x+vIH//NoF/vY40sy1oj0VV0GF3rWOIbh1v2fBnAjtNbFveydbVLWh4L3roAdiz8XLUDf9jncRmWhV9WLzHeOqi84bPD7GCsAvExdTkdCdPvCtcVvov+UCXoIWjz9OSTT/It3/It5HnObOa2Cu+77z6ee+45yrJcTCRJeNe73sWtt97K+vo68/mcV199lY997GPs7e0B8O3f/u3cdJOT7v3QD/0QAE899RQf+tCHsNZy4sQJ3v72t3PDDTcgpWRnZ4dPf/rTfPGLX1yojDe+8Y28/vWvJ8syXn31VR577DGm0+lCgz7yyCPcc889jEYjptMpX/jCF/jMZz6Dtft+zN/61rdyyy230Ov1KIqCK1eu8Fu/9VvXlC2so2UNJIRoDzsC51s+BLLLPH34U9S8rrR/V5blwqDrcg8Yg/JlTNAy0L1M/y+Ot4ub78pHPLF5Wqa2FH/vvw0lHMuk68vuw29fd/HTbH36t5jubMPLX2BvvMdNv/W1VBsPke9+BmmK/XIsYfYA1l75GbLdzyH1nHT6woJdhTEGIzJ0uonNj2PSo1RqA5McZXjl56kH94CUXL77b6CTI5hkA5OMAImwTqe2Qa1O7UAohJpjpaQe3dC4JklAGkA4PXY5bKRbyklddYk0M9A1st5DVhNkeRbqBFXukhRXEfUUpadIXQKm2Vq21GsnkcWMpJhiREqVH6Ue3gDzGqNGGDXA9NYxJsWYlFrmWFIaNOlrqgEzHtDg9K8b4CKlJVWQpZZeD/qZ+00zQ5JZRArkYFKLzaBOLKWA0loq25zKY0DX1qmG1AIqMJXAVKI5NEhgaumYDRMCf7sIzLwRaGIRafOXWUQDMEVmQRnXHZpPhRE4T3WO2RClhNJiK4utBVqnaAuFw1KLov5rulWYN4Elxdp0H+iFHl9csBGSEYrTjeTc6YSHxpgS+K7/ie0E/o0U+4csqeDPN5UJ6qJuul5bycJdmwAYh+CY4Lpx5+j4F4H2kmYrWoGuvbBf1LYt3EXw48GvaSITiNSgrG6y4HSD6qLE6hqSPmIqkJf2nBqG3x2QoW63RaagRIXKLCp1fzK1qKbPyZ5C3WNIVIVKDarxNOQZWNvo/BvhVFqslM7ziZD7znmMxZrGfksbjNaY2mDrGlNVmMrd1+lpdFEhrnwBW8wx0zlmPsNMZ+jxlFLcQXHqBxDzbdLdX0Ha84iqQFSF0zevisbwtPntACpCuF2N8vg3IPSEbOs32+chHUaiGIc/DIAzyXG2H/oANj3J8Pm/j9i52OyaLa4Vy4RqIcm/v0F6100Uj3+BvjaY07ew/V//Q8RsQv/f/6uFb8J4/Zkly4R34by9LExXfg4CeQdRKCXuirsLeIbhuwRvXXH497HAM8QeXYD0sG0cP4tBdPw8FqaF+YtpFcaIDU0PA74PAtDt2r1EpakrT6voIAZrFR0I0MNjzcMKOHPmDFVVcc899/DEE0+Qpil33303H/zgB7nrrrsW
MuZB7yc+8Qmm0ynD4ZBHHnmEb/u2b+Mnf/InMcbw4Q9/mK/5mq/BWstjjz2GtbYF/jfccAPve9/7uHLlCh/5yEeYz+ccP36c0WiEEPsuCV/3utdx+fJlHnvsMfr9Pm9/+9v5qq/6Kj7wgQ+0+f7qr/5qbr75Zj796U9z+fJlTpw4wSOPPEKWZXzyk58E4O1vfzs333wzn/rUp9jd3aXf73PTTTctNOzXf/3XMxqN+Lmf+7m2rF2dyJO1ljRNOXr0KA888ADPPvvswpHw4UBSSnHq1CnuuOMOnnzyybYOx+MxFy9e5HWvex2TyYTJZMLx48e57bbb+OIXv9gC2K4JJgbrAF0HUHVJm2Od9Tjero63TEKwDDTHE06oJhMyDMsAehdYjw2Ew3dCSMq9DaqJJfV5SfrU/aNkkx40AH2BS0dSDO9jdvQ9JMUFTLKOzk9SZyfR2QlMuolN1jFq5AwtReYkTV452oMyKZrtYbMIdmQjUTO1wytaILR1YEOXYCpkVTpDMeN8Fwu8YvW+FM02kjErneGnlTlW9qnlDU76KgE9Q4oSkZeIzOnrCtvUFwJL4iTzPYfmrGiQn6kRfd2CW1s0CE8o0DWCAqkqZLGDqneh2oV6jBI1eWI5feoo99x1KyeObqKShELDrBJMS8GsgnklKGooakExdYahlYZKC6rGaLRueAjd6HHbBthKgl17l1vfcE0D+37jsm90o2biu6eX2roD6rFtRGEbCSeh9+A0fubJi8o9hdJeD4wTnNeSpAHOidkH0CGglyIAxEE/QrgKcFJfgcG2vxWNgTGWygp0A7CngZTYg/79TNLqCLVMRFM2Yd3uSFh2L9kO6xiCOESj+hHnfRGMiybakG/a3zVx6jL7akuKGhW8N5D1cFySBWPQ6Zq7N6Y5TMhdo7Wrk1LBfIIwZWOYUAfhKoRu/Inr5vAg7XajhKiRjU62VBohNFLW7qCmLMEev42EPZTZQmYSlUtkvv+b5aK5FyRrApULVH6GJBeofAOVb5BkliR3jEKSGaQyGPN+p3bFe/b176XEWIXG+dDXZNQ6Qde4vxJMZalLiykNutTowqDnNab4bvc7rzGzAj0r0dMSMyswkzl6WmBn84YRmENZtIwAZUF54uuwDMif/QlUsQ1lwyjU5TU8pxACWV/m6Kff0d63rR+tJ/WJWyne8S1kT3yY9MXPdYYxl3YoLu24Z0KgLrzMsR/+lmuERuG1lM49cnxIUUiHkZqH18sA2jKJevjtMulrV9xd5VlFywRxXeX09+Hu7jLpdReD8FoAZxfjsOp9nIcu/LJKMPgHoYPUbFfV7zLpflhnB7VRSAcC9KtXr7K7u8uxY8euAURPP/00999/P5/97Ge55557MMbwwgsvLAB0a92BP7/xG7+xUMnnzp3j+7//+7n11lt57rnnuHr1aisxPn/+/EIDPfroo0wmE37+53++BWavvPLKAgMAjpn45V/+5fZZnuc8+uij9Pt95vM5p0+f5s477+TXfu3XOHPmDFJKXn31VaSUPPzwwzzxxBNUVcWJEydaA02f3zNnzlwjyY0H9bJB2+v1+FN/6k+198899xyPP/74NQYrR48e5eu+7uva+n3yySd5+umnF9L96Ec/ytvf/na+5mu+pv3u6aef5gtf+AKwqJri4w4HYTzR+Lx3gfPwOn4Wljmkg6TvcZzhu648+Gexqk6sFhNaoIf3XWUC+P3pW/nU5jfDpmB94yOY+Q7jm74JZMIVYxDVGETu9K89UmqkvtcYL0pBg/7A631aDXbmfmUGqucSNkUDjHJnfOZRXHNgiNM1bqTmFqxttrFt7j5vy2D3JfuqSVooWv3YRm3FNmoYzqOGaKTjAuhh6AWNEzeW2Nc7X5A4K5wf6aYubFAPQmF7feyJjLoaUZc3Nd5QHCicWMFVK3jyOeA5WoAX4lzBvv1bImj8jFtSaekpp8qaSqdKnCWCXEEvdVJ4axvDSw/kNVTaMqsEs9oy1TCroDCCUoNJBCKxLWh27qodFPIOCFUjdVWJk7LKtJHqpy6DJpFo1TjY8YyXNQhdN9hetJpD+4DT4UqHJwXeqwxGBNayTf1bL5JlXx1j/8+rJYsGaDfoR4i2zQQCIVsmQySm6c4WbwzqjGilM/rNDTLXyJ5B9WtkX6N6zWFFqW4lykniDEcTY1DaoGpDWmvS2pBWFVmtyaqSfuUOJMrKkrws6VUVWVmQ6xplrTM9tO4EV3eaqjsE1aCcprxQaCSf13fzvLmV++Vz3JxcRpNQIdgZF7zw0itc3p5R3Pgt2HyT5OpHob7q1CWyPmQ59HJsb4i5/fWu7q72EBfOYFUCctD4+nZeVkjWGol/RevDXApaPfRWJSzgwGxb37SnmhrjmqGwMLf7bWmD8EG/aMedbxOg2ToLflX0nTOCbS0Ymmsh/K+zGxDsn/zqvBrZ/R0FZZAjQ7puUUlzLoHU7lvh7BNaPS8pQCrWbt7l+IM3UWuJJkfbhNok6MpiSqgriykMujLoQqPnGlPWmLlGzyv0rELPSupZiZmVmGnB5Cv/U2w+ZPrtf4lj//kDiGJyzRpz83e/iRu/9SGe/x8+ypWPvnDN+/A6lDQPBoN2vY0B8io97IMoTi8G8/FauErCG9uyhfEuS3vVetsFZOP0YuAYxr3svmvdX5XHVeHi+lt1HTM6McU45iApehct6w9drr+7sE+cv1WMzmHoUG4WlyXwhS98ge/6ru9iNBrxwAMPtOomXY1+77338qY3vYkjR46Q53kbx9GjR3nuueeuSUOIfY8jp0+fbqXbnsLj033FnT17dqExt7e3AeeNZT6fc8stt1BVFS+99NLCQD137hxvfvObOXHiBOfOnePKlSvcc889TKdTzp07x9bWFkBrYALw67/+6wv5jQ+28SehglNp+eAHP0iSJBw/fpyHHnoIIQQf//jHF+LY2dnhV37lV0jTlJMnT/LAAw8ATp3IA9S3vOUtbGxs8Lu/+7tMp1OOHz/OAw88QFVVLUhf1jm6BllYX7HUOp5YYpDeNTGEgDrmGOM4wrBhOmHeuxgDuJbLLbVgXPXYK3N2y4zdIm/vx7X73Stz9qoee2XGbjWgEfWxN/pyRG8HTOYQYSKR5Q6yHiPrLVRxBVVeRFVX2b79L4NVYDX9y7/h9L7rMUJPEXoW6J7PkaZAmBm7t/wZiuPvBGFJyvOo2QWK/pfBXAIGVb7qEJY17LslYX9B9GBbyvb0QtvqeQYnUHpJYasa0MiRLc4FnXXeXYSZOV11O0fYCgeJakSrgy6YbjyEzY804LD2mWn+PBixjUqET89CH+StNaJvEUPj/tYMcs0ghhZaAzBQCJJKICtJUglSLUk0pEaiZgI5l4iZhEKiZ2DmgnImGE8Fl6aCYgJlAVWDh3Bqsc67Y6t+4bkH3GyX06rqJonzklFb4apPepC9/70WqtHKsO7UTGHJpKGnDP0E+olBGU01mXDl/KvMxxMwmrXBGmujNZI0R9hmZ4MEbZ1Hk0orSi0ptWRWK+ba5TEVTmprdYmwBiNcmxuv+4toGLkassEE0ai2JEg
kSfOnEKICnPuNYjxECGSSYVFYK5HanfKZyJpU1ChZk0iDtLbx3W6dB6IZkIJNJDY12EygE4VOE+aN1yCRgk0sDBpuLZx2FqTiovEuIxqXkgrTqB/ZWmIKiZ4rZ6Mwl+6goeawIe+J5+PyjU5VqfZ/gtb7ew2MoH7v17rwrXqO3Q/rf/eOYPpHGnGzZt/iOHEF7lmEuIwsLuBP3WxP1mzvKzAaYQ3Vze+A3hHQJcMP/uckFz/ndLtV2oJ/ZONiUShE89z/ieadaMO7w3+sTF0cojlpy3+j8uY+w6rMXass+Na7c0z3v7MJNH/WeMGDGzDWNAce1Q2TryJoYDTtVpNQiEsW9fszpBnDYBMhDfe84UnWn/wnqEwgBzlqkCF7KaqfodYTZJ6gegkqV+0Og8okKhWotAf8a+fBRyvM7/8FzKxsJP0Vel6DsWy+9VaEFBx55CZ+7d6/tVRiGq59vV6PwWBAlmXXhAuBVyh8WwakutbNWKobr79droW74lqW/y7qUg31aRzm+5BiYZiPf5mqT1w/qxiBrmddADaux2Ugt4tJ+IOC4JCWnePS5bO9634ZAxKX5Q9Ngu4TiUGStZZXX32VnZ0d3vzmN3Pbbbfxsz/7s9dIXK213H333Xz91389Tz75JL/7u7/LZOI44+/8zu9sudq4AvxA6ff7SCmZTqcHSl29brcvvAfISZJgraXX65GmKd///d/fWc48zxFCtKo4DzzwAG9961tbPfXPfvazSweoB5q+PN7nt+/MV69eRUrJ5cuXmc/nPProozzzzDNcvXq1Td8Yw9bWFkIILl26hDGGhx56iGeffZbZbMaNN97Irbfeyq//+q+3TMPly5cRQvDAAw/w/PPPt+pIMTiOueG4feO2Drnz0FViGDZ2oejD++uuw3pWSdbb76xgr1DsVQ2oLnvt9bjK2W3ux2XePO8x1yl9VTJK56xlBWvpnFE6Z5jMOZKOuWV4iVFaMErnZGaXl86e5RcvvBtTjrnp+f8Xs93zXLn5TzG768+w/vT/yOjcr1yTRyEEYjZmcsN3sHHmHzK8/GvXlCXmwC2QTp7m8oP/HVLPWXvpX2HVEHV0h9nwYbLieYS06OQoOjmCVWtY2XeSMFM2gNvBWQfQZAu0pRk3ILtwB6S0ruBU42+5h1E9Z3Ca9JymTV05tRmtkbpA1WOkHiP1BFGPUbJAypr+7tPs3PjVKHOVI+c/gJQ1QhqstNhEYfMRRX4zu5vvQYkZuX4ek/Wpbr4dPViHK3uYXTAywx2W1IM6wbmXFE7kXFhaVQx/rI6XFsc60oKmbIGksgHcznOHdfrXqfMCIZpflVnSBJSy5AIyacmNIdGWzBiSWnPuSsp0orhrsMXATNElFFXK3CTMdXMgj0kpTEJpFGObsOtRqBCNclGGrXqgZyR2Tllq5lNFIiuK2YzaCpJ8iE0G1CKntJJSJxTauQtMpJNoOq0Vicn6zi2eUghboiiRjLH1jGq6C7qA4488CTifdx6M+30P0fjNhJRXP/cuVMLmidMkWY8ahUZRC4kWkpnsYaRAy4YJ9HOxwYFb7YxUif6sZH8Xx+uve5+Zvl39tW52BHQgRfa8k5/6292WYKfAM36eCSybbYJ2Omt2mvyuwzng/b5tcG4DM7NvU+DtCwYGUvee5iRblEWoGtSc9O6C5PQMWyZQSWypsGWCLVTjN7zpvzONnczJ6seo1u9DXDkHr7+Leu8Y7Owips3fbA8xOY+c7SGq+b46WQCOXHdavXAfBpAeVrq5Ki23ESMRKmt2GRzwNzKluOW7MNkReq/8LNZWjL/1n2E2b+fzv3yEI//434LR10gbw+tOKbGUmNe9heprv4f+Z36V3vMfR+UJopegeimql5BtDth4440IKZhf2LumXpYBOqUUWZZds2N9WF/fHp8sk277MnVhmFCQtaxOVtXLMqwRxhGXfZlU/DDl9HGH58F05TcO738PqtNVjMSqXYCDKC7ra2VQDgofS8xfS93GZXwteXtNXly6APJTTz3F29/+dsbjMefOnbumkqSU3HvvvVy9epVf/dVfbYG3P3K6Kw0fRkpJURQtUA/DLKPYojaMuyxLyrLkgx/8YJv/ELzu7e1hraUsSx5//HE++clPsrGxwd13382b3vQmZrNZK+0PB1/IYIRcc2gsGHZcD8pHoxFXrlxpn8cH5GxtbSGlZDgcMpvN2NjYAGBnZ2dhEG5vb6OUYjAYsLOzc80giReBuL7DcLFqSKgn3zXpxNKA0GDVPYNZLdkte4yrHrtF1oLt3cID7GwBhE+qnEQaB7KzgrW0CK7nnNjYY5TOGSUNCFdThsmcVF3LaMTlFUIwnU7Zkxd40+W/xu7uLoV1fWzt+X/FkTP/xn27ZFCtn/vXjC79MjrdZH7kbeh0E50epU6PorMbMNkpdHYcnRzDJGtYNcTrbpMY5ne/HQ+4EYoiOQ6Nf2GEaSRVE4RKsGnutteZIsQYxRTBBCXnCAoQpfP0oYxTYVDCHbSiZGtAapNGiqcSrGr8pquMWp3Eypv2j/mmOXbcsu8w3FoqcyMX7npoXxXCSOdxpaZRDrfo2lBWN8IOcJ4GXJ2g3ZqXwunbN6oiQgmssOgGZ2U96KUwzCyjHmwMLBt9GOWwnlvW+tBPnVqLNTCuBFtTwdYMtgvYLWBcwqy2FIWgnOK23o0zIC0NzWFC7KuPLOj+A8ry++PT+2DRK7F7AwWa72Df20wDNJ3vdAXJCMzIGYVawVWvH+4P8vFdqlE7kKkhUxV5YumnGmUrdLFHOd2hmG87OwFZubCJRKUJtTTs2B1IFdz1yDlUKzlXCBIkOZIEQYogQ5ByfBNUj+loDSNzjJVUJoFSQmGQzV9SaGRZI2rHONnGUNQ0nmqcbYI3jBT7uvFKNCo0dl/xX9MAdho9eYuI/qQ1zQFRDSMvneEljWGpEU7FBWFJhPNmkw9KUlmTiYpMlihTMN+7zM6Vl9DlHKwhmZ7BzK5gix1UPXW7WhQoVUGqoacQowHyyBqsH4H1DVhbg8EQ1jYwR+5H7oHYu4rs5dDLEHmK2EgQmUJkPUQ6QKQSkQjv5KRxrnIjQn2nMyrF9VfT+KX3h1JZI5tdBLejYOvG0Lg5+oAaTGWwlYFKYwuDLStsWcO8OVRoNsfO5jCZYqczmEwQe7vYvV0Y7yKLCUz3kPM9xGwXMdlBlu7UUnR1rX1yRKLJvNAFQgduk4Hkqb+7MLce+Ymvoj71etJLTwF20ayBa0EmdKzj1iI//zj55x93h8guydfOZ85x9O23cfFXn15Yj8K4ffyekiRp9c+71ucw/DIAFa91YTqrpLpdeKUrra61NI6nK46DpMdxeUIV0FCg2FXeLv/uXYB6mdBtFWDuul9WL+H7sM2/1N2HPwgdhD9XAfA474ehQwN0n7m4oZ566ilOnjzJiy++2NkRrbUkSXKNvvbrXve69toPlrquSdN0If66rjl//jz33Xcfn/rUp1ZWfhdQDp+//PLLPPzww0gpuXTpUifnGQ+Mvb09fv/3f5/777+fzc1NYFHK7O/jfC2zADbGcPLkyT
bumDsP83Py5EmstUyn03YXAWBzc5MrV660YY8cOQLQ+lAPt6Dich1Ecbm2Zhn/39/7Cgqt+IuPfJQTgxm7Xl2kUSe5VsKdNyom7pm2glFaspYVrGdFK+UepXNuHGyzdqQB4VnzPJmTyQopF7cbuzp2aJFuzHIJU8yMbZtj/M6N/xh7ouTGF3+Uwowp1VFM/ybq/BS6dwvV8Faq46+DSiLmBVbkOAVkaJWIvc6oV/3wytSJdafkJBqSEpHq5tpAapxxYGpBNceiS0nrPg3lpKe+KHaANT203cT5QCdIV7h0rXUrPDhQby2iniCMdgZwlYbCXQtbIswcazSiFqAFde8EVvSRe7uIosTIPjZbg9piS+3K5/VupddjFoFuLvunStJUhxD7/tXF/nOlDIkUgUGnA7sTKxhP4dWxcEJU432eNyeCCqdrnirnen5WOj3zUxtwdARHB4JeYuml0E8N/dTQSyyDxNBTmn5S01M1mSgReo4u5xTzOZfHmnO7KZdnOdt1nz3TZ6pz5iaj1Am1afx4m6AsogGjiUX0rJPg9yz0QQwNrIHsWRrTAQfIagGF20FwHl4U2iTMDez4dtRHoL45UOHAAdwKp3JSWeg3xo6/y9c33lacYahmH0XvO2qXcB9YmHkA7dtOuXurJFpJjNp/b3OB7btGU0aTWE1qKzIqepT0mTMUM9bElBF7rKsJG2rGmpwykjMHopUmEbrxs+7moGmd8Xvb93Fz/zw3568ujOOQwW93ooRibhS17DG2I4SAwqZUIqMiZ1oLXtrdYjp5FWtTbDrEbN6BPv46d9hWOoRsiE2H2HwdjhzH6mYnR5eIauLacmuKfPkSohojqm1ENUHUU6gm7rr9mzr7lIX7CfWdb2H+5d9H9rv/iv7Hf9IVqpchezm2lyF6OQz62NE6bByBjU3HGIyGiNE6djhAjEaIfg/R76F6uTu5tJchhgkia5iDtA/JAJnKiDGgOWyomZpMo0bUMgZy8ZlnELRjxIwWjechi60b5qDQ2Epji9odKjQvsfMCZnPHFEwn2PEUxmPEdEw9fyNiNob5LmI2drsFzZ+sG4PTukBYy/Srf5DZN/0l0id+hdE/+8F2egjXwOruN1M+8rXkH/u3JK+4s1Gmr+wx/eAzsNMN4eO12BhDmqZkWbZUuhsCv2UAtwuch2HC3653yyTFh9k9WQb2V+Wlqyzhd13S/q40lz1bVV/xNzG2icu8TJC2LO9xuIMA+7IyriprmLdV+OkwEvFldXUYOjRAX8YRbW9v84u/+IudBfFc2JkzZ7j77rt517vexdmzZ7nxxhu57777rtGjunr1Kg899BB33nkn4/GY6XTKeDzmYx/7GN/6rd/Kt33bt/HEE08wn8/Z3NxEKcUTTzzRmc9Qn9qnc/78eZ5//nne+9738tnPfpYrV66glGJtbY1bb72VX/s1p7Lwjd/4jbz00kvt4UK33noreZ5z7ty5Nv73vve9DIdDfvEXf3GpzppSive85z288MIL7O3tIaXkxIkT3H///bzyyiutOsv6+jpvfOMbOXv2LJPJhCRJOH36NHfddRfPPfcc8/m8zf94POZtb3sbTz75ZKuDfv/993PmzJnW/WJMqwZRWEf+LwTCv/nyHTy3cxSL4K989BvQVtFPStbSglFaOHWSbM5aWrCZT7h1dLW99yB8mFZIcS0zE6rAhHnx6de1+w0Po4oZiPCZtoJxlTOu+o5hqHqM6z475YCr8yE71YBx2Wdc5eyUAyf1VPDsnT/p93Qd0G0rzsJcQA/sMHE+lgc49YxEtQCntSNVDcjC7huANoZdFvYBt1+OGnC83xjGgWxrnMqK0c3JfbWTetnaHfPt9V+toFQPwKvOgFWYGTbpw6aATGDrEbY2+9LMFvALB56F2Jd6apd/kx7dlxorYIg74SfBMR2ypjk5qFFXsPtS0prG7aB0AMgkTZgSaZ3kc2OQcLSfsdFTbOSW9Rw2epb1nuVIz3J0IDjatxwZCI704Ugf1nqw0RP0UoEQoK3glz8v+K6/L9EIrk4tr38DDAeGpOdUFeYWLlcwngjGRcJ4ljCZ95jPBWXl3DEaL1GHBWm6UNbpsWc0h7UAwqCEcSYKxqKMIEFAbaiLiuJqga40GIkUCUqkWKvQtaDWAqcP4vX33SmRzoDPnQTpu4e1Dly5ASKaXQux3z/97ovzAJOTNoxBinNH5FScsdJZHgAGY6XfGnAHQxk3Hq11B5DWTi/cmMSpQWlNQk1qnX56Jtxv/JdYjUYxESMK02PH6o5wTke7NgmlTvj1C29lq3RA+4du/ymOp1vO3DNivrUxmCylUoIySfjXfAvn1Wnu7z/Do4NPItIakYJSksGOZu3umlIZ6Ank0GKyApMXkBeIfIbNJoijGtYM9TSl+NgZ9G9/nurOd2GP3gGsIZ/6fYTeglEPm/axyTo26WGTHJIcm2SgckgyZ0yq0kbHO3HnCiCYfdnbmD38Yw3QdyCeAMiLaoIox4hXp/DSBFFOENU5976eQjluvnG/sp65+3q6AGJjaufQNEHkGfRzxxQ0v2I4xK6NEOvrsLbpdgxGI+xohBgOSPp9GPQdg5BnkKeIUYrIG8YgkYhsgEhGDVPgDKy9XbqTD4hrGYN218CfbOvuB0bQ11PMW78c3vermOmsUR0qYV5CWVM++JVYo5h9+3ez/t98M2JtwPCn/jvExoj5//3vUv2Lf9dZD2H9SCnp9XqdKi5d4eN3oRR5WZhYUNcF2rokwWG7+TCr1GD8N8vcMi/LZyg178JwB0mEuyiOZxWAXfbtYXYBDiNcXLZDcdCzLurSNV/FnMWMxmvN+0H0mlRc4m0Ir5IRd9C44j/3uc+xvr7O6173Oh5++GHOnTvHBz7wAb7ne75n4bCh3//932djY4P3vOc99Hq91g/6xYsX+fmf/3ne9ra38e53vxshROsHvYszjSs0fP7hD3+Yhx56iHvvvZc3v/nN1HXN7u4uZ8+ebQfIxYsXueuuu1o3jjs7O3zkIx/h5ZdfXqiH+DCfuAHrumZnZ4f777+fwWCAMYbxeMxnPvMZnn32WcANmqIoKMuSBx98kF6vR1VVjMdjHn/8cc6cOdOWRWvNY489xoMPPtieHjqbzXj66afb01e76j+WQof15d/F4Nxf37l+Gb/W/5l7P8l7bnmORC6C+HgS6+I8jVlMN6Qw3f1vBHOdNCC756T0Zc5W0eNqMWS7GDaS+z6zOmVeZ1QmaU9JFDTY0ThPJpmq6amSQVJwNNkjr69y3twCwOaVn0NdegwxOUtSXUbpLYQeM3nT+7j6p38cEBz7mb9K/sLjvPq+n8aOjpFceZZTv/IXEPUcqpn7recgEl757o9gesdIt77Iicf+CpgKW1dI2wBuq7G6RFrv4q0Gq/c1IKTCqHVqdQSdbGLSo5jsKDo5Sp2dQvdOUefHMfkx6Fv3JwVWDB3TAF7MDlmjc2A88N9X7rW+jwjlDM1aBN8wGh4Pmsafod8oUDhXKqmFTDggm+MOLsqtu8+AtG7cBjojxwK42Pw5/WbpDP68vnIJXPX3Yv+Zd
wfvw2rhpMk3AsYyN/Arz+NAsNdb9sVoO13z1x5GY/ftca1tTmq3CGHb0zWVsN55pT9V3fE0QqKa8a+1ZjYrYTJv1OMTRKKcHTECY5wSvRKahCnKzlF2htRTlJkyyDRHN4f0BwmT2S47e9tMyzkoiVYKo1JM2sNkQ0y2RpWuo9N117mlGmNFjRW26fYJBanVpFgEia3IjEFNemKgyY4kyHVgzcKagXUQA7BJM+4qHGNZCkRpoRLUFdS1RFQZok6bLuAMKm0lsc1hU6b0B08pTCExc4mdS8xMYkuJyA1iYBAnDGpQ03vbhH8++EZsIff/SoGdKxdf+GxPOn/2Ep75sjv4Yn2XUwspnRQYK7DHPTgUMGv+AITrEEIYx6+kzQJcDOGhO7D5yDHbQmOPHEXqLdATd4LotGGGdWMg2vyiHaOMacavrph+01/DDo7ATXM2fvSdzugzbST3yWD/urmnuTbrxxbfpcP2nf+ObOSY92rqdsXKEPg7Sb8H/tc8q8awM0FcmSLrKVQX3bflGFFNQRcLw+SgHeqYrLVOpa6fI3uLTAF55kD/2ghGIxg6FSI5GlG+5evg+E0IPSG/8DnoZdBPEZtu50D01hievtxsztUk/+pvIY6MYG2IEIL0z3wD9b/8pWvWEr+G+3VQKUWapuR53u7OQ/cOd9caeVhgF67/Bx200xVvHD7OQ4ituiTpy8BgbOjYJZyL89RV/lVlWSa8PczuQNc3IaOybFdiGWPSJeU/KL8hxULj0AXlQfEcZjfhtXoMEqsQvhBuyfnBH/xB7rjjjgVJpgeo3lCwqxOFp33GAycEkj6OUAfbU8wtdqXjKyPmZmLw7OMKO2p4HcZ5kKVyGGd4HZ9u6su8rIxd3GM84OI8rvo+jsPnNaRY8uwBuc97DJgvztYoKsGNw62FMsftGscfdkgv3d4rc/Zqp4++U/a5MuuzVYzYaQD4pMqZ1RlF7Y4EV427MKxofQAnsqanKvpJ6aT46YSNbMZmPmYjmzBs9NOHyYxROqefFO4gzSZfZVny0ksv8fGnd9nbvoocP8d4PKaqqoXyWGspT98DVUF+9SwAJh1SHruf/NJnEbpcqKe2X/Q2KDbvpXf5c1hduAOEkk1MdgyTOJ11nR5F926k7t+Ezk9i8k10uo5NB05KJy0IHUh2pfuDBmx7xCloDyYyZeOlpURUY5TeRpoxUkyQlEhRIKgQogapodHrRSpMPkT3jyBnO1iZYZM+9dox0AprJMgeRuZYkWFJm9/EuaG0SeMi0Pk2FLVtrm3jZhGwAiGVYz4Qgf6yk+D709FpStUai9rgmqa4jRqGUGAzi2gYBDOwjlHI2GdUvLvCAnfS5BS3KzJr7kuxb8yogzQ9k9LqqBP4Lve/FpE4gNv6SleCfYPWdkQG5fEMAq4NOyT4rWGs96WuCOKnPQTI+193kkzjfmVzLa0Vyl8bsf+e/cOWGobEjVXrAO5MYKcSM1XYicBMJXai3O9UYiYSOxNOVSe1kNvmlFDanSQbHCHfRN6U3+6XHV9OJ4VFOnd/QhhUoxaTUqFsTWZLtu0GRTVk017hFvUqovGPbrRlMim4cnWb2azGn3RpSTEiRag+JH1nJK2cZBzVb8ZR5YxtVQKmQI4vOA9M9Rz0HFHP3G97XQTXPtwMoQtskqBvup/k7KdR22ddOF2AnkE92w9rFxfoZcKkBRIS0kEL2E0ygHSEzRZBvLseYdPBItBP3XMWGIEBJH3HYLRgf7wo6feS+1jNJ3hGOYZyvC/pr8buvVm0RYrJCoE5cSfy6suIurhmHhVCUN3/Dsq3fAO9j76f5MznEEfWGP77f4i85TTTP/ffoj/40WvjjXBGmqa84Q1v4Ku/+qu57777WFtba8OuMmTtyne4Ji9bt+Nn1zTlAUB1mbrrKul6jFnCd4cxhF2GbbrK18UYLIsvzvOycPH1qvi68rsME8Z0EOMVu2gOcU2X8HFZWQ7z/Ld/+7f59m//dh/30sK/Jh30EPCG0tcuABlKWIF2e6lrAgqBf1xJnsKK76qkrrhD0Bzn31Osrx1zv2FacXnCOgjz3MVJeZeRYT4OQzFTsapjxIxJXOYuoB4D8p0i4bNXbuDe9Qts5k6n/dI4pzIJNw63G2bDUJh0Xxe9ytkpe2wVI7bKIdvzPjtlrwXb8zqh1o10W1iwtGBbCU2e1PSTkmFScEO+zcbalM18wmY+YZQ6kL3RqxgmM/pyipL7DIbPux9MsWrTfp0T9QNJ2TtF2Vdku88sxNdOkkB97EZEMcVcPgNApU4yT95EvXkSm65TZ6eo0+OY/BS6dxyTHsFmRzDpAIT3fgDtAS9eHCugBYE0YBwNokaIGUJUzTcKwRRoVEsAIaUDIiisyLEyb7y9JFibYxmAPYbWtzXHbNoGgIbqLsKpoTRH3GM0zBvQLmqErKFQCEqkniKokUyBXecsRLjsCyGc5w+UMzZVKVpm2DzH5hmIxs1KIKlW4E5Z1GCMQFlLIp1eeZJYkqTRLPGA3YLWglnhBKK2OYilrkBXDoCbCme8SgAW2wOAgOYESa++IvvWuX5MrNMjT2wLuEViMaqZhxoXgLZuTibVTiXEexyxrfcSmpM3m2Zt9NNJrZMe9yyi76TIcmCQQ4McaFSunf9xLGiNNbb1WCKspVVxaYx0rdeFt83RoY1uu6mEtbW0tpLWlsLaWjYuDAV2ahI7l41RqNw/FXT/MKN914WSfQak1Ve3+wxVH6f21HpSaVTdmyw2M5H7Mez3b3/p/Wn7qcjXpxXo5vTZumGSZuGuR/P9lj3Glji2H5+/2ADWmw+s/zNYYxDGOMl3pRs96L0GfFbQSsLd7pZzCmOBHGszsBuOwUhwBthCuOx7tzZCOjeJ6QCbjyjv+Rb3XCVY6d0fpo7pFriOaiowjfGlaUC/KcDMnRGnmQVMwMy5b9VTB/DrKdQT96yeI8o5YraN1AW2nrpner7IQNhr1yVrLeXxr2F2518mu/orZJd/wQH5/Ah2eLIxal+U7NvBCUxyewD6h44hSAZOz78Jj0odMxMBe6ousO+BvgP3sp7uMwmX9uh/4CcQ9RSTDhHbY8bv+LNtGYQQmGyN8Zf/91iVs/bYX0LMLhOSEM4HepIk1wjHvJGkD7dKShpjjnBN7RIsht/Hdd8lzOoK79NYXMe61WeWCVq7JMJd+QhpFbPShe+68rMM7If12UXLyhfmJ26r14KnuvIb0zKXiodJY1neDsWId9ChALqXrsaVswwwL+uMqwBsDKbhWu4vNAj021Ux59TV4bskynHY+KRKn89VUujDcJ5d5Y3fLeP+YubgoHoPn8X38SAPQWx4/zc++bVcno+QwvB1N3+eF3aP8dnLN4GFXlJhraDUgXS7BdsCgSVPanpJxTApOZpNWB9e5kg25VhvwkY2ZRTopzvPK3oBWMf1HPpj7Tp8yIcJw8ffx2St5TP2HXzq5m+GG2Ht6EcwkyvUagOTHMGmm5hkDdPfhKzvgMDdXlotWDhZcgGYgAMuIjhS3e4b5QlBe2qNCMJ6A0yZgMywwmKFBaucyoc4ivN93Xxr
cEgVnBSsEu7eFE7q2pMuXVO7NEzqPKwI2YAtGeS5CSNNI5CXTkIu+y4/gLaa9jAVr0ZiGiNZ7zbP0LjSayqmOUDFgZ85wlgSqUhE4sClFu1phxqBTCw2BZkK0tSSpzTGntDLLC9sw2TPZfor7jYcHcAvPC4RBm4/rvmed5T82pGaz57SCGX5CxPDnbOaQVFSlyVX54aLVc37n72BF4shFrhj8CpSagqbMi8zinlGoTMKnVLVKUJY0lSTZjUqM6jconKLzEFkFpsYaqmpqLGpgFRik0YBvBbOGLQSeLBsa4ndSaiveq8eDaPU9Bvnh96pYgivQi73/4R00mljbVPXaU0pJKUQjSqQpMCpqczZ3yXw+vSZY0acLYF1vFPux0TTDxODSgxKaZTUpKoilTWJ0M14N0hjsDXYSlKWCUWZMZv1qeYpAzNlPZmwljTMtZoyUDPnaSmd0xczUlU4gCukcymJoKgNiAQjBJW2zXP3rpWni4QyyShkj7nIKURGITJmVrA7r5hp0Rw01HPgWCYtI+sK69qG1GKzzWaCEPt91/fjZmjuD2mxOMZFM5bb+2YusridGt045m8ZBQ12us+rWHAMQK9hCJp+YH1kniHzY65pm/B7v4PWxue/t/t5t379bPJi7P64tAZkD6xkfvcj1NvvgnpCfdtXQNJHvfo46Yu/4cD+bIwcXw7A/8yp9QWMgNBzt1Og51ir3byUjZykfwHQN6o9a7dR3vYNyOIq6urnsUeGHZJ/txvg7xHyGhUem61jRjeBNUx3zzD6+F8nJCklSZKQpulKIO2adTl+6QJqq96tineZAHAZ1liGCcJ7f+2fH2QQG14vk3QvS6PrXYy/luGyg7BSV5m7mJZldeOp68ChgwB2iJFC5m0ZmO7CX11t18XIjMfjlXnxdCiALoS4xgVg6NszBlYxBxI3VswNdlVcl3vC+GCimGno4tR8/sPnIcWds6vjhP7Mu8B9WE9ddRCn1wWqD0PLJol4YC8rb5j3UDUpVG25UgyprVvIdsscL9y1VjDKCh469gqb+YSj+YT13BmBeq8suaqRsltaH9KClNqulgD4fHcdZuR/pZRU2jJt9NUnVZ9x3WNc9tmqRmzXQ3brIRPTY2J6zEzGzOYtYN07+VUOGPmTILFQlw7YDIAMhBkji8vo4e1w1QFQNXvBSZfrCUJPkHqGxTB/0zfCMXeYUXr+C1CXyLqRjtVzhC4RukTq0rkxM6WTlAV6rkYM2Tnyf4GLCqwhnT8H2EaqCmCwMqM6ci9sNAu6MQ6Ieamx9wTjF/DWT7rd965ilcPopdhX8fBqHm03CqaJVk2hObrThuLQhgJ/7O4kRpdMJdwf4MDiunX+yhvgWGcGk1jmqUWkxkmWU5dXfZdlkLh4HhdgtaD3TQJbSV7Rgh+bCOxOhj3jdJN/rGr0l0vhjHEFTlKe0ejswxfFzc6dZqteIxpGyoKyWInzFY5o3AGCnIOYNThaW3eE+rzGlhYqi9QCWSuUBmEcAyFtU29aOw8ZtQVtGmNaZ1RXG4vWFi0ESOX0ev0Jln7bwTOFbT2LBOGV5HG2yN6YNxWCNQsjDLWWzvhTILw+f+PWTzRePIR2+cM6QKyFBJEyE31a2wOx4q95PxVDpnrIeYOTzBN9e1A8bZ+zrdS6legndkGVpr1vDbWD941P/P3dlOa9MPvMMzRAF/hUArsN42r3s9Be+92AVu3K7oN5/0wDT8HKZdWPq2vmuwBYt9cLiD6IgLCiaE84XQOxcR41fxpkc+CRylpJvm0OMMKfmppl7W5effSNTvKd9FwfOP4AyblPYpOTWOVVhHKs6jsGSPWC6xx8mKRRH9JlsAMwb+a9Rk2onqGPP4zNNjC9gv7v/hjJucf2i9O1boNTVVoA8EPKW9/L7JE/D9Yid55vvwu/zbKMNE0719lY2uu/XwWaQ1DbtbN9GCHdQQK+rjoI4+oC2nE+Y1WNrvDx/bI6WpbfuF7iOFaB27jMXXUTh1+Vx7C9lgnmDou1lmlSxPEdFG8X82St5fnnnz9UPl6zH/Tw3oNQfyhPlzg/dnsYN64P46X0YfiQg/FAMm6oLmOC8Hn4zP8u485iOogzDsH9QbpWoRR7GaBeNkF0TRjL8hk/i4FtCHbjQW2t5Qcf+Ai/9NJDvPPUM3zFqaeZ65R/wTspdMr3PfBR1rPZNXm2dt/TyjLAHZdxpdqRhWmVtX7TJ3WfvbLHVjXkajVktxow1v0WbBc2pSZBKIOUppU0aiRCWDJqemlJXxZsyjG3yDF6doUXihuxxS7HXvjn1FsvYKeXEMUVlJmBtej1E2z92b+FKKcc/am/iiwmjG/8biY3fifrZ/4hg8sf6qz7Pf1xpo98G+u/8Q/oP/0bnfUQXvu+3ba9UJh8E3s6Y3r0qxjMPowU2+jBSXTvGCY/hsk3sHkf0Z81erUSdA/q2klQJwZRlmBU495DNWC6QeCCVqdd1CWiniPrKdJMkHaCUDXV8VtI5Q7D3ccR2QyRW8Qwb9RXeti8h8n72KyPzXrYJMMmqZNiSoUVqjn0xrmS9J5MvBs4U0vsXGCL5hTJKtk/OXIuFg067X7WgQCw2v3fBltjLdaKxrDVQg+n4tKz7oRTf8rpmoGRdkyAALsrMZcV9rKCHYnZczrXFNKBL990CwyMclxDo/ZjG56kFhahHOMh/c6LlVgrWzUV3bi6M1rsq3F4iXl4SFMiHJKWzR+VA5qDI9ttTkQL0RVCpAgUUtRIaqpZD6FJ84QkkQ2/ZlvJrZdUW+u8b4TSWmGcr3KMaTx2GLfh0wijwwOlrBLOOLbxaGRb8CxadZ9FHXoCZm5hlOwDXn/QkT8F1D9rDz7C1V0RP2vCeem41/9vyo5sOpMSzm+nMFBccSeF6rJR0ShAl5iTD7tv7Zi19/9pxGTX+RivCtBzsFC+7pspX/dN9F7+CZLtp2A4wPQHMDqCHW06zymjdexoA4Yj7GANhgMYDGDQh36OHeSIfgb9BHoJoqegJxFSYEvr7ANK6U5anQmYa+xMw9xgqxFW4/ye72YwmcJkAuMx7Owh9nZgvAPjbcTOFmK6DUKg1x5GbX8cNXkGpGH8Xf8b+tQbGTz+18k/87+wijrBlxBO11/1HNhP+g2Y7zUgv49VOfOH/yL62MOu684vXxPvNQANGl3+GWK+f3ZIeuGTpOd+C2RGevY3XPoReT/oy9wex7ZxMfBaJozrErZ1vYvn/GXfhWG66iIUSK0SaPlyxWXsyuNB6a+iZSC6i6l5Lc9XScljOqgeloU/bBmXpRP2jS4BsacurBkKSA+iQxmJ/sAP/AB33nlnC5q11iilOrcRYiOH2LB0GTgNO5AxhiRJ2mv/PqyIWAe8q5LC9LoYhxgkxnnpyneY/9hCOqq7a/LjgWx4H8bfxbWHXPpBnbaLEw3zEOpnC+EMV8PO4q+7VJq6+knc/l15MsYyqxMmdZ9x1WNcupNAr5YjtqsB2w3YHuu8BduldXrbyhm7XQu2RUVfFgzljLVkxnoyYTMZs55MGIoZo2TOQMwYyCk9Wbv1PwDBWmvOnj3L008
/zcWLFymKgr29PaqqWihHzOV3TbRxO0KDbdQAq0boZIN6eAO6dwrTP4UZnMD0jmLyTUy+jsnX3JZv2nMHEyUpJEkjcbUNSLVgKoRtXC0K5wlGyAZkNaJTazKMycAqhCqRaYXMSueSLtfuRMWedcfd96TzupI2agA2wVrl1DCaQ2psLZpDVKQ7Ctwp0SKEdn+2RuLyhS6RunaOyRsDUVM5VQhBglI5/d46KulTm4R5JZhXjkcYpTDMLWuZZZRZ1nJYyyxruWUtcwcWef/mmbIYa5nVMNPC/VaWuXa/s9pS1O5+XltKbSk0lM1foSWlEdRWYdhXscA6cLlgrCmCv9DQU7NvXFoCpd3X4Q4NTv3JqK3RJ4T+xxdcdHqw2nYgFo1KQ2GqdQyF8/nItdLlxoB1QZrcvg+ee5Ds9cJb2wifF7GYHy0Wy+nL2urkg2hBMfsMVaOnb0M1jkalxyuySIw/35TE1iSmRpra7T5YsFZgjMQYRW0UlUkpdUZlU5SsUYzR1VWs2UEwRsgZQsxAzkBVkNTQS7F5hugPsL0+5D3HbA6GzlA0S7Fp5owh0sSNjcT/SfYNd5t6acvYjFVtWkNpao2oascwVxWiKKEqEUWBKAsoCkQxRxRTmM0R8ynMp8jZGKYTxHQPMR07RqCegykR1IhcOTDfHyLWjmDXjmDXNrBrx2G0hhikMBo60D/swyCHQQaDDDFIoK8QPYnVFuY4BrnxuMNcYGfGnY46rWBWwaTATueI8RymU+x4jBhPYLIHuw70i71txN4WlLtuR1G7HUX0GGmmjapdRMmA8q5vQ249TXrpk52grVMYJfvunZldGyf7a7v/djQa8d73vpd3vetd3HTTTQu78CG2iNM8SCAXhlkG0LvCxWt7F2D034c79+F6vApHxdddmghxnuI4DsM4rMIfy/IWr6fL8M9B6cZhw/r6g9JriWMZg7LsWYil/sE/+Af8zb/5NwGwf1AjUX9yZRNZ28ljffH4Pt4m6JIQLytQyNmG0uewIVZJjrvAapiP0NtKV6df1mlCLjak+NTQeICG9115DFVofPy+MWOA3NU5/fNlrh/jeEKwHtdJCOy35j3+h8+8l1In/ODrf43NfMwvX3yY37zyOh4YnOMNgzNcrUZsl0N26gF7tQPbU5MzNwkVCVYIlHKSN6fCHIHtrGRNzbgxucpGMmUzHXMknbKWzOkzYajmrCUFudg/vCjeGYgZhf17gTHXtuGWPs5vHfmLmOGUW5798yDGoHoYNcIma9hkhL3lTswP/znsnsb80w9i5gnTe7+e+tg9JHsvI/UE21/D9gZYD7BV6iTHrV6pAGEQ0iCURjSHFclEY2UjNUeijMRq3RjNaXe8ONJ5QakFaHcCaCuV9OBMGXfwkXJ/MilA1shGh8E2BnAYjdUG9jRs+wOLGlBhNFV6M0aukcxeRtVbaLlBnd8KtiatLmOFQidHsSIDU2OxGJGjxXAfQLrC0BrCpjQ6zk6vulKWNBVkCQwTy2Zz6FCmIJGWVLidj4mAz25LxoXg/mOGG4aW7Rl85oLznf7lt9SkyvCJVLOlLO9AcKuybGYGYTWycV1ZlRUffOYolyYZX3H6DDfkF6jLOaaRjArrfJsLY5jbjHGxzkSPmNZrTPSAvWrIpO4xq3KKWlHXEuNBt5dySxZB8dDu63v3GjAsxD5AD73AeDWNkDEIJcr+z0Irsvf1bEQoXTbO/SSWWlgnZRaWCrcTUZUpRiAa1TXrjU5dREiv8w6NxyQcw9KAa2uEM0ZtVN1aI2NrGik7jmlor2HBY4sR2Ob0UBeXbQG7tgptVcCERGuCsPuMg2CfmfD9TIAWKVocBXG0eWcX6y/+8/r97TYBDUfqKtEZVWoEM/wRn86AtNEHshqBxuK9IDU7MP4030QEOwcCm0psIhCZwqQ9SIeQKOxIwVHlGIJcOXePWdK4MJXNPc05BE0fsk3/8IyasY4p0I4poNKIuoaycm4id0ooSkQxdkxBOYdiDnWFGd6CHdyEuvoZ1Ni5ELajYzBYR04uQirdAUlHRogbE8SgD8MjiGEOw8Yt4iCFgXKnAxcWO7fYucAUSQv+mVnsrIZpjZ2WMHVuSdGg9/aotrZgPEGM92C86wD/eAcx3kaYSQv4df8Oxq/7nwDBxme+lXT3k9gsxXzvdyKmM8RP/3yjPbS/fiul6PV6S1Vc2m52gNR2lcQ7/qbr25BpiPFQeN0l+OkSEMUYpSvdLuoygFxV1hg/hCDTM0PLwsdxLxNoxXXhaZlArCvMYcq+ipbhuVXU1Se6JOtxXl8LHQqgj8fjVmIdA9eQ04sz6n/9dSg9jgHxKgAexu/T1Fp3cpJxHrqArFKqswHi8i0r77Jdh1U+LoUQbZ59+vH2Whw+3H0Iy7CKw10FwGO3lyEoj0Gvj+Ojr9zNS3vHsAj++if+FAbpFrdjgk/O7uST8zsbsK3JRcUgKxmqOafVNusN2N5MJ6ynTuI9VDPWVEkuK6Cb8VjWli6P+/f+N57Iwp2ROE7/+6nZuxlnt0Bf8PRbftmpiCQJiBJk5bbyBxnyi04v1Xz1w9hSIVIDCOrjdweLfiNVk7bZOsdJWL0xpcZJpgsJOoParaeYALgp2/w1SstSI9ICkVUIWwAl0laI2nmBEHWJKJyfZmGaQ41MhTSNcrGu3AmiRjdeNhIQGVakIDOMyNxWtMzR6QY63wQrqdQdVOkdDgDMBdQplb7JgQCTOOCme05HHwn3CjhinWFcVSLrClFWyHIG8wlitkdWTdjMLQ/dcYqH7zzFbUczTo8Ep4aCIxmspTBSDocAfPSs5Ft+rk+pBU/vSu66q+LxFxNme4KZhcd3FJubhnO3aNL7Cj5WwOWpYK2C4VyjpoZqKjlzacgL233Qgg++eCeJvb3BWM3hKYHk14FuFqXOGQ5w93Bge6ARQ7ADi0gDLOldOTYSZdGeAioaXW/hlO9bybLYB68egYf65bG0HONUTHQJ5Qw72Yb5Htz4yGMIMUVSNgg7hK77MvoXfvcrUSmjI0fprW1QkzSyaqcaVtsULRTCGndAkXtKKitSVZPYilRo0uZdKmqU1Q14x0m2tURrRa0TKp1QVhllnTI3GYXJKeqc1JbOYDSZMVITRmpMLis+efH1VCbl9Zuf5x0nPkFPTsjEjERaag3u2CSXMy0yaptQ2cRdo6i04tXL2zx35jx7hcKkG9jsFNWpb3BVUF1B7X0Oq9axaghqCLLnmM1kw7Vf7saHU8HS7Bs5B/NG2yYB2m+NOINrHyZiOKxnBvzuzDLGIwzT+RcycHbxF5yBuc9SB0PT9rU9CQnUN7+H+mjlmIJMObubY2af0QA3BpKmz2oLpYGpgYsGqrpxr+p+rUyhv4EQFrn3IumZ32t2ITJElsGxFPv6W7AnTyKFJrnySpNu1gD+FNFXzqPR3GLnYGdAkTCcT7Fzgb76/0Zc+F3s/ffAww+C1ihtsO//hYV1vdfrMRgMyPO8XTcP2vk9SDoc44oukLhMWhxTLEWOaZk6S5iP2KYvzF9czlVgPhZYLquTLkwVft/1Lo5rmfC2K6
04z8uEvl8KLdtZeC3uKeNnf1BQHtKhvbjElb2vc9xtKOC/6+LAfMbDLRwfPtZZj9PrUvfw4T0485W0ymq7S2LepSMeA8FlAymurziecCB56nKdFOYl7oQxlx12cv8sbKu4fr1KS+jzPCxb/L0xhjvWzruDSRB81z2/zZff+EV+/Mw389L8GBvJlL92x88xVA5sh+WPJ4hlEoC473h7hmXMTpd0obKKsckZ65wJA6a2z8T2mNJnbHpMbJ+p7TO2fabW3Re3ZM22NKSzC6jxGcTORdT8KmJ+FVVsIx+8AfmN34zoG+Tf/qeYJ17gytf+XazaRF69TO8LH3JuFdMj6PwYJj+O6R1zC2gCwkzJLvwesriCKi4j55dQ80uo2UXU7DzJ/II7PMTUdPWouL8ZBCY/gs6OYHpHsf1j6NxdO7WZI+jeUUxvE93fbNRoNkBXqMKVSRVXUbNtsvICar6FKrehmHD5lr+KtX3y3c9y/JV/ws7oaxhvfBMIwfr2L2Gx7B37ehAJst4lqa9QDY5jX1h3QNbUkChMkoJS6ATH9KxbyhTGGZwtBf/+WdtIjcW+BLkFp/sgg3ucr/e5gZ/TEm40DTCAF+fw4lYG5zNmvz2CUvAJr3oRqljYhmlqjA3rRDYeTWzkY1y0TJI/rEg1/rgzaeilllEGwxT6BgYF2J0JV8+f5fyLz1HtXSU1Bev9lJNHRqwPc/oJ5NKSSu1O5JQOZs52r7J79SKXL15gVimy0Qnmts+FiWBvcJL65G3oE6ewR49iNkbuePik0UfRAqohdraG2VLYHfMVdk+6MmfWiJQSZScIrmA4S8nzdiI/x7//0a9k63N82Tse4F3vfCdSygX7HmstViaUVqFFRoUDwLVwgLgSbiesJqUWDr5XIqWSilplVFa5e5tQC43COKPTyjoVp9KiSk1VJv87bX8erM2S3/WBn8ys/dnOft7tLt19b9/eu9VqSSAEIvoOICyEwJJZHMCAIQzymIFBKAIcAWNsjwUxhMcYSzJD2MyE7UBmGTYBFhqxCNDaoqVu9d733vfedzv7Oc9We2bOH1n1nDr1Pue8b0uajKhTdeqpJSsrq+r7++b39/sxr4ZMq7Hzk6hxCaesgBA+G32Iz158oImoImkz1QpsE7SoeVc0oPeS8286zR2xYvJBXB4738Xo1xBVE7LQHCJMhjAF1dbrICQsKjZ/8t1u9Cu+jYluYdp5dAsTu7ltlrEamT1B5gfI7AkiP0BmB8j8CTI/ROUHyPwAdNoYxz4GBcIH4TkHThlcOl7KJmOpijAqBBmuDGgXkSbEqrBx9HTLyCbEqgzc/8JvtvWdIS4ah1AaGZtohm2Es9tsuOna50sGyszVx/Nd+1U1Ir3A+ttu+7gkfPzn8fOfxQzHkIyxgxE2HmLjATYZQjzAxgPM3kvoV38tVrrnROcaQr/xR/GxMkKP90ANochJ/vJ/S/ijf//qu0+IhrFPXKKjQYLe+RDZR74fIkt89E8R3jn2lXfRWiA29OkX3/dJkuQKg76OTPzlAKnnYYCvIxCvY8zXMbLd7/mzCLp+vdr91mGrPo5YF4P9JuD8vOdfB7L7QTduIubWGUXXnftXUm4yKtadZx1+6W5z3f9fU51u2kk0GvRv//Zv55Of/ORTgK89+bob0J+v6yDXadj7zHLbefoPWF/Dtc7CWvfbOmb6Ogu5H55wnQHRLnd18eCcU/qhJfvXclOd152n26FvArH9h6wLwNcBdHAAvqtHb49/mo+preRW4qROpRY8Kne5E54RyvrK/e86/K6zJq9jtNuSa8HSOIDtwLUD1AsTsTQhiwZ8LxvgvbQRJQERBQORkYiMAZlj60VOQuZ06bIgISURGaGec/HkTT59v+J0rqmqlIUZUPpbmHgPHe6go11MtIsZ3kJ7W2AtKj9GFqcIcryLN5uP8Ilbnx+h8hNkfuSwxc578Q8+67KF8vQLxAqJjTYx4RY6akB2uIWJNtDhpgPa4SYm2uxo1iegS1R+hizOUcU5Mj93xkR+5v5v1slyjtI5tkzdffETrBdj/YQ6HkE0wsRDdDTERgN0soEe7GJVCVGCDRJ0OHE6XOU5508/wCi/Yf48JyAXPaZhxTSKSw11yy53M4T2Hf0qGtmGver8p21P0ywu5SVXkgY1U9AB/FI6BlQaZGgQkXHG1tAghxo50oiRRgbOAKUU6BOFOfLQZwp7oTBzl6DHFhKpLIFnGChDpAvq+RHF9BEiP0JlR/jlEza8Kbsjxe7EY3sSMIgVkW+JQn8FEqy1FEXBbDbj5OSEw8NDHj9+TJqmT8kEtdYgBMIfYqMd5nXEWR0i7r4b9Xu+/68TmXfh23sCtoEhVgRWo6iEy/I5l9gLiZ039yk0SN+gZI1vKyKdM9QLJtWcsZkzkktiz43SWDzHVhufyniUJiKtE9I6JNUxhYkotMuRUBkPbRXGSq5IKkXzp2V5pUWIlnQWjZ7bGWYSgyfqJpOrXhlLK8NNCIwQIARaOsdjI13EGeupRuZVu1EsZcAPQCqcg61q+oOG2kUSoc7ciJAYIvMjZH7QrG9CB/aXq+Z/XYBVLrqJTLBqBN4Y401ciFZ/BxPugr8D1QKVHyCyg0tA34B6WRw28yOEKVlXnsW8Pu/6dcUMX6De/Xr8h/8cyilWCPJ/77/A3PoA8Y/8Z6ijL1PufDvpB/6fyOxthp/+NmQ9W3u+/vutft9vxI528H7+77mRvKa021Uf+QTL7/8fkA/vM/wTfwCZp9fW88p3rFkn2292kqD/+B9BpCnyf/h/IxuCpyXE3vWud/E7fsfv4EMf+hDD4fDKcft+b88TleU6XHHdd7qt903f+z4R08dT635ft9+60vfdW3cd69r5uro8DzB/Fri/qU3651lnBK1jvdfVq6sYuO6anxXa+7pyU5s/y4Cw9vk16M8F0H/rb/2tvP7660+xLnCZgOg67TU8rXtqH551lmxX377OwbQL2tvztg6rde3AYpfdb4/bjbbSbax1xsA6i64LitfVo1+6D3v3GP0Hs1+fdcx4f333WN1r619zd103hnw77xoP7f9dlr3rady9hwsSvlC+yKv+I7bU8ql71J6zwmdhIuY6YK5DUmI3twlLGzI3LdBuwLaJKPGJRc5gNWUMZQu0cxKRrtYPREZsUxKRo3DyoUIr5iZhphMWZsjcJMzrAXMzYGEGzHTMrE5YmAE1Cr86R+XHiOwIkR2h8iNkdoTMTxD5EXr8CjI7IHr0zxBAfesO5dd9I+FP/ivU9ByEJHv5N4EF7+INbLztIrDEW+hw6xJgR5uYeHsFvG0wAlMhy3kTpjFHWJfp0wqNkAbrCfAk1nfaVBsop3NvnEmt8puEKG04wwaZtgiolUhYGt0wV0Fxyzh3AfEq0oVZTaLRswsaY6NeInSFMAZjQxcuss6aSCQSrEJbD2NDEDGWGAhQ1qySfAonfaeqBGUlCH3LJLZsDw27Q8OdieHOuObuxLCd1EwGms8Zn49ulnxs6MDMW4HhDVXzLZmHLGumRcmPiprBcc7gIOVoVnIwNxws4FwHTOuAuR2wV
COWIqKUzrirpYdBYaRAJtYlExoY5EA3YN5A7AAj2jG/ZqbQxx7mXGEuFGamsMuGzQ65HBlYabYdG+xGbSxSW2RtEGWNLQpMUSJMk1qeeqV5FqZ27W5qBAZdl1RFDtbArd/6T91D7m4Ll7KWAJdSaIS0Q+qLO8jIMaS20Tm00W/6r67uK9n2pvYsQtBmIZXC4KmaQFaEqiTyChI/Y+ilxF5OogoSLyeWGZEqCVVBIApiVeGLHJ8Maa/K3VpH9fb9ZYxhyi7nZov3BPfx1NWPtRCC45NzfvJTX+TNRwvnKxHuYP0dKrXhAHOwgw133fpoG4ItoEKYKUJPEXaBsEswKYLMycqEG5YRQoMwLvSl14QWbLOTNstufnUZrwkyX5dOlqZr1wcMQJNoSgRAm9gnddk8yzminCKKc0R+isxPkNkRIjt28b/rvDEWmgylzVzUOba6msXzeUsfB3wtjORNAOiXU64jrfqlS5Z1cUW7XinFBz7wAX7bb/ttvPbaawwGg2cCsHXAsD3XdSzudQC1Leu+5dcdo3td62Q4/WPfBND7EebWMfXXAeG2LbuYon+uZwH+fj27x1+3/zoAflO91hkvX2v5lQD05xnNuO7e/6o6ibYHbk/Waqj7Nw+eZrX7bG57rHXsr+d5K4lDX/N+XQdtwXm73D32TRbfTcz7dY2+bsiiNVCAp0Ln9I2WLshe18H64L17rhZE989/HShfV+d2ue8g29eh99nw7rUYY/hL5/8+U5sgsXwy/gyZDVmYYMVoL4wD2xUeSQO0h7IB3NItj8WS295JZ33WgPCiYc8ur7EygrlOmGsHvGd1wqHeZNasm5vBal7YkIFMGcqUkVy6SS3Z9i54WT4iqM+Yn3+V80dfoJw+IV3OqeuaNE0py9LFkwg3MNEmi1f/AOmrvw+kxDv/DMIWVB/8hAPC77cuygEhGLliBy/D5Imrzn+rFPAtGwhYH6O3oN5qALO4ZItX2mR7CZaNQRQacgfghCkbJyrH6ok6Q1QZsloiywWyTBFliiyXiGKJzBeocoksly4uuy5cG6sIqxKMirD+EOOP0OEGNtpEB5vUwQY62qWMX20YzwmUeVNfH60FspwidI21TXZTEYEpUfUZfv2AF7bg/S9t8t57I25tSPZGhp2BZn9s2RlqYv/y2exG22mfqf/zO5v8yEWMPRf81XvH3B3nfN+dOQDvTyR/8lDxg7ctPzsKMa+G/OGfueCWOWJTL7hTXlCWLq59e7xuNIfLD6KlWijquU9tfSrtsTAJMzPhwoyZ1iNmesBUj5jLmMKTaE+Cp5GhRcSAtU7bXgkXg71smOzCrWtD/rUJQx2aHzirpYmGuZoEl0B69UC3fwRYvq0Bz+IpMN11uvQ2GoOtHd3ojEo0RchG3nMlzKNLHmWVwPq4e2+BSkItsAnoGHSkKH1FKgM8GxPoAXGVsVnO2avOSEyGkjWRLBiqlFguiFUOWNJK8ndPvpuFTviu7X/Iln++ei/V2pKamCf6Dn83//2A5SP1Z/jtw3+0el+19y7wJeMwJyy+QrWoEFIy+7V/g/L2byH44g8w+sXv40oRCoItrL+DCbaxwQ7G38H62w7M+9uY4Pbl7/426BqRnSKrE2R1iizdsqhOLpdL95uoTkAvEKoF7i7koPFCF3rQi8B3c+uPMPEeJtrFhttOvhZuYCavYne/zsX+9hJQCmyr5dIIXIQrpHRRaFTgRrRM7UB71cQf7wB5VqMBeQfct/83Mcuv/HbVAFgd54qBkK+cM59VVu91FVJ83R9B5Bf4n/1frjh3dstNhkOXpOuva7eNomhtFtF1x1v3/3X16P92E+Pbrut+v6+Tq3QxU//YN9XzJrC4Dg/08Ud33q3Hs47f3/+mct15uvW67rpuwma/EsPwOmfQdetv8lts6/GsfvK1lucC6N2PWLf0G2yd5dUHn+ss0y5b0gLedcxzl3HuA9e+MbDOKOjXpb9Nt7OsixTTNRbWvRzWXVPbHn3NVf/a+lKadn6d9dq/1nUP9boO0d2uz1J167OOPXfgHWY2QeOhseTWZ1POuSudrGSkSoYiZ6hyYnKUuDx29zjawELHDnDrhBO9t1qe1w0YN26emphIFIzVkpFKGcolY7VkrBbc8Q7cOrFg7GWMVLYC+JW2ZMROg97Mz5nwKPkgT3bfS7aXoMMN6mCDKpigg01stAF1jszPnL7Tb7Jpjl5E5YcQCHjJgG/xHr2FTraw6Q4sBKK4QGWnUOQrsCyqJbJYOMCczxxYzueIfIHI5qh8jiiXTTa+ogHaOdKUV5iwZ74EhMCoBBNvOY16w97baMtJZsJb6KHTpetWOhNsYlWEyKdNPTJEVSLq1gFQYasAW8eYYgKLxrIwBlEbLE2iE8Awcb/ZriQhQsvbaH+fNxeWh1+Cn3hTEHk42YcHg8CShJZhaBiGjkUfxZpJpBlFmsgzJL7hM0c+VTMK8GePt1hWEutVjG5NOcbwk8a6QBaFhyklf7ncwTQA2eWjssgMRO4mCjCVRNcKXauVTANACY0v68tJ1XiiXdaMZMmmWOKJCnSGqRZUxZwim5GWBbkWaBVjgyFiuIUeblFvDtBxjI1CNxpipYuM0oB5CgfobSEuDbVW0gMONLdt295/Kc0KnBvEFQ1+QOPcCoQWQuPkP4EBzyCURXnWKT80jWYcaEJsUuPqUwsXmz5v5lUnks1U0H5CLDRxTaAgZi7GHLHPl1pjtN1U0gyhsOo7rv/CXzn6HjypXaQn4QIvCiyKuon0IvhK/Qo/Yb6lUVApKgJKItJKcvrS7+Ji16DlABtsYJJ7IATl+/4TBp/5C73HxUJ16hxI0/XA4Mr/CIyaYFsmPnAMvfG3scEe1eCDKyDfAn0wiOoMWR43oP0UWZ5czvMTZHWMLL+IqE4Q9XQVTWdtHbwhJrrlQrZGtxqt/O2VPt7ETiOPFI6Bb4wFqnNkNQU9b8IfLl2YQimxXnjJ+kcTjHfLjRL460cGXLKgS4MDFbjKVR3wf8UI6ID4zrK+/XH0/kdXxoT/+b/z1HU/D/C77hvXfnfjOCYIgrWs9HUj/tfhgv58Hahch1vWne86AN7//zonxnWle91taRUG/bqtO14/tPJ1171u3bMMnXWG1k3HvW7f65bXlXX3tl/6kuru+nXb9q9h3XVdR5T+cgyJ584k2mea+yC2ayWuk23cdOxuaWOsrwPg3YvvP3DdjtkFl9d1tOuuZR3Q79b1OktvHdt83fX3j9kFyv3fu8C+a3X3HXf7L40+YO+PePR/60c+6W5zdagN/qPh/86P51/Hrw0+zzdFX7qsq4XURMzrhAfFJrMWaOuYmR5cYcCXJsYXNSOVroD3SKaM1JK96LRhv1OG0gFviSG1ESkJcxOuNOhzM+CQHZYmIq0SlmXMkojUJmREeFROk95IYiIWVN4hUr9FOH0Dv56jinPKi8fY5YljyHSOtZY62GL6jX8RUaeMf/bPoHTGsvpushe+m+Ff+yGin/xXmMEe09/w/YhyzuQn/jOUzrmp9O9Td92qfwA2nFCHjaPnSo/eatEbkN0C7eZ/
hEKWF4j8ApVPEVWOqAoHuDXYVEEKQuRItQBPoT3lJCgN467KY1R1iledoMqTy3l9xmz4naTJN7F99j8SVm9QBC9xeud78MtDJsd/DyMitBph1IjKJtRiiPU3UNEWGzsvEEd7aELyUrJMBee1aMJGCxftscGel00jrrZK8++F8Nz/MuBc7SI9i2yirtiBQSYGLzGIgUYMa9g3iKRGJTUycVp0QovwLCK02MDiWUOYaeK0Jp5pwoua+FwTnteIOdgUdOWTa5+sdtPSxOR6h1z55J5PHvmUNsQiUSZF6AWyWiBPFng8IcFJOqgX2GoGFoyKKUyIlgl1MMEmW5hwjA0GWBNgSx9y6YB6iMtY77nRGGuMohbuLR4bI2JbE9qUkDMBT2wlHttMPuYn/8KfQM9JJhPCrVvYcIyJR8hkQjDZRiUjqiCkjn1q5aGVWk1GCWwb5s+zbm4sZBZSi5xbyEDk1sXRrsA2IwW2jVZje7dTCJ6S1giwSCorm6RQztizVlBbf8X8L+yYf5G/zlMSrnZZaRc5Ka+h0I4pryumv+7fgS0QpgBTIIxLWS9MhtCpcx7VS4ReOBBbzRF6hqjmSD2D6gJRzRDZfbzsq8/82FoE1pusGHgH5Hfd3N+mGry2YusvAT0O0FcdEF+eNMz86YqZl8tD1PnnEPXFFUAPYKzFBpvYDnC/dHp9FTtqnV33QWfI7ACRP0GkT5DTA2T2lcbZ9dL5VZjiqffU6hsh5GpEwKjQgXg/WQF460UIP3YA37+UB9W3Pn5ZaRlc257PA85v2jcMwysAHa76SXXXtft0v//tedad4zo5RB879P32+sfsk5frWO3r8Mi65WdFk7sJVN60X/88/f1vuld9/AbXs899MnXd+Z8H7D7PdfVHX7rrr7u/rdGz7lu+7rz9e/21lOfSoH/Hd3wHn/zkJ5+6CV2td7dBu0x3t9Gvs0q6jdF11mhBd6uL7h5znf6sLe0D0dVgrevgnuetAGpfX7/OWu56O6/TLvWZ4nb/dcx8t3RBcXuNfTlLf953flj3gPYNp67hUtf16rxdkN5lz916S2b8S013y3LrgdN6143euwHiEhh5S8YqbUB3u5wxUk5yMpAZvrJUwmNhwksn0J4DaDfqSkaET+WANo0zqHCymISs0aa3/6crfXoor4bjLIqCN998k89//vNcXFzg+z7GGPI8pyzLpwyXm0q/3dcVY4EGRJt4C9s4gzoJiQPcVxxCG+ANIIszZH6BKs4RxRmyWK4At9USS4AVMVYOMGqE8SYYfwvjOzZb1nNEvUDWqQMhde5kMbp2mmbj4lhbPBdFQoRYGWFEhJVtBIkIK9yyu6ASaR3AEbZA2twtm9yttwVCZ1idQZ2iREkcCt7/3pf56Ifey62dMVEAsW8JfUvoG2LfJSGKfIuvakLPRU7xpUZSk5WGRQaLHBYZHJ6X3D9Iefuw4DT1Mf4WlZxQisQB50KQVYJcC6paUBrQQqCFWEmwV/GwG005VkBgkJED8DIxyNhNIjROlx5aRGBQyqCsJtA1UVkyygo2Fgt2lnNG2RSqijQ3nC9qTqc1Z0uPWZlQskHtbWP8PWq1hcHDN+fI8hhRHiGrEzx9gqqOUfUpqnYMq6qOkWaGDcZMdchMR4i7L+P/pz/wfyc0rwjsuzDsYRhTE9lSeLYQ2Exi5wpzLrGpWjnJiqB2iaxMSqiXbNiCSZ0RljnCKjIbk+mEzMTkJqawIZUJVk6gNE13+SC07YkD1hjnCiHtZVImD/AFwgfjicapV7gRqbAB/sKyStbUnEOYltEXjuG3XMrI2ne5tYjaujDmuYVUwcLAooasdvpuU7o+bxsdOAKEaHTgEkQAwneRTkQb6aT57WoQ9c7FW2hipruwjLUbxrC105zbEkx5xSi4NAyaZ1IvEbVL6iNs3YREbaKsCB8rY4wcQLDZsPYNQx9sAwpRnzXgvQHzDUMvyhNEebz6n+IYUZ0jGicDg3AjASvwfgsb38HGt7Gxi2Bj49vYcAdRnCHyJw64t6A9a0B9s14UR1h9NdFb9z3ZX2/9Ifk3/UlEfk7wqR9sYs+zdr+vZX03Y2gURbz++uu8/vrr3L179woeuA6cX0fCrSMpb2JU17Hi14Hbbj2uIwT7x+jim2eVPkt8XX3WKR/W1bN/bX2c9TxGRPea1v22bt/r6v3LYabbch1ua8t1QP1Z57zJmIL/P2jQ24bpOxZ2b0oXRPeTGXWBYnusvnYcuALOr7OAnmVddmUy/Q5znfZr3brrzt9f1x0xaDOttqVvaV0H4vvtvK4DXGdMtfeie57uuVqfgX929D7+weGH+cbJm3z3rZ9nWkWclyGzOmFahZyXEbP6UnbinCkTtJUMVcq4mebEHNgtXvYP+DWDzxOoGiVd1s/KeqREl4DbhDywew5464hl5cB2QNWA7EuHzzYKy4a8uPwfB8iHqsCneup+rhsRWLWduLpNez/7TsTrHGbXnkMoZq9+P/nedxCf/3+I0n+LaSQkNtpFxzuraCw23MBEE6w/BGsQjfabKkOUOaJuMgvWFlKJXfhgCyTnCLFcpcfW6h61epcLlxbj4nG3zpzGOtChS5d4p8yQ6QVKP0bUS6TJkSZH2BxhCscWNuBaNgD7Elg3c90C7wJbpygKpC0vf+dmo6Vtr7quqeuaMAzZ29vjt77rd/Ktnxiyc9vnj/7zDX7h2Of/9P4FHxtqphk8nAmWhWJe+ExTwT/8YsLRQvGB7YKBpzmpDIemRmcBehGQVp4Di54llgUjVTAISsZxxb1txcZAMopgFFkSvyb2amKv4o1Txd/+7BYvjJb8kY98EeoCawx5UTPNPE6rkFM74EwkTEXMXPmkvkchpIv+V0mqXFJmPulSYubOSVRP3YQWyIlGjjVqXyNeMu4tay3koKcWc1wj8nN8m+F7JZ5fIaMQWw4xRU0lLcYPMfUA6U/Q1RZKn6P0HDF7AicPsW/9S8oP/0//FTAGRs18DOwjuI3gLnAPaW8zf+ObCTewxRBb+GADsCHYEaWFuRU86L9bBKwSJymQnkGpiqHM2RDn7IhzNr1zNrwlEzllIi5IZE7saTwlVoTAVY1/p+9oMJWhXkrmesCJHiPQpCImFTGZTMhFRCYjchFy5N8hkzGxzKhF4Jx7fQ8tFVYp51DdGgIeLu62DNwzUo9cwq8GR7vMn7iIQZVwU24gr6AsEPkMkV0g02PE4gly9hi5eICYvYVcHDoj2R9j/THWG2PVaJXcDDXAqEETbz1pIr1EzgBWQ4y/BW1oxCYUojMKvOc3CtoA9FRYb4D1Yoy94zTpVyynJsqSbGO8CzA56NQZBfXMsfDlGSI/QmaHyPOfRuYPEOmbyPwxQnrYaN+FlozvNMD9FnrrE5i7t5uwk7fBHyHyIwfcs4PVtXgHP4pcvInKDxHZY3cuLJRzgp/+AcCuBefd9+6VbnkNA9zOu8ue5xEEwVP682eF8O0fs3/s9v8ulmn3vw5UrtunD+Zv+r5fx1C369cF3eiOFHQxyrrjPQ/bvA6T9a//pvqvq/fXev5nHfd5S1d23GK2ddf
WJ5C79W+3ue76bvr/WeRfW74mgN5lmruAcJ2VJIS4EtO6f6E3WX/dC+6+4PvAfJ1OvPt//4FdB+q7del3mv7QRH8IpF/6FnDL/vev6boHrgXV6yzUPmDs16U9Xtfhs922HYH44ccfp7aKHz99P//89P0MVc7Iy5h4DngP5ZJdf8q7gocrrffEy0hk1hlKFfxfZn8ckHyFe3ylfoGgrlZhDS8ZbucMuqUumnCHDauNi8ISSr0WBK+7n26lG77tPhDr2qu7f78d2yKEwEqPOtrBDPao/DEpCZU3XslFdEdeYptILDbcaD7ugqz+j8jqPwBaglZNqMDaRWyYFi6hkJ4jzQWgnSOXlFg5wKoNF9PYjyDwXFSUeoGs5sh6iizPkeUZsjx1kpPiEFUcoMpjx7w1oBpbro2f3i3XMSHXvfDb0gVVQoUYNaCUE0Qwxnojp/NVDXOvBhhv6P73RmiRoGWCUQPwxzwKN/jsFzYoPh9SrxxqBf/Fl8aMI0vsWwahYRgaJpFBW8vDhcIg+cxFyIf3Mqp7F7z/419huLlkWFXYVPOZjQ/xTvoepK25N33CTpWzm2fsm5IXI8Ur45h3TQbEUbgyyl7/oR3mlc/nTyf8j7/0Ye4mKZ51SZ3qSlNWgmnmM81DLrKIWRGhrWAc5myGOZPITeMwJwxLZKSxtzX6FUm66THb9phuhKQjRT0UmBhEaJGecUljtMA0qdXNQmHmCfpiiL7Yw5wp9JlHfeFTzzxs2oA2iWOmV8loBI0P8bR9Np6atxMC1HuaRDIaj9TJNsozvOIxg+ohL00WvLQX4o09puMJJ8E+F94WMzEmMwPKKnDa/kwxX4yYLSc8CF9CJBqZWKdv950kXqEJdMWgStkoZ2wXMzbslJFYMmimkcwIrUvVvjQDfmD5H1MS8Bu8f8nH/Z8ntQmLMiQVCSkJD+VLnNU7WAR3zCP+g+p/Xn1P2lFQay2Hh4f81E//NI9PlpjBS5iNV6k2X0Fvv4LZeAGT7EI4wXoJKN+x9oF100jApkviZeUIK/cx8n1cTfkkLo2tVRZX2wB9l71TlIXzQSlS52uSXSDyc2Q+R2TTZjpDZueI5RkyPUGmx5BfINd8xwyAN8J4Y/A3nA7eaw0Cl3jJqgE0z59tjAJkYxiodmQswEXyccs22IHwlpOoXMmKRGfe7VTtcuMcYTWYCqoKUSzBnjehOROs/wFs8gII0KOPO3Y9mECw6QyG8hShU2z0EliL//B/xjv/GcfONyy9raarEZLrQOB1TGqLU5RS+L5/7fe9Lev8v57F7PaJIbga0vC6uq4D0n2c0T9Htw36ZGS7Txdj9a/rOifaft3WXWf/mq9r/26917X3s855E7hfV4eb7s91ZR0G7eLM7jm6dXrWSMVNbbIOi3wt5bkAegvy4BKY9yOUwFWg2wWmfVB+HdvblXfc1Gm6YLTbsP3j9vdbt9w95jqQ2AfWfVa6vZ51pX1RtO3RH05Zp1PvtkX3OP32WvewdBnhbmm3fe/giC8t9hh7Kf/lK3/nCiPdv8ddkO+CRrTtDi/LJzww+2ww5/uG/+sqFnq3fbovknVGjrWX/697aa27T9326W5TGklGsgrV6CQxCZlwWvfWQTRtfl/shVS/KUFUS7zywiXryU7dh7NJVORP7yPyM0R2isjPkNUSawMuPvi/YcNdZPWY6Pwfu0RFwabTmYY7mHAPG02gOkcVc2R9jCyOUOWJA9jFMao4do5jxRGqGXa+6QF+nhfqU0aq8LDeEKuG4I8xaohWLmaz9YfYBlhbrwHYqgXYQ4wadZaHjvEzBVIvkPUC2Wh1lXbLUi+Qxs298gBbzjDFFFHPif2aF/bH/Hu/+dfzjV/3fnZu3+X1f3yLZQ2/7lbOd7yUcZJLjlPh5rniMFUYBGiLNoK3vBimt5h92icZLYlVgZcUDBYL3jf+DCKB+q7gfhHzucUG6emQ7CShfDPELCR+WuMvLSIVFDMFC9dWb2Yxj4MQJS3aQm4kmZYkQc1mXHB3Y87HB4/Yi6aMw5yhV5IETobgdOiKTAdu+dRHHCqC2mOoFUoHZNoj1z6pDshMQKF9LBIpNUJZrGdXkX6swP1pHUeNcI6eEpDWjVwY67KS6mYERVFgSdEsMSLHUGCoG923xDRR4pfvvIKKUOEAVERtY4y4BfGHWCRwKCw/eyLgBIQ1CCwS7ZI1ofFFTSIKfFERyBIlK7TxqGqPeu5ReR6V9KmkR2kCsjpiWmzwWNyF2CBCd53WCidZaa+hdYQFUPCv4tf51/ZbCSmIZE7MkoGZr1hhiSGyV+Nld9/Bvu8TRxG+OUJffBY5+xzBffPU+8IdTLlntnWwTO5gN17CbLyEHtxxz7HaxtoheAUyKWFkYVMgJi6jsMUgJC7KjVVYHTiNNROsBwyakKcejX7fdhh+cQn4G58CB/QNVLqZKkRZOla/zB3wz5cN+F80zuaO7RfZA0R25kB/dgqLI0R6jKyL9e+HawCRtdbp52XidOzJy5j4Hja404Sp3MX6m1h/Aq2B4E2aUQHPgXYMl2FdIygUopgDczdSIAOsf8v5ViiLnnwMG+84aU10CxvfctKgVlLTJoBq9PIycxIbmR8gTf5U/dtvke/7RFF0JUnROoC6Dpz1sUUXGK5jT/vfuHUAu8+4Pwu0PQvgrmNlb7q+LsZ4XqLmOlLzawGiz2NcPav8cgFutzyLte72nXU47HkY+5t+f9a+68pzM+iwHmz3Kf5+J+gCquuAWPtbvyO1BkC/Y3eHJ9YB3n5HugoMnwbeLUjs1tfamxMaPe8QRev02i/rNHDdDtIv69q3/3sfvPa3/1Pv/jEepBvsqFMCUXNdf+ky8esMm++J/zYHdpd9cYpPDVyVOa0zJtr9+8fud/rCqBWYTtusoMY5h7b6dBdH3QHv1MaUBITklxr0NmGRdHHTd8WpWyYjqGecPfoKb33+U5weHyOSW1TeFgs9pPK20MEu2t9GB69iRruYcA8T7mL9CaJhtUV1iMrfxgqFSt9EnXcAd+lCrglbX9t3+m29uk9CuUgNatiZjxqgfQmorTe8ZKxV8/9q2c1RLsxhC6qFXjiWXrvlFbDWS/ziAKGXqGYb93sDxBsduxLrM7x25WjtdVRVRV3XKKUIw5Cdu+/jzrhid6zYiWv+zXc+4eFS8dooxxh9xf/B933iOKaUQx4vAjYSn68eK7585PG5002OTrb56lszDi4smY6p6hBTKQdqLeCDCjR+VJEkC7xBjRhq1B2NHFgGkUD7kkr4lHnELA/QhYeqDF5tSEoNueA0jziqYn7pZK/RXdsmFBwImgyhShPImkAaAqXxpEEKgwEqo8iNR2F9UuOjjUcgSwJR46ERxkIhqI2g0orS+Bgr8ewCZeYIFhBkyE0PtR+gbvnIXYHYs3i7FrVhkJ4NEYQINrFYDDWWDCOmWA6xfElI87mT7/5j/9f6c29yN0z4pg99lFdfeQXf9/n0p3+RL7/xgM3dF3jh5Q8QjG+T2xGpTsh1QmFjch2TWSc7SWXCTA4pZeCcSaWHEY0zaJuQynVqFz1GWLAKm9
sVo28tzSiUbTKj4kC6tDA12ACKMKAMfebBABvtgS+QFEgDb+cv8zfqP8ZIz5iIKWN9zlimJGKBDQ6Ro4cgH4PW1z5z4AwR1RjL8NlrGT0rA0wj8WjBvB7cwU5ewgzvut/UDogA4S0RcYkcGeSWQOx6iF0FXo4QJRiNLSQsAswiwJ5F2AuLPjMwF1BIFzvdM067LzVWNTkRlHAjWV6IiHddbgTfh8AD38cGAfjKRZ3yG/AvnZF7FfhXHaa/QBQ5FCkUSwf+8zkim0E+RaQXiOwcmZ4h0q8iLo4Ry2PIz5E8/fGwMl5FuSnv/V7s8H2os58F4WODxkm2iX5DGyq0tli9ha0KxPIAr/ocojwBk7mOISVW+dhgQD1+DfZ+/UpmY8Nd57jbZHFVhcvgqspDvOKIalBRePtNBtX1IG8dSdYH50/1nd43cR3L219u32/XEXP9Y930vVhnINyEHdYdow+wryOBnlWfddv21/ev87pzrtt3HdH3rLJWUgdPkZ79//vb9LdrSdabruM6Q+WXA86B53cSff31158Cpy1gv64y7e/doZZufPMue9oF/9cNLXTP9zwA+aZOvo7V7bPx66yn/oPbNx6+lnKd80b3YesbEutA+HVlHdveMuX942itqet6dU19HXb3IbyoI75Yv4tX/QdsitlTxo1bhlIELHREamPmJiRtmO1Fk6AoJSE10RVn0BqfeOUEmjfztImn7iQyMcuOdj0nIsUTDeuPYlHHLM2IRT1wzqxVzKL5f1bFnOcBCz2kVmOXtr46QeTHUM2xMsC7+DQqvY/ID5GFA96yOEHydAIQi8R6A8cmeUOsP8I2wNoB6kugjTfEtMPTjRykZbndPHHOZR1QLTuAWdSXIFuuQHaT6Kier7aXZunmXDpt9Y2nbhFCUEWvUsYfIZn9GELPQPgsN74Nr3pCtPz5K/11de3KZ/n134Z3+pDkrV8EII9fJUveh//4RwhEwXA45P3vfz+//bf/dj760Y8yHI44XwqOZ5LjueQ8DZnmERd5yNnS52imeHIuOJzC6dx9xDcGMEosSQRRCEUxZ3pxihE1KAmeh1UBpQgpCclNQNmETPSkRkkLwmKFy0SpkVgEnl8ThCV+VCEamYZjtSXaSoxW1LVHnXuYXEGBmyoByqCEcbIEg5OuaIE2DYGBi0BigVDWDLycsZ+xFczZ9OeM1JKxv2SoUjbCgqFc4OkpVZlTVRWHh4ecnJxwcnLC6ekpaZpS1zV5nnNwcOCSVf39/GUZ1vei7eWHg1HxdSrSr8igvisDsyU9MxSeCYRnlUv22iDk2rqsolOJPbfUjyqqA40/D1BZSF2GlHVAaUIq66NtExKyeRcoofFETSBKYpkTi5ShWDISMzbEjG1vxqY9ZUgGeGR1xLJOSHXMoorITUKmQ7I6INcRuQnIdExuQgrcOWvrYZDuvFc7aiPzEav6XJFqS3upzrAWjHa+GTpzcrPyHNE+y/kTVPoAtbyPTN9xz5FOEfbSQf5rKdYbYuJb6Mbh0sRNFJXhPezwrlv2dkAYpFogogwx1IhNgdj1EbdD5IYFXMhVm1nsXGEXAWYRY+YRdh5izzX23EAmoE10ZAqEniOqC+coWhxis0fI6hApSqyvEYom4ZmPjcfYaNzMRxANseEAwgQbxhCE2DCEIGgAv+fAf8v6Gy5Bf62hrBuJT9mR+GRQpIhiicgXkM2R+RSyqWP80zPIFohKQ2ldSFc1gmAHG+xi/R305icw8UuIau5kO94YzBKhCsTsPjI/RJjUSf2EQSgFXujixgdj5HAPMbhNbiI2ooqdpGQ7LtlJSnaTkp2kYHdQsxOX7A4qNqJyFfmz6wvX9od1YLydd7FBl33tE1fdQBPXgfN15NW6QBd9SU23Tt3Sx1Hd/a4D9utA97ptnvV7ez3X1e06ou5rPVf/+q6TILflWQD9Wf+vW3cdw76uLj/wAz/AX/yLfxEA+6uVqKitRLejtb+tA6/tcj8pyLoIJTc5TXbP31/3LC17v/59mU3fCu3rk9adf91NeB4Lb51GrDvvb7vufH1mu3vMfhv1wXm7b3ddu/46WVH/Wo2x/Hfz/5A5A2Rm+XXBv6OgjcDSsN3NskFestkNoE6abKCbcs49cUisUoayaLTpGREZSvTOaQVLnTCrEhZ6wKxKOKs3meshCz1goQfMazdPdUIoC4ZqychbXs69lP3whMhOSas3efDmp5gefJnQcwZLVhje/qafBRVTJR8ivv9D2I2XrziAuXnDYqvL/52j5qJhmx1Ylqv/L4G2zN7B0wtk46DVstiiXkA1Q5mlk5Lc8BF4XkNQiEuvgXV99wq74+/y+JV/CCi83e9h6/GfZ779H5Jt/GYQMDn474gWP+eYXb2Aeo6yKae/58+z/JbvBiTjH/7vUSea8/0/AZVAvPzn8Rdf4ViNeCA3+PEfm1D97xF55fp14IPng1TQRtYrrIsuEQSW0QReuAWjuGZzIJnEonH6BFsY3vzi21TpGZEsSbyaUWgJPYtUHsIL0PhUBGTWyZyWOmapIz4/vcu8jkGAMII0HxAuNLFv8ZvkQOdLSV22zxcYXMZMLC4uuAEZWReycWAQA4tNBDbyILT4XkVoC6Kqwss0ci7QM8XFLOHhdBuBZTuYN2B9xna4YDuYs+FN2YrnbI6WjEYjXnzxRWazGScnJxweHnJ0dMT9+/fdjawK+BQHBq9KmcxS+BKwg2UHwwtYXsByC8sus4cfJZy4uNXWuwS3xl2v8O0qwova0qhhjRqXhOMFwagkSlL3HGVzts5mbB6fsjmbMcoq4kWO0OZKfxKd+OYjf77qs10Gqn1n5bXkbx79LhbVgO/a+lvcUW+u+qm1ltpYDjbu8De/5XswqeC1z32OW+884ETsMWWbpR6T1QllFWLKZqSyiRwjPAXEWBuD2Uabl1nFlu++YoQAOsB/NTmdtTAl6NwB+GreRF5pn++5i19eTZH1OaI8R84eIqePyG7/J6jpI5L7fwRpKycdCbcceO8D+fg29eAOJrmN9e66bKZyjghT5DBFTSy86CG/MUbcixFR4UJBzgrs1GJWYP4edvFezCJGz3zseQ1lM4KBB8J3TqLlHLE4Q5THiPwAlT1CZA8RxRGiaCLAFCdQnV8J5WikgmTb6fkHu9hkq5k2sPEGNp5gozHEQ2w4xE62MHt3IYzRQdgw/S3z74EnHeMPDei3UDdsvwlcQqwqxL//w1gZUH/978RWEXx5iPe//RAEuw07v4NVmxixi5XbGLGNLSaEVcHeIGOkDWFuKGrJ40XEQyJKK8mNYlkrLgqfygp2BhU7SdUA+Ir9oQP2O4OK3UHF3qBiFOimq1zvs3Ydq7xqxx4GWvde7r7zr2Nv++V5vhHr1AQ3bdvFSH2Wvy3Pgx2uG5F/3vK8+OomyXG73Tqjpbvvs3Tn3X3acp2CoW9Y3VTHfvmaAHrLLHfZ0u4Q9zo2uRvXXAhBVVWr31rg3r3hXe163yJpL7ZNBb2O6e52unWar3Vs/zoLqAverwO7/e37IHfd788LsvqyoHUGTHsPug/cujbrLnf15
d06dTvXdUC+OTsLBqtERRrFtrzgRfGEhLTDfKcksnwqTu+q/Sws65h5HTPXQ07rfRZ6yLxOHPNdu1COi3rAUid4omoAd8rIWzBogPducMbQWzKUSwZqzshLCZWmX9r2K8uSd07f4bg+ZG5LXLw3XBhBbwDCw3pjTLDr2OrywUrqQfNBbkH1So9tn51J76aX2lMvst5vz3rZ99u37dM3sRLaGIyMkOEG1fADEChQAXX8IqeD/xod3HaOdAKm7/lepm0SooaIxQr4soUvAkYw40+6KDNLQAhssEW5840goBAAwsUeb4B5EEISwiiGrQHcHgtuT+Du2HArqRiQUhYpDx8/YXNnj93bL3CWwskCHp1FzIM7HGZ75HZIXg9Jq4TCRgSiYCAzlyhLFgSixhe6cYOz1IWkHQiphZOuFEJSAL40DP0aWUjHlBuXANazgv2xZX9SsT0qGQ8KpF+yxHBWexznIbMTn6Wx2EGFt1ETTErktsYOQEcC7RusAmUkxSLieDlguvR5MJ8gZxZ7IajOfMo0wCBIooLEywlVhdzX1PuWQgvmhQEbgD+GnBxLhaVu/PckTrlugDmCc+CUR78Ai0cExQFJcUBQHDPxc379x1/lzs6In//5T6G15u57X6N+93uYDna4CLeYjSZkWwPKoc8iGTILRzzy7rr+aS0C6zKQ1hZZGLxcEy1KkmnK5GzG5nTBxsWUeF5TnUuyIuZcbHNhN7gwQ2Z6xGm1xYXcgC3B3/nQ7+HO5CGpn5AFMWmQkPsx0lqMkBAIZq9t8u0HfxsrFNoIhO/Ydm0EVQ1feuecz90/JxObTgYxuYce3MJEe1h/yzlb2ghUjYxqiC1yAEQCrHEJmioBlcQWEpv7UITYauTWG9H4ALR6HTpgX3Tmju2vA0Hxvj/sjm00WI0wldNY66IB/g3bnJ7g1fedwU7djBZIkMolCwpi6mCCibcgvgX+PsJcIOQMGaSIZImYzJE7Cu99IeLeAHkvgXIG5ws4zzBT7Zj5eYBZJpjlezDLj1AvB05mU9VgjLOa8d11mKWTklSniOLY6cDTBy5++skxqvxMA+pPXGSY3vvopneXEC4ulA2G2OEuNtnGDnawg32Kj/3fIAgR9QHq/qfRuy+5KgUCsVMQPPlbq+NIKVda85YU/MCHPsavf/13snPvA2RmyCwPuch8LjKP8yzgIvOZZlAXkiCzFLliuvDQoWAaeNxX1iVmBUoryWrJvFIoBVuDir1hyf6wYm9Qsjuo2R04UL+TlOwNKpLgKsG3zl+uD8a77/s+8XYdSXlT+7bH6WZZ75Kr6/BQ//70vz/X1aO/fBM7/jzfyn75WgiqdaULwvvKjvb3fukD6XXAug/Su/M+lmrxaz/r/HXluZ1E+0weXNWfrrO0uvv3PWaNcWG42rilfTDYBZFt6Xbi7nHXOQH1O+K6c7THWLfc10n3r2edk+y6411nRHSvrVu3/lDWdSz7OgDd7RT9bbsWe7fT9JM6dQF8/yF2/1t+f/z3+YnyG/hG7zN8PPxS57iWzMTM6wGPq70V290y3PN6wKJOmDegWwrtosV4S4aeA9wjL+XF+DFDtWSoFgzUgpG3JFL1lXq251zX3t3f+oZWN+xlt0g9Z+Nzf4L0zu9l8PYP4R/9f9e2QVtWfdH907BwT9ejX+f+/teV9W0PyADjNxIab3ipR/dGmIbdN94Q6w+f+l2roTM+5BBEAlY12f5qVnGcdY0Wt6D0G7xRoepTpJkh9AxZz5ANk258n/J934iYnxN86VMINaYYfZQ62EdWh4jIQwwG2MEAG4VYz8Mq5SLbIZgby+ECmAGPacJGCsecEQIbYO+4L6Rt5A2AFCGC9yBsjbQ1HpaAgogMrKS0PgdmQGU9Epkz8lLG/pKxn/Hh8X3uL/a5FZ/zLbufI1IVtfW5WMLBIuI4G3Eq7nEgblFJRRRqXtyybMWCQEryLODkNOBkJnl04TMMa+5tZHx0NOPWcMaeN2VUzammhrOzAYfVgMNyzEk9YFo3QwW2ppaSXERgWqdQ6bJ8RoDFsf4mvtRod4sB8hLp1RcSc4Kxj20l39CZ+hK5/CpL3mn2cS6JP/mXfwLh4Q0nyHhILSac611+4eEtzv1XmI7vcXxyzvRoj2G2gVAevjVs2CkbYomxEotq/E4lpRew8AfM4xHLIKH0QirhY4zCaoGtpMtIX4qVoUMIJDShQgEfFyddNEDXQP4g5J0vvQhLgc3ApgKV1phm1AULT3iJ/wYXP9hlDDauVqLR/+uK+laGNbUDw1UN5zXW1AgOUPYRAuOil0gF0sNKH6sCF1XJa2L+yyZDpqoRfo2MLGILxFAgYgueBuVCy1orsbVqQD3Y0mIKD3Ll7oAoIF80jp4pUuduBERrB4StdI6TaoCJ7mHVZbQVpN+ESVRuOKdl+0sBhcGaCdYOMdY0BoNpXzDNPp57jihB5AiZI8ISkgIxMci9GfIjKfK1FLkrYXoBZ3PsWYadltiZwC5C7DLBLIeY9APo9JugTEAbqCv33HZtQ71AVFNEeYooXHhFmT1E5gdNfPYTB/SLYzCZM2eKORRzOH1z9c7zf+lH0JufwDv5Ny55FOB99eeoX/tmwh/9oafelV1iQghBEnlsJTnv3poRx9VaOW7Lpiql0EYyzT3OU5+LzOc89ThbepynHhfZ5fLJ0ufi1OOzj0d8URlC3+ArCy2YN5LcSHzPsBHXbA8q9kYVdyclt0cle0PHxu/EjpkPvWfLP7rfgu5v/Tbob9eWfsjT7rbrMET3GH38ct12/fpedz3r/r/uW7iOub+uPC8r/cvdf10d+3i0u65LfPYx168qQF8HvLsg8lkN2HeIFOJSm946k/WdT/vn6P7ved6VC2yZ9/a4XWu1fx19/Xx73O687wDbBdPdhm63ua6zdgH3OiDZ7tvWvdtW3WOt01R1271vAPU7fb8du9u0MUD79bsE+5bchiz1cAW2F3rIS/UTvlK/zKf1B5mvQHiCEDBSLeBOV0D7bnjAaLBk5KcM1YKhWhKKAimvGl3de3K13usBcr/dur91234d49DX2Q+O/gGDo3/gHqBr7tm69u8ed932xgrwG32656KoEIw74HmI9ccN8B5eatk7YLtdRoWg8yYJ0dINuVc5QlegDUKDtQKsjzU+toyw9QAjR1g1BqubBDhvoapDVHWCqtvkOCeI8givPsXTpy6TYr//CInxJ+hgExNsoX/2xzDhFibZwISGMPgsynuAHmxCtI0NE7Q3wjOaiSjZSQT3tkO2E4hVhdIZQldkhSHLFWmpmBeKaS6Z1opFKcmMRKPQtbs2I2wTtjIAGVAKLoELOJBfA5UlLSNSHXPIFm1cbyHgJB/z+fMXULhIJdKUTebVBYG54G5wRhwYfBEh8jFnekhqIxY6INWQhwJ9G+a+xxe9EV8QI3R6xwFSIRrnPJwzqAXReXytuBxXksYitXVa3honoanB1tYl56mba2l3ELa5jgAj2DBCbQCvAL9hlU1T0AJgLaStxbf+XagzSp2h6xRbLJC25K0qYHm+RS0KzoYXTAcj/OEYHUZU
no8WPhVOh661QlcKW0ps0QDvGkRkkYFBBTWRVxB4BWHsorCEfokflojIUEce5cCjHHnUAx8dS2zYgnRHFOMZpKcR0rWJKjVeVhLOM+LlktF8xnB6weTsmMliTpwWK3mNlC6k7aNHj/j0pz/NyckJAL7vU5bltc7614IJcNGZVnKU2+hWkjK6ixnew4T7WDlBiBwZLBCDAjWxyB2J3PeQdzxkXCHIsUZjU4VZDjDpFiaNMHOBmVrMXGCXCpcK1yCqBaK4QObvINND5MLFYZfpEWRnTtddG6yR0CQns8EmxttwsdnV+PI90oZg9AdNTPaRi4W/UHAs0W8I+KmmzwrA7rcvsKYVjNP2K4MILMQCuSXBLxEiB5qsrCaHusRqjaglpvIx5Rbk90D+RpCxC0GrdQPo2+utGkB/7oB7fojMHyOzB4jyHOvtUY+XqPQNRHmC96kfQf3CTyCr8yv3sAuGuqD7JqlCu107+b4ijuDWRoUQ9ZV3fJ+sMcZQG5hmDrSfpz4Xqc9Zqpq5x8HM52Tpc3oacP9RTFZKpADPswhh0QhqKwh9wzAybCQ1O4OK/VHJnUnJixs5L2yU7I1qtuMaKcyV716fgOqy8uv69U2a6XVGwHWkVHuMPg7ql+dh+a8DvdcZJr9aZZ3Rsa4+1/2/jsxdB8r7hGeLebtKkpvK1xTFpdv54fqb2Qd/3X2727TAs8so97Xt3dL+1s102a5vQXv/5l53rP4DfRPgXgdu11l264BwF3x3DYS2Tfr7rAN83WNcZxysGw7rTkII/tnjD/GPH3yUj2++wXfe+ylmVcy0jJiVSbMcM69iZnXiJCb1kIVOsFYw9NIrwHvkLbkTHTPy7jOQC0a+W5eoEiGeHt3oXvvlC+Rpa/pZL5l+e11n9fYdfLr7do2zbrn44H9O+sJ3MfjyDxJ+4b/FyAjrj8AfO5Dsjxrg3AXboxWAvly+/N/4IyedsRaMdg6nlQtBKKrFSssq6wVUc1R+AnXmGFXrg4ibrJ5D93FVE4y/ifG2MN4W2nsBbI2sL5B6iqynCO3COzpd/Ayhl0iTIm0GtkKP3s3ivb8PXW4y/OrfdQ6W0StY/6MYbwDBkHq4S737btAauZg1cdtDUAFY6xIt6RJRVy4zaV2D1piNHawfIk4OkScpUs7wvAwvTCj9iEenHu88EGhAWwcCEQKMdSDW0CRnFLRh+IS1KGsJhcCTTiuuhKWucowuEQ2DKnDgW0qBFRItFIs4pvYUQVm5WyAl1pMYT1BLRe0pB/RVDHICah+k4ERZFwVDcpnURuNwhQtjCNbhcBcPu9OddbOtcQDcMxZpDKKGspDoSrIdLtlLUvYHC/bGKfvDOfvDGeMgQwqNQF9el9VIYdB1xT/+sX/BP/rXn4Ht98N3/e3/DMV7iHg3ibnjj6sdNa4GIjSBwZO6VkoXSpnllsuwWTiMROXaNq/hvAByIHPXIQqLEjUBJaGXM1YXTk4m50zElC0xY4tTNsojIluiuqykBWEEVjfv5UoisuuBggbyQch9/x6Hgz3G++fMJkNm22PSjQHFbkiZRBQqZlpvc2CaJhYWqRyQF6VFZBZ/XhHMC+TplPxL34L80ldgeor6xKv4P/lp9D/4F6v30PMUAU5PXp7D9PNX3iPdYqWPifYaLfltbHwHHe9Tx7cx8Z0m0sstEAFSTJH+ApmcIcc13o5CvttH3g2R+xIpltisxM7BLDxMuo3O7mLSb6Fahk6eMrXOQBK+Y9fr3MlPinOXWGl5gFw8QswfImfHyOwYkZ1AeoQoLq5IDlfvRcDKBDN8D3r4Xszw3dj4BWx8p4lgtenegUUCSx+r22ysAYgQxMRZWG2CJdG+WxvQr9sHxQelQLUPeu7O7oVY32UyZaLQ0nP7tsSMAJdpynP7CQHlGWrxRRfBJX2ILB5h9AWqOnUTM4RyWaKrqlq981us0A1O0eKANna653mrftr9fvW/vb6CnaFmd2SAcrXPdcy2sYKLVDWA3uMi8zleBDyeBhzOA06WHo+PA774ICErJWXd4IYmDKvvWZLAMIw0m0nNztBp5O9tlry0VfLu7Zy74wql1o/iruphzFMjyd3tr8N1fcLqynNwDY5Zh2fWEYX939bVZ901rYu6cl3pyqe7296EJfr4sYupuufsA/J1QL1VjVRV9dwA/bmjuHzyk59cNVB3GOlrKV0n0esE+9d5zV53rnUSlxYcdgFfny3vLvc7Vndq63LTUMx1VnrL7Ftrn8pueh2ov85y7Heq7nzd9v1OBPBH/+3/kdoqcO5KjLyMsZ868N06VCqn7x7I+Qp0RyK74t2+ziLuLnfv3XUWaLct1j2U7T1c10brDKF+HbpsWdeQ01rz9ttv8/nPf55Hjx4RBG4oe16FPHj9U83b0LoPgvSgBdHV3LHWVeP4WTXAuvf/5e+NXr1aMn/xv6Lc+s6mcZaAdsPPq7TizcftqT5um7oYVx+rEbZJV24q58BmC2jXCeOcrqRw0U2UxCoJygOlsJ4TgJtw4hy02o91k65cmHrlFGeSEdyJXOxnnSPLBZQpIi8hzRFZjcgNohRY7YHx0cEEM7ntAGAJeDXCK/FUThxUbA0Ed7d8XthQvP/2gHtxzUZ9ykZYECgXbaUFpkpasDV1VXByfMR4POYbvuEbiKIIgItFzj/5lz/Lv/l3X+A8l6RiQKGGFGpEIYdkYsBJvMmT4S42l9hMQCXxVEUoCyJyYl0S1iVVnlFkOVqD8mOO9Xswwm9C3eEyNCoBoQBp8Zq4/xIXYtBYgWkkxi5hlVgBYuqG2Ze4r6y1jfFhVyDfRYFhpU7wpCFQNbFfMRiWDCYFg0lBMDQ8On3A20cPIBrC1/2OH6dmQskGJRsUDMiJybAsMWQIaiSqAr92CXlCAZFAxAIRSBAWo6UL8ZcLFyN+KRChIdgoiTZyonHKcDBnMz5n1ztgXxwxyacEyxo513gLg1dc2id9Vm/du7fSknmd8Ci7y9998u8DlneP3+T/sP9PWBCRysRlFFUDHsUv8eboNURiuLN3n/eKX2SxPWG5MSKfDKgSHxMqZ+Dp5pkBhBIIYUFZbAlmXmMuUszpAnt8gXlyBI+eYN9+B+6/Ce88xE4XXFdu+la2ZS0gAWesNyDeAfkOIx/fds6h4S7YHMkF0psjkxw5rpFbIG/5yHsJ8qUYkdQwW2LPS+zcopc+Zhli8hidJphsgJ1LzEw7wxLpwDwCqiWinLocD+khcvEEMX8HuTxApC52umhAvajTp67PqgjbOLYyuIuJ9931JHdcHPPkDja+7d5r6QHkxwizRMoSfIEIPWwUIqLYRY3xY5ADLBGUCpsZKAyUAmtC96wMDKKYuzjzOAdvVInID10MdjV0z05dNfr5dghJ4YkSn5RQLIjFnIG4YCBOGcoZiVww9FIGXsokKtgaSG7f3ue1117jhRdeWOU6abMit4ALnibXbgKy675VN0lN2n20gWkmuUh9Hs8C3jkPeTwNV2D+PPWY54qsVJSVeweBU24FniEJDeNIs5lodoYVt8Y19zYKXtoqubtRsTOyTGL71Geni0H
63+B1/b2PA64D+9dhnOsw2bMI4K+1XMfWXzfvAvd1AL1Ptl431XW9Auft/Id/+If5G3/jb7THvPaCvqYwi1199C+nkfqOa/2buU4j1qnLtYx1HxC2FuK6fa8D6v3hm3a/FmSvk150j9MfHeheY/8jta6zP+v//uhBF7D326XrANq9xv/mc7+FL03vsBks+XMf/JsoUV3pbH0LcF19W2eTmx7YdW3Uf5n12f1uffvbtb+vMzrWtXlbz3VGkNaad955hy984QtXAHqWl7zzzf8EnbyEP/0M45/43Zhy5hK3rHnZAFgUBDuYYBfj7zXzZgr2mvkuJrznQogBojpGlYfI6qSZXAx1UR2iyiNUfYQonjg7IdzCBFvYaAsbbmOCLScnCTYv5511IJoMpI75k8WZ+784QzXrVXVBfvubWXzwDwGayU//eZSFOthDh3sueUuwSbX5bsxg3wHMrATj0Xg5No5aQCghtBBZiK3TGA+ly8oYGJzSvNEJK/ACiRf64Em0J8kbciypDcNCu6nSRIVBFhZRgs4t6VJTFAIVTpgXkrMMlpVg5Gv8akao54R6SSAqfKvxACmcVvrLL76EqSTxrEIuIatCqsJ3kUsGBoYWPyzwmBPZORO5ZD7f4PTwDuLU8PXqZ1jqLT4nP+QMqNgSbxWoiaEeehRBwDYF7x/XfP2+4iPbHu8ew17sotKclJYHU82DC81XLwzvLOAgk5xVkmXtUaLQSmCluNSat2C9BtIWNOOY7hb0K9vEuqZhGIEKS46hpEZTosmwLJh99t3USzyRE4gcoed4ZsbWUPCxD75IPn/EL33+F6gHE6JXP4B44RXm4R6pHFGJAKMVppCYpYSlgAXgW7ytinCnIN5JiXeWxIMMz5Z4VQVTSX6ekE9jynlEvgwp05Aq96kLD1sphGecysEXMATebfHDChXWeGGNjDQy1ojEIAeGYLMgubPAKytUVaNqjao0sqxR2iCMRRvNdL6gNBr5vndDHFFrhTmbIzYG4ClsXrtoIVaAlEhPInxQXg0GzFJjZiXmbIk5mWIPT7GPDjAPHsL9+/DmG9ijU+i8X7vvoXVlHZnwFHkhFCbcvRLZ5XK6tQL21h8i6jMk5w7Ix9klkN9XiLsR8uUR8vYQpgvs2RJznmNnFrP0nLwmG6DzASYfYrIEu6idptxaB65l4EiAao7ILxxgTw+Q84eOnc+OkC0znx27hG5tiEoAf+KMjvg2NnEA3sS3HLhv18f7zkk2e4JMnyCqE4S4QIYZIikxt16Bd309cnwfb/AQ9vYw996H2PCgrhCHB3B0DMcniLMZYlrCtII5yCxgd/AuZmchF/MIHexgvU2XK0ImTmqjK4RxPgRtsABpM2Kv4PZWwN1txa2x5taoZDsp2YhKNpOaraRmIyqJvPppTmXNvV7HGK/73q8jqm7qL30cZqzg4UXAW2ch71xEPLrwOZwFHC88zlPFIlekpaRqc0Y0I1KBbx2Yjx2Y3xvV3NnQvLBRsTeu2RpotgaanZFhHBm6tsVNYPu6SHhdA34dDlvXPv32W7eu387XYY1u6bLo63DPumk1AtiRBrfAu/29Nei6oLyu69X8z/25P8cv/dIvtef6lQH07/zO7+T1119/Sn7QTn3WG9brpvuJf/oMa7/DdVnU9vdu6QLy9v8ua97drj3eTQZCH2yvCyXZf5D6db+JAW6X+zrzPrjuX2u/M6+zOLvbdeOZd8FvbeDRcsJueIEv6itgvGvhrbMm2/P3fQVucqTs1rEPwtdtU9f1FSOuf6/WneO6/tsfeWnnLUDvM+h5npNXUA5ewS6P0XJC7W1jg31s4MC3Da6Cb+tvg8kc0C6PVmD7cn7kwpWZBTrcd1HOlAu1ZsMWWLfzbWzYAO9wC4RCFueI8gzVAu0V6L4E36q8QJZniOIUUc2QQmBlgA62McEuOtx1H/1gBxPtoYNdTLhDHd3B+JtYf4yopqjSJW1R5QmyOEEVRxALvMUj/NmbyPwYVZ4ibHnZlsYigiHaSxoJ0IB8uE+5/z4wEWLjDna0h4k3EdEEwhHCi9H4aCOcj1yNA0rKunBrgb00AHxoM2mu0tYbAaVFliAzsKnB5GAriZCGQBXEMmMolkzEnJGaM4hLbhenjOSSkUwZyZRAuCHpozTli8s5x2GAfOlF5EsvMp+MORlMKLZHlKMhSZoymi2JFwWbhxeox4rqLCGdDzirRpxFGyyGCWJi8SYCkwjqJpO8p8GWUKZABnFuCTMQqcWmliqFMpWUpST0a4ZRxTAoCTyX+EhrSVZ7pKXHIvMpltK1gTWAp9FIjBVEFjmwiNggIovwDdK3iNAgYo0INTJwWT2F3/4OwrMIzyCUdUkePeN+DwwiMCANJlXUcx8986kXHmbmMoiamYeZS0wqsZl0Iwdec/98ILJOox4b5EDjjWqCzZxwM2OwsSSOUiJyTCmpc8md6jFb6Rnh+RzvIsdPa2RZQV5yIO6wkJu8NvsFAnk1tXmX2Fkul/zcz/0c77zzDtzeIfye34X+9BfJ/taPutHayQhxZw9u7yJu7SBu7yFu78DtPcTd24g7u7AxgqLCZpXrn0iEUghPIgOB8g1CGnRmsfMafZFhTufYo3PskyPsw8fYt9+GN76KffAQUa4fzr6J0bupWG/gMm4mdxr2vUmgdIWh3wdTIatjhD1zQD7KkOMKuWWRex7iboR6eYi4s+OMz9OpG104LzDTGpv66DTC5ANMPkIXI0w+Bu2BrpqoNMKNNEofqvSSnc+OEYsnyPkDxOIxMndgXmYniPQYyqkbNYx2V4mHWvbdJi2Id7IXG20j8lNE9hiRHbjMovYM4afIpECMDXJHoW4PYG8bu7uD3N9D7O9TjYYwm6FOz1CnZwQXU4LpHM6WVMcp5Vnh+vDCRxcTjH8XBrcZbL0brUYsc0VRSTxhnB64GSkra4GSkATaAdvGGXR/WLE/LNge1GwmNdsDzUZcshnXRP7N9/ZZpGf3+9vHBOvwQrc/dX/XBs4yj+NlwNtnPu9chDy88DmYBpwsFRepYp4pikrgCVBYrMUBeyAJDJPYsDXU7I00tyc1uyPN1tA4ID80bA8Nm4Oajdig1Pq69AH6OvK1u906UN9vn+tIwesw0zoADlyRUd+kMe8D9HbeX+4D9D/7Z//sKmTurxigf/zjH+cP/sE/eKVxu06Z68B4v3Q7Vnvx3Rdrt/H7na3tmDdZWF0w2v993U1tWfbuMbvAvgv229Jnxbv17hoL68B6t059EN7vWOvaYh0wXQfm29/6hkV3nxbAd+vStwDXdejrrmvdNa5r95sYguuup13f7yvdB/A6C7q9L9oKFlXCohowzSPePsy5/yTnLPWxwR5abVPKTbS3i/E2ESZDlEeO2W6BdwO6qS8QogCpEVJCOHAOWuE25grwvgTcyMDpWYuzFbAWDdAW+emK5XaA+9wB82q60osKIVz0lnAH3aTb1sFOw3jvYMJLIK6DXWyw4UB3cYJcAW8X19gBcJfx1KtO8aozhCkod16jvPUx4q/8U1Qxw0iP5Yd/JzJb4B3fR4ebmHgLE2
5iok10uIWJ3LIJt9DRhrt+P0EUU2R+hlde4JUzVHlObJbsDiXvubPFq+96mfFoxGS8RVHDk9MFJ0s4SRVHmc9R6nFeeCxLhacsoa+RssaEPvFAoSKoAqhCSx5o6hjMQEIkG1a5kZTIBviDc2isLLIyyNLglzVeUaGWS+x0ikwXJLYmpIK6pJYWkgh/a5NyNGQ2GJMHEdooqsJ3zHZhEIUD2zI1iJmFucCm0jHOtWyYbotqALRNDOHAsDEx3NquubtTsjtO2UjmCK8gs5rMaiopKARUSrhJCkopePvwCZXnIZMBcm97IYQNUfhCNCqoUmJLgamEtYUUzqlTY0o3USmXeGmRYJcx5dSnmilMqrCZwubCaZxL0WjrW+MJNzUp64UAhJPKYXEJkAogE9hSQht5pdUQr8k8icRp+pVFBBYVarxBRbSRMdxeEI+WRFGO7+WEXk5ISmIXxMWCcJaRLDLCswv8ZUGTjYk0TfnFX/xF3njjjSuApiydQbbOUfSp4inY30Hc3nVA/vYu4tYu3N5B3t6DO3vuf09hZ5lj5HVzQZ6H8D1kKFGhRQUGU4BeGMy0cKz88QX28ATz6Am88xDz1hvwxhuI6ey5APqzWHprLdpYbLTjdOTJpaymZa9XLHaw4Yz74siBXjVDRilyVDlGfk8h74aIF8bI/R3Y3oRFij2+wJ7Osec5ZlpjFhKTxZh8hClG6GIDU49BDZwxqZtoL6Jx7rbWyQeLCwe+00PE4hFy9gCZOVZepieI7NhFdwkuGXnTsvLtqEJzfagImR8h8wMifcrtsaWcPUDbc971vlvcenWH+HZEtRFwEXgcCzgGZnFIvbmBjSLE+Tnq5IwtbfjQ3h4vRgmbGoIMxNxiZpbyQrCYKg5mPocLfyU3meWKZeGhNfjC+c1gQdeCSkt8ZRiEupGd1OwOKvZHLlzj9qBmM67YHmg2k4qNuHZ5GW6493256bMwQ3e/647Z/Z5WRnCy9DlaeBwtPA7mikdTn4fnPgczj5OF4nzpgHysLIFysZ5sE/I0KyXGwjgybA81uyPN9lCzNXAA3i3rZtmB+62hRYqr3/a0EPwvPzXk7qbmt30sWzuS0Afh/eeiiy/adS2Ou4kl77LgcNUvYZ0jaAvA+7KWFpyXZUld1/yFv/AXHInArwJAf+GFF/i+7/u+tTd0HbhcB8D6GUT7TqN9ANYer52vY0T7zGwfsK0Dzv26dYFe97jd+qzbt1+3PpvfrUe39AFnH8DexJyvW+6z5N2Htt8h1zkwdJ14ux2tD+z7hlT3uP2yrq2e1U+6dVmnH++3Sa0FiypmXg1Y1kOWesiiGjAvk2bdgEXlpmWdEMqSYeCcXFV9Qn5xn2z2EE+VCFlR1TnGaoxSGJU0UUoc8HaM9zYm3AQvQRTnDmAXl+Da/X+OKE9XQFwUbllUU4RdY3wIDxvtYqN9B76DXUy45xjvzqTDHWywhahmqKJh54uT1bID4keoFQA/RnaYbgCjInS4AckOOthwYDtygLse3mH5/u8CJNQlMl9gog2cRt6660lPUPkZKjtFLg/x8lNUcYHMT5HlHGsM1ipqPLS3AYN9xPAWJLeo/E10tIOJ96i8CZGs2E00tyeS2xPl2G4/Yzuq2QxLNoOS7bhmwy8IRIXWmj/z8CV+qr4FQvI3vxl+021IRM3P/MzP8OM//uMcHx8jpKSORyxGuyyGWyziLabxJtPBFvNkRBYmlH5A5fkYqVy2ShppSW4RhUXkFjKLyIGG9WYJdumAt00lIjTIUY0aadRI440rvEmF2qhRGzXeRo2a1MhRjQho5CcSWUpELjG5pC4kVSmwSIQS1FaitGBoLZu24pYq2CNnR6eMdIlXa7xa80P/j7/Cg6++iUkLNv/ev/pY+s4wKU+iQTX1J8zkPTLeQ8mr1NyjZgfNmKpOMMpJZkrcfAWODYISaZd49TGBPmQnmbG/ZQmDCi+E2otIGZKKEQUJmYkpbURhQqrap9Y+ulaYSrnjt46owiJ9g/I0SmmUMkhRIzEI65g8rT2q2sNUCp1LKJsRgnV8j8UxvdayCjUuDVIZpNfMlUbKCmzhQvjVCyguMPNDzNlDvOwJoj5FVqfuOakOQU+5PtbH08TC6j02GSHv7MKtXbi1g7iz58B8F9RvTWCeYecZtqixumGcPR8R+qjIQ0agIgMa9BL0rMRcZJjTGebwFPv4CPvgEfbtt7FvfAXx6BHiBvb9JsbwqW1bTXkH/HalKSa+g01uu3dX9gSZHSDMKVLNkFGGGJaOkd9VqLsh4vYmYn8H9ndd2t/jM+zxeQfMV5g5TmaTjzDVBnW5gWUDgjGoyHkym7q5355zTNcFopwh8nNEfuKcYecPEYuH7t1UnuGZwnVsNSTZfhe33vP1lP42ye6rzPSIw7lkWg+QaIZixoa/ZCsqSOwFp+98huMnX0CEGWpiufOBW/za7/g21O1bnAc+p1Jw0kynSiKBrVqzpQ3btXbLtWZLa4a5RS4tLCR6rpgvFaepx+Ei5HARcLp0YRvnTdQqjAP00jpwqzXUWhJ6hmGomcQ1W4OavQbQ7w5LtgearcTJTzaTikmk8b2nI+DdxMqvkxx3p3Vgf93xskpytPQ4XvgcLhRHC8XhXPFk5nEw8ziYKY7nCqMFw8CQeIZAsgLzZQ15KZnnkkrDZtKCdQfcv3zg89axh5KW/+kPn/AbXsuvXGMXd/Wjy/SZ8XWEJVwy5vB0VJY+QO8qFLpsehecd5nz7voWoP+lv/SXePjwYVuPXxlAv3fvHn/qT/2pKwmHbip9oNmVQ3RlLn2QvY4N7dZvnUb9Ova0r/nu1qvb2dpGXhdacd05+qC63a5tm1avvs5Iafdtt1ln+XXP1Y9E0tarzyB3S/e6247TbtMy5O35Wk/iPijvD990z7POsOiXbvt1t22dNNvlvgFRaVhWA5Z6yLIeMa8S5qVLXLSshyvAvagHpLXLGDrwl4z8JYmfEfsFvl/ie24IGuWidmjhkduQ1CQsdcSsCliaGCNjZDVFVVNkcQr5KbI4RRRniPwEm51eMt75KeQniPICyVVJVbdY4TVsugPaNtx1UR7CFnzvYUInN3Gge+4iu6zY7qOVxKRdJ4sjvPIMYXLXnl7sGO1oExNtYeNtTLiBji5Bd5ftNtEW1osRxRSVnyHzMweui3NUfoaVgvnH/5BzJrUF/sV9qo0XIG4csjROs6txTltaNBn/rAN7RoLQQOWSNuk5qprh1+dseilR9oT88AtsMuXDd2N+3Te8n7vvfhf7L77AZH+Xk8Wc0+WcAksuLKWw5MDMCs6s5MJI/tlyhwsCEAIlJZURgAtv4OQyCm1cKlClaqSsUZW5ZLkzC0sQSwsL4Ri/haSee+jMR/o1QVISxDkyyLBBhj80BBOF2QyYb49gKGBgwZPOOXTuQg7awgF9I6QLoSgEWLBSMnnphGiS4cUVplDo1KcuPapauURJuUAfKOxbHvbQx+YKg8AGwsUMF0ABMrfIylKntWOnjQelsJSYJoqMQqDxWCKZoThDcIzkCb/w/b+P2X32ogV/9A/9fhaF5
VOfe8LnDyJS9TJV8AJa7ThHPeNC1XiiYODN2QjO2PEPuB2+w63oMZvB1GVVhVW0i/YZqLWmkj46HrGUETM5YikmLMWYpRiwNAMyG5GbiEJHlNqn0j515aFrha3kJcA3ApRB+k5+g2dBNm1sG6lTy8oLnNTCWNDWRb8xBmksoragXehKtMQ0kzXtc9sAfvf0us5ua4QuwbgkQsLkLnuoXroISXruQpBW54jqzMX7rk5QdTPipi9Hvwh82N++BO+3GvDe/M8tJ7XBWuzprAHyjaOj8sEPEHGAjANkovBiixAWnQn0okZfFJizBfboHHN4jH34BPP2O9g33nBgPn+acWzft89bLGCCzQa0N9lOG+BuOhpzG25DOUWmjxHZE0R1jJQzZLREDivkpkHuKsSdCLm/60Yp9nZgbxvyAo5OnN7/bIE9zzAXVeME62GKMabcRNfbLnNoOAF/2NyyqpF8CRDOOVTqjFiV7AwEI7lk/viLZAdfws4eO927EUTJmO3du2zdeS8LPeKto4KLaoAO99DBFgO/4vbYcmuo2RvqVSbRrahgMKwRoxozslz4gjPP40wJzpTi1JOcKcWZp5gqSWLsCrw7IG/YqjXb2rBZa4LMIhYCPZfMct/JT1Kfg7mL8nLWxGFfFIqslCgsHhZpwRhBVQmshSgwjCLNRqxd7PVhze1xxe6wZivRbA3qBtTXjEKDUpe4rB+dbx1wb8t1mGNdH7vEIzAvJAdz5UD83ONwoTicexzOHag/mHscLxSesGyFhmGgSTzLOyce5wuFUpY/+ZunfMdHUvZHmsi7ih+6+Kw/dbdZx5L316/DQ+26FnR3f6+q6oqkpQ/Uq6qiLEuqqqKua/7KX/krPH78uD3/rxygf+/3fu9alvtZpXvju8MD3Ugwffa3y2K3jdDXJrcNu4517zLn6xjqvvb9uuOs+727rn/M7v/9du129pvY/HXAvfUm75c+SF9n4PTlKu3QC1wF7H2Q3nWA6BoefefL6/pPew21kQ2bnTAvB8wrt7xYAe6EeTVkUSVk2oHuxF+SBBmRX+J7JZ6nkVcAt6KyPpmNWJqE1MaUNiQUOQOZkcicgUwZqIyBzN1cZSQyIxEp6dk7vP3Fn+fg/ufxm5BUaZpSluVTw1UASA8T7DhHo2jPMd7R7uVyuLMC4DbcbkC3S8ThwPcxMr9cVuWJG4otjh3oaEC2jRuwHW2tgHV/2UabDmznDmDL/MyB7uIC2QXf+Rkqd3IZVZwj8nNAoMNtbLKHiffQ4Q51tIMZ3qa4/fXUgzvILMXIIcabgKyRMsMvnyBVhvILZFi6FPdDgRwrxETiTRRi6EHoQ6AQgUT4zvHOBapxodEMAmskRnvUpUKXHqby0A2DamqF0cqBba2wQCA1oawJhKbQMKws7/YkXibRC0inluMnKfMLS5n7VGWILnwns1AgfY3vlUQqYyAXDLlgpI/Y1EcMq8ekJ2+yPHmT3c2Ee/fuobXm4cOHHB0dkSQJd+7c4YUXX+LnPvHtHIz3+LbP/ziDxRlf2n6Ff33vW0hERkzKk2Sfc38TUwlUbtClxNYSIQzSWqTRCN86XJkBC4lZCOxCYdNWTsIly100k8Tp8VUzeTjgPmzWy8al1AVX95q8ogtgvpru/4tPUM4JRcUL+xsszw4oF6dQLpDVElm7CV2DNyEcvUiw9REycY95tUVajyh1RBt1JlAFibdgwz9nNzzkdvSAF6K3GXkzPO9y9KtvwPaJme47CpxPQy49UhmykAPmaoO5HZLKMUs7IBMDCptQ2JjSxJQ6dOy99hy4r4RLa1/hQLtHI8tp5iFNJBuLjAwq1PhBTihzIpkSeSmxV+CJmjoNyS4GZLOQcu5TLT3qTKGbMJlWS6xROAfDJhITomH4cQ3VJP/C1ghTgi0ROkeYDHSKME3kp3qO9DNkUiNGGrUJckei7gaol2LUSxvIezswGcHJBfb0ArsoMKVx51YBhCEijlCDEDH08QYCFVh0LtFLg56V6PMl5niGOTzBPDrAvPMQe/8t+MpXsMdPuOmLvu49/9QIsPQb3fhtpydvHUOTO1fZeOk7LXnaMPLZE4Q4R4YpclAiNw1qXyH2x46N32uA/P4OjIZwdgGHx9jDE+zJDHueIeYau8DpyYuEKHiR4eQVcjnmLBXUKsGquHEMdWEirXtBILCEsiQmp5o9xswes+XnfPS9L3Jnfw8pPGqjSAvBLJecLhWHCwei00qyHdfsJhW7g5LdpGY3KZv/K7aSEn+kqQZw7knOfI8zT3GmpJu3k1JkUjDRl+D9EtS75R1j2KgMagnlUnGeeZznPqeZ4ngZ8GThc7zwOV96TDPFolCUHf28sBajBWUtnHP+lSgvNfsjB+h3hprtweW0NdAMwnbw6mlm/lkj/d35un515R2B4GQhOFp4KyB//8zjX3wpwQoIA8vhQnG6VIwiw/5Qszes2Wvm+8Oa3WHF/lCzk5TsDGp8aZ9636wD8V22vAvM2326EuCuxrzdrgXlXXDexjxvpxZT/NW/+ld58uRJ2wa/coD+p//0nwYuhxDWWU3r1vVZ8nZ44KZQjb8cgN49R3ff/nbdCC9dMN1liPuWV78Dtvt2/+8D62eNBMDVuJzXXdt1jH6X/W+3745OtG3QjQ2/LlJL30JsfQtadr3f7kIIaiOdnKRKVsy2Y7qHK623+31ApmMCPyf2CyK/IPAvAbeVFisltfCo8ChsSGrjVcr2RGQMZEoiM2KRrpYTmZKIlIHMiGXKUOUMVI4vL+9Xnw3otulyueTf/cJn+MJbJ+hgh8rbJmNMLifOsTJwWm8T7DogHmxBnSKLI0QXaBfHDnwXx5AfoswChMYGCTbaxkSb2HgLs1reXoFsE225lN1ejMjPG3B9jiwvkNUFUs+RZo60CyQpnkxRXu5AclAiQg+iEBvGmHAb7W9h1AZabGCYoO0QrYfoeoAuY3QRY8oAEdR4gxI1rFETgxpr1FCj4hwVZig/RaklSi5QOoeihLzEZrlznMtLbG6oa4kxPtoEGAKMjNEyohIh2ksgGkMygcEGVTBEC4mXL1BZSgIkyidA4csQXXhkF4ZiCUUqqTNBnQl0IbAVV4GXFBBYRAAqEPiRxfNqhF0i7YKIOQMxZci5S4SkBEYpShWxlAmLYMwi2mYebiIsxOkx/sVDxuU5+yonmh9SPPoS8uwBt2LJ3ouvkMqEMhgTbt2hDjdYELGwITMTsCBiScxSJyzrhLwKERWoWkMFppaYSlxm1SxxOC60+FHNYFQx2rWE+5ZiuybdLAjGhjBSiMCnIGCWBywtSGUdqzvT2BMFZ4Ex51LapcLW0oVQHNPEbrcWKPFYcvrlLZbHUCwZeOHlaEgtcdk/PbAhxoZYP3Hx/IMhWkZgDQElvi3wcFEvjFbUxkfXHrV27HcbOlKhiUTOQM7YVBfcCp7wcvI2O945vs0JhMu61H1P9YeXuyN/3Q9g/1nuv5uMkGQq5M3jC37xwYxl/G7MxivowQuYaBcTOqdoZARWNYy76BhGzciQFo6xbxyVReCcZlVYEUQF4SBnMF4w2Fwy
GC/xw8o52aIpl4bsyJIeCNJDKE8s5ZmhnhlMIcEowMeKECsHWJWAirAyAhFg5cqqZS3gR4PQCKGd4etp5/ibgBxK5EQiIo1kDnqGUCnSL1zCpLFFbgjUlo+3GSEnEWqoUJHFakGdCvS8wpzn6NM55ugM0zi9mrfuY974Krz9JqIqVu/W/rfturL6NsJlhJekw8K3jHwrr4n2oF44AJ8+QWaP3bw+QQQL5LBETDRyx0Pd2UHe2oOWjd9zoF4C8vQcDk9Qp+eI0wvM6RJ9XqAvavRcoNMQzC6jvdfwNu5xXvgsa5etGaEIhHUhhi3URlAbQaAMQ98wCWo2wpqRp0mUix5lNOSVIC0l81xxlvqcpD5CwE7sQPteUrKTODZ+b+DA/U5cMBpq8pgGtF8C+XNPcapaMO/YeSu4wsJv15ptYy/lNtqwrQ2jCrJUcpZ5nOeXgP7J3Odw7vTzZ8tWPy+pa0EgLKrpcrp2zqFKwjBykV62Es3usOb2pOb2WLMz1GwNNduJm2/FmtBfL9ftfoe7fWcdAXqTwqDScLSQHM5Vw8A7nfzhvDGeFh5HS49ZLl19BxW7jUxopzWmBhXbccF2XLARVkhxNctnH6h3ZSxtvVog3rLoLTjvLreJ0roY7gd/8Ad/9QD6Cy+8wPd+7/euGqjrGNptyK52uK8x7zbuTcC2zf7VvTHt/Dp5RZ+Vb8/1LHZ3nfyiz4T3tU3rOlZfB96/1n5HW7ftuo56HUBfV/oGTbdu3XbqgvLzPOEHfuE7KWqf3//aP2DkLzld+mRmzKyIWeohp+UGn1u+n1orRszJTURuY5cxMCgJvArPq5HKgGwYbhQlPoV1Cdh9UTIQDbCWKclq2THaiUwb5jslJiVRGR711zRKA2CRpGbAwrhsp0szbDKcDlaZTufNfKkjpM5R1Sleo9sWH97B/8ge5tM/jf6nP4q1OSIQiDjADibYwQ4m2XFsd7TpnEP9CcYbY+QIRIBgiQhBxh7KnKPMMSookGGFimvUQKMGBjm0eGOQY4lIAkQSQxQiJJjCok81+gzqC4GeCvRMoRc+9dLHZCE6D9FFgtERQuYotUAxRzFF2SlSn6MqJ8/JX/lWqp2XGf3i/8rgl/4+ZfwaJx/8zxGzY3Z+6j8FJTj9Y/81xXs+yvDf/mPCh19FDzcwoy30aAMz3ESPNtCjTcxwA6t85HKKWlwg5+fIxdTN5+cwPcXMLghzSyISdse32Nm8R11HvPVkwbyM8IZ3EPEemR0xL0M8CVuDms3YDcVuDCy/8CjmdCn5vb/G8MkPSz53LPjvfxbubMEf+WbBeWU5yuEstzw6X/LgZM7CCCoZoFWIUT5GKKxUDuSsYr4D1oXkE9ZlNbTWIhAglMsQYJvMIDUueU9pkJVBFTVeZVGVpS4UWR4i2oyhwgE6Gwh25Clb6oIsDHmU3GZDzfgEv4geKfJRyOlog7N4i6kakYuwkQkJAmHYDg13Nww7kxo/rjjzat6xFRK4oxVv/tS/4/yNB9SnGea3/PEfQIkRym4K3+xizS5Lu8WMAUvh2VxKdAPyYlrgDsIgPA2+hqjGSgmETh5QZKgyxzcVIRrfapQxSF0jtRsNENogrdOJW43TzdaK3CTkNqYioMbD0LR9oxdfIR1rkRg8anxR4lPioVHUKFu5zK62RtoKRd0s1yg0HhqJ+80TxunZW/LFulBzyzTj8cExeV6CVAjpu9+QCOkhhMQKHxNuUt39pKtcPUemB85oTvZcX1EGRN3o9Zt2FNI1ZKOGWYXEbHPVvN1ca+AyrMpI48U1flLhRRUqqFGBRngGSQV1DvkcfXGGPjmkPjigevIYuTxBLI8hmyJKXHbQYBfjb2P8bay3ifUmblJjrBpg1dBlC/UHTeKtACt83NBLx2nXAqJ5HqRdAX6kA/3C1y6KT2iRQ5BjUBOLt2FQQ42NDcKvECLFVjNYnGGOD5tQlA/Q9x+Tye/BlmOSL/wxZP7W099+odDjjyOzt5DV6dPfaulho31scmvFvJs21npHXoM3QDSOoSo/QGVPUPkRd7cEOzuK0+VD1KbFbIcUG0PKyYh6awO9vYnd2cLubcPWBnK2YJSXjNKCiy9/lVFW8LEXX+HOYIegCCD1qBaK+dTjydLncOlxmnucF4p5pcgqlyXUb3KMWCuojJOexJ5h4GsGyhB7mlAYfCxaC4pKsixd5JSLzCPxDfsDx8DvDaoVE98C+b1BxVZSUXhOVnOqpAPtXRCvJKfN/EJJImvZrh1gd8Bdr5Z3jGWrqtmxsKUNtVacZR5nmcdppjhNFaeZy4raSlDOl2oF6IVt9fPuGai1oKoFgddkR42dM+j+SHNrXHGUKv75/QEfvVPwZ771jN2hdg6x3iUmO5xJ/tpPbPDafsl/8PWztd/8/uhbd30LpqWUpKV12vi55Kh1eF16HC18jpc+x6mbV1qyHZdsJ4WbRznbccFWlLMV5WwGGZthRixzvnw24q9/7sO8e3zG73vPp7DmUsbSBeitrKWtU/cZ+Gt/7a/96gP0vqNmFwheFwe8u66/T59B7+qS+0Oj69iT7k3qyzz65+nfxHXHh8voNF3g3a9P/1htfbrbd6+1+393u5vaqFvvdcz8urbsHqcvbWnbqTts88/e+hg/8tY3O0c5BLHKGfhLht6CgbdkFKSc2C2+yssQuA+th24AdQuqL8H2QDWAuwHdMUsSmeGLmmeVdcNj4D64qUkasO0A90IPWJoh82Z5oYcszIBUJ3iiYqTSVcbToVoy8HICv8ZTNcoDJCyLkkcnU46XhkKNqZMt9Ic+gk49TKawtUDGGpVoZFSjwtIxUV6OUhlCpiiWCLtEmTlCT5HVHIxi+d7fzcUv7YIFUcw75JdwUTG0bLTc0jmNaQnWc6BQ83TkDN9eDtO3/6vO/x05LQ1+cN9k4eai0ex6zXG8BjS1YQxbeUXMJdhowUfdzEuc02TrOJlzVYpR2Uv9cHu727q2YRM960BPANIXqNAiA4H0JVKKJhOoS9CRngBfFRBCMIDKdywfFmRo8RNQIeAZrDRoNMbYpi+DbSQHtnUEbYGJtpd1rZqpjViymjfLbYIhz7H2RMLFe4+aazsQDuAkwNDCREAiwFp8U1LhuRthLZHNiUxOYEt8oVeYFUBLRaV8ChVQSp9aOhhqG0mJki5WceAZsmyKlRqRROD72tZibis5t5WYmVLObCVTXMoeC0gkHm/+299IMkEOdvCTTSqjMFZegmfJpVNmk9ad1qlT44wQbREGrHWJmbCiSdIkXVvbBrQagzANkDcWYTv7mUbi1OzPSgfOJUtMY+w0/9pVx26L6GwvHNC0vd/E5bGeqzz1brfgCWTxFv6Tn8Z6Yzf5bjLeGOtPAIswS5AVdmPbSWiiEjiD8QjiAWQG5rp5XgSiElBLREUzOgS2FM45trxk70VoXFjMwCBDi/A0QtZgS6hSbDrDTE8QswPk9B3U6VdQ0zcQs/uIfAprvj0WQCYuvOroHuy/B7v9EnZ0Bwb7EG1hvA2XtZgYTIjJwGbW1bEWWN3c6zYAv7VNg9umL9kmB5tF7VU
E78pBVmByqJaQz7DzE2rzMrp8GX1WMvixX4tcPurdkmeTM9Za8EcwuIMY3sMO7mCi29jkFoO996KjWyzZoPS2EbbEL45Q+QEyO4D0sdPKp0+Q5pThluBjv/5D3PnY+/nUO/fRO5u89ImPY3e3OQ8U557k1HP27mZtG9mJmzYrJz0JM4tKFXohqOeK+dLnMPM4yAKOM5/zwmNaeixr5aK6CINq+q82DtArAZEyREoTKouPQVoXCSavHCufVZLNoGYvrtgbVuwljh3eH9bsDWp2kpK9QcVGpDECLpR0khrfc9p4X3HaOLueqkZuoyRLKZgYy3bDvm+bZq4tO8Y2wN6ybQxjY5EIZoXkNFOcZZfA/mipeDz1OJp7nC4V56lilimy0hn1rb/IrYFmnkmWpWQjbiK6DDRvnvicLV0Er//XH3jEN7ycP0Woduf9dX3fti5o7y87FlyzLC8B/EkacJIFbp6GnGYBZ3nEWR4hhAs5WVuFJ2r+1Ad/nFcHD65kCO3KXPr+fO01/PW//tc5ODho63htZ/ee+RT0SgvG+2C0b8m02/Q1y12Q32eN+zehC7a7wL2vVe8mz+kfo1u6v183PNfvBH1joLtNHyz3LaU+U9/+3q9r/7z9SDDXjRz0j73OGOnWrds5Ad4zeYjEYBH87lf+KR/d/hzAahhHCMGD+gXePH8RgO8a/0O+bvClp47fHXloy02MvzaQ2YRZ5YD30gyZ186JzIFtB8SXesDSJHhCM1QLBnJJolIiv8BXFWGYE8uUXXmMVQqjPHKcTGZJwkN7iyUxGkVMzkBkDMgYUBCoGdYrCLK38BafhfwM8bt+H+qVTXjjCxT/8Z9FL5aQFYhrhtm0DLDJHkW0i413MfEeJrlF+msi92QtF4j5hYsCE4xddtHiMsqLbCPBFKfUd14m/9bfDZ5h9I/+S9T0IXa4iRmOYTDGDMaY4chl+ByMMMEIGw2xyQATJdh44CJD5CkizxFFhsgzRFlSbbwCViHOLghO3qL2d9HqBRe55OwxQvuYZNe9OEt7Ocxfi8usoCWAaJIS4UDpQMAebl1AA8JxoFzwNMNolZtoA3VoBJYocMSkQiCMQGlIhYWRxWsOJermWW1UAkXD2FBKqCXkHpQGaoOsaoSukLVZyRdM5WHqAPv/o+3Pg21J8vs+7JOZtZ/t7vfdt79eZ6ane4DpWbAOOAQJCBQEiiIpgrQWy1IowpJsOcIKapcsW0EpQpZIW5YpiiBB0KJEU1xFgOAygCUDIAbb7DO9vu7Xb73ruWetPTP9R1adW+/0fYOhTFZEnVpO7ZVV9c1vfn/fnxX4foXnOfs+7Bxh50RBQRyU1PaEE3WHYvMTUFQkp79IKA/QxRXyxSZlFeIFOTaW6ChwCXuonCf4qXIVqz5UvcABeeEATG4ichM7QGMBaxHW4GHwTIVPjTIlA5OjrMbD4nsSG8aUXkRqFVmtKKuNBgzbRgFhewIbI9gXGE9YI6wWNbXQ1giNEZb6NZhLzFJRKYW14gLEiu5z24ItJ3hvbx/Nrb/oxNo63VlufdNYMDZv2aYuYBHCOCJd0QTUSowUIOUK51nbfsi5uFbGoExFYAtiuyBhRmLmRHqBb2uUqREWjBBkRcnxyZjZssCqAOHFWBVQCx/hxeAnWC/Cegm6dw38HtgavKipSeOegSxABx9BVEtEvnR5DSqn2xfVwlVSRIAVCfXwNqZ/C7FcYOQIE24AFSJ3AFCoOSIsYQB208Puj7DX9rA7u+D5MJ3AvIClQZQeyrpeWlC6RGrtKje1j6m2Md4+NvwI9VBidyXcbJ5d0TyHvnWJoKRrBrI6Q+gFopwg8lPk4jFieh/56JvI+d9GTB85xynb8WxXCrG3hTjYQ17dQ9x0Aa7y6p6zm7yyh9jax+gY/d6c+oMM87igHsdU+uMOeN5/gKhmWNUHb4BVN0G8CERIITj4P5/i/Zm/is4keqHR0xxztsScnKOfnGCbTK/m/ffg/vuI+dlTDjaiXqAWd1HZPeSZK8u+7/MKrzAajVzpE5LjmeUsj5jpAZncpPR3qUavoPd/GBPtM40PeHA8pP+LGSo/ItJjtu5u8MJBxHf1a64MDXu9gv6gpu5bxp5omGvBmRK8E3uMB4KxLzn3JBNPEhnLZlWwVWfcrjTfXV2A+kFhUamAVFEtPMaZ4qzwOcoCjtKA08JjUniclj5prcCC71lU3xJay8x4nBuf9xYGP7X4Z87iEOMkKVkh3iwOeQABAABJREFUMQY2As1uXHGlV3G1tXXs1bzcq9ntFez3anqBK/O5gEkD4s+U4EwpzpTkgSf5ipKcqbAB9c4mYUsbJ6tpAb2u2TEVrxjLD2rDjjbsWEtiG2bbCv6Vnzvgl+4njELD3/rn7tEPLHklGC+drvxsofjjv7jNedrcS/XhYM51RUJXItfFOl089e2CPo0xhLLiWi/jIH5aztIdr2rNrPD4k9/4DN+YHICFgTgnz/On5Cxdrbm1FxK+byfrflb3D2yz+A+y8ct0493/ngXIO/t/ig1+VnrcZ2mV1kF4V87ynQLu7v/rx9Tdfncf3X2vs+XdddeBeHf97nYuk9d8u+XXKwDdGtx6YOg0j6i0ZBRMV8tVVfXUPua6h0ExUrMPJZtaBVFYSHW8YrOX7dD0mdUObLds99IkyAZ0x15O2ABuT+mOVEZQC49a+GQiJiUmI2nAdkZMRo9GLmNTElL6Mqcni4tx4UB5TI6SF5U7pRSz2Yw333yTN998k+VySV3XpHUFd25RPUyp1CZ1uIOJd7HxntOiJ25o4j1svIsNNxDFBJm79NgqO3bjxTkoi3f8DWR2hNAzl1lzMMAOt7HDTcxgGzPawg62MINNzHAbs7WP6Y0w/RGEMWJ+jpqfI2dj5OIcOTtHzseo+TlqMUHMzlDzc7zlBDWfIJdTrK4dy+c7/bwOdqmjA+rBHaz2MME2tb9LHe6hvU1A4FVnKH2OZI4/f9D4ozufdFWdEtRjvOoMW8+wfgLRgEpFmHCDKtqhHh1QD/Yo+9vUyRZ1fwuSITbqIfwI6/mYxnGFFny1LQoaHE3csKFCXDD4hUUWIEvrEHkhEBVQWkwJphIrVh1lO60HYuWv7UjspkWi6aV0QLH7TnAsqmjwc8tvi6f+67x5OuO/DVPbfe9Zezlb291kd7jq2+Owa60lwmlzZGdZZUFaKzxjhWet8I0QykjKHJstUXmKWM5hPkPWJYFnGSQBG4OYJFRMzk6YnZ1QlRn9jX0Y3ST1t0nDEbkfo8MAAsf2itqglIaegW1LuJUSxSl+1TjoaLBaUeNTEZIHCVkQIzHExZKkWJIUKXG+JC6XBEWBqkpkVUNVU9cB86LHTG8zl1tkfp/KjzCB12SybfoA8ASi0vhVSVAusfMzqtMHiMUYlc2Q2dwxztkML18gl1NYTpDpDLGcIPKFu/9egvF7GK8HQR/r9yEYYP2e64PBKimX9fuYZmj9Pqa34yQlKsYE/Qb4G6gLhG7T0IPFAxECCqqlCyo3S7BLhCogqqEPdjPAbm9htrdhNHQM9GyBXOYOyBcCTyrC0C
eKLbE6w/cWSM+A8jAiQJuIug4plyHFIqRcBtS5j849TKlcMHXbYqTbZ8a1oCArsHkTzDpDFKeI7BTyU2R2ilweIhaHSH2G6lfI/QTZgHlufgR5dRe56yEP9hB7WzBbYh4fYZ6coMc+ZfUZ7KO7+Pf+W6wfQX+E3NxGXt1HXtlB7oxQmwlqFKB6zr1HF14T9Fqhz1PMyRR7PMYeHiMePIb7HxA+ecTHD7bZ6cd4nkdRFJyfnzObzciyjDzPKYriKS1xGIa88l2f5pM/8OP8T7/+NoW3zYvf9TvQ8YFjU7OQ46XPydJDCNhN6kZDfhEQutcGhyYlW0lFGsK5Lzn324BQwbmvOPfVCsif+9IFh9aGrQbAbzbjm7Vhs9JsV5YgB7uUZKnHpPQZFx7HmcdhFnKS+Zw1gH5RSgqt8IRBNRVeY5x+HkAJ27xqm4DRSuAJyyjQbIU1e0nNtX7FjWHJ9aFj5Pd6Nbu9mtC7eGMtpeRMCU6lA+ynrYONcvPPmvnnUhBaVoz8Vm1QqeSWqjlAr8D9jrVsaYsyhlmu+Iu/OeK5nZLf9ZHFU1hpncRcB+OXTX879ryd12W7LwsA7SYaKirDV8+usu2fseOdriQtXZcWuCCp17GllJKf+qmf4ujoqD3GZ4LqfyCA3r1Qz7LW6XbrftZdtnjdZm9dmtLOWzueD22rC9zXWel2nXWg/Sz2vrvOZVryLgheB+jdrutI0z3/7jG0BWP9Ol6WOfVZ571+Tt3pyyoQ6zVKrQ1fn96kMAGv9d8Ee1EwwYHuZR3zDfMKC91j15w1LHfvKbnJ3PRITQ+pNLGXE3mFc19R2gW2SYsRklp6VMInJyQTMdqqBnannT5zzhhm6aQyZE6/vvqvQIqLilbbSrNeoemWqcoqp0s3PRa2x9L2OM18Ho5LjpeCXG1QB9vU4TY2GCGKc2TmQLdVAbIYo45+A5UdY8IYc+Ul/PGvI5jAcIgZbGGHW5jhNna4hd69jtm+BkpiBiMIE1imyMUMdf4QMRs3mu3xBQCfu3mqnb+cYXWNkD5W+Vh/Ax1dwYT71MEuJtp3iYr8HRcg6m9ivBFGDUFIpFkiTIq0mUu8RIEQJYIKKRytLJRurIYDrPQwwkNIDysb/bYKsH6I9Xys54PnY5XCegqUctpl1fRSXMhczWW9uJCUVLYzzoV9Yyun0Q3LrDq91/SttKeV6rSyHQErCrZtel9VBDrj3XfhikUWYA3SVohq6ew2l09Q6WOS6th5svcieqFCmpIynTEbH7GYnDDsRdy5eZ2N4YDM9FjqLab1NrPKDcflNtNiwCCY0w+XaKUoZYBVglk4IBhU2KHAJlD7fvu2wLmDgJCWSFZsBDUjv+LeG18mXywc8/3aP/73gB4wRNADEnx6BER4rkECAGVc/ENYo0KN9Guk56z6zEJg5jVioQlKCIWgzpcUyymBsMShC5RDgG00/Vr65LJHoRIqGaKVh5Hq4v5b9yMEWNlUviQdRny9wkPDXNtVXcS2FZO2twbR9NJqpDGrIE+jVWOfqFzrgmnKjxQ4+88aROVYei9x43bhbEWDELzGrL4qHZguc6gKZJlDlSPKDFE2w2LphnmKKJaQL5H5kuz7/wD65quQLxj9l/8CIp2h49vo5DZq8iV3v2TofMe9GGSMCTaw0RV05KxXCbew4agB/InzSxcalLqoqFkLxumzhKybYFaJjQKIQydPqgtEXbjzqTTUoKTADxRRLOiF5yTJCf3BKb3RjGSzwh8BcURZ9Jme9Jid9lhOemSLPkXWo8pj6jJEFxJTtM9yp5Wtau6tb0HVIEoEGZgUYZxTjZApwsuRUeFicTYsakfiHQSo2wO8GzvgedjDU8zjI+yTE8zjYzd8coJdLDFXX4StfZRdoq5dRRzsoq5so7Z6yFGI6nsIBbpU6FxBahHzCs6XmNMpHJ1iHz1Gv38Pce8eHD2E6Qm9OOK7v/u7+fznP89f/+t/ndFoxO///b+fO3fuoJTC87zmmw7nueKk0aEfLzyOGi2zA/A+x6nPJPcZhc7RZRWY2Izv9ZwsZScp2YxqCiUYK8EkUEwCjzPVMPG+asD9BZhXlgsgX2m2GrnNZm3YbkD9qAKWglnhcd6A+bPc4zBzx3a8ktsolpVEG4EvrFOLWdCNjM0JBJsipwW+MAw8w0ao2YlqrvQdmL81KnhuoyIJNF+fxnx6L+NaUiGEQAPnAgfiG1nNofCY+cJp5VtAryRzKZ2LTVdiU19Mb9U1W7XT0A+1cZWMBqd9LfT5pX7M754teS4rnsJ5XZDdxT8tu23t08GeLWBfB+pdZ5b1YNA2ELQbDHpZtvV23k//9E//owHo3a6barkdrgPGy8D0Otjt9pcBy+7/lwHry9j4S85ltfx6dxnwXmfQu8s+K4h1ncWGpxn0dr1uk0fbda9f97/1fa+z5Ovn3C0U7fmsy03aQvflyQv8hYc/gkFyM3rElj91gZSN3GSpe4DFhBIiS6wyeip1MVLyAnAXhGjkClC3YDomdZIS0fDf1gWBxnZJX+QENkVyuZ97e+7r96ut9JVGkjJwlQMGLGwLvh1zP2/A+MIk5DYiFplL865Shiol1BPSyQPOzh9SmQValdSixsYepjdA9zcob3wP+uBjTn8vM4h9CALXlFxaxHIBeQ2FcfKKwkAJtrfn5A2lhcUcTARp4HSoVnMhJRAdJllcANmVNERcSESsvdCWrxhiWsb0aTAr7QX4bAfCXsxvwbSAVcbN7nwpLlhb01Awxkky0E5jLKxxeuNaX+i5Gw2tbWzo0AqMcnZ0tQIrEEqDrF3ymgB83+CF0Ov7JH2fIIJKaN6ufAgtgdbsVVNOdY9chYBgXywQuuIw3wANnqoZBgs0ltIYaisw0nOVCaFASleZkM1Jy/baCwfeazoA/qkC2AzXZRwN0rQWMAjhPmQrGQcWsZJ4mGY1ibYeRRm4a9XGEQhxITQ09gKICYuQDZv/oUejPR7ZTrRtDt0qkcU2yvJiPkIpB0hVU5tZYeQ1Vt5ebN61eDR/iM6FaZcxl4ybznrdeXy75Tvb7dSxnroZa40al86Tl8yXa9NCwDXrnk86855YWDWV24s/V9dGfHjeUzvrVAx17e6l9B27v6PBFM02mnIoZHMvOp3Rzbo1Qmsw2jHLXgRTi/ziV1EPHmL6N500J9yCeoqsTl0yND1z4DiQ2DCAOMJGA2w4xHqJCxzFByNdGWy95EUT7dvcDCnB88DzK4JgSRAsCeMlUZIRjQqSjYp4s0JSoYuKym5Q1gHZuGQxGZCmW2TlFmUeUS8NpjAuZ4LGxdsY5d4NWjl5WiU77L125+DVCK9ChDUyqhGJRW56yJ0Qf6vE7y0Qk0eYKkaLm9jxmOQ3/gTiySFhUfHcc8+RJwPKK/sUO9tkowH1Zh+zkSBGEbLvIWOBLmXDyltUDlEBy/tP6KcVH93c5oUg5Ka1XJeWfU/TU087wsHlLeilFpymDhCfLAMXjJj6HDfjx0ufkzRA2wt3FwfgG1DfAPrdXsV+ryb2nZPqTIkVaB93APzYk0+B+bknGTRymu3asFVbB
PoWNKE1LaAGsVzgcRWWRYIv0cGeSIwCBaPqIV4ayFWQazHJcWuMTgcoUofGwRYDIfU8SYslex+KmDVGPKGJcqysMAO6pHQKt7b5O9XrgiibkMqHAVoIeKeFBlNYe2ylHpxwh7TjjZxxse4A8fYYoOpbtJ9/E/JZh+iA362LAC7iZcQbQ2aF1/Cbf6DFPRIXERpfPnshjw0FUhKpshrKW0PpmIMMKnkufNm764Li3KjvHG9xGHX0dOjhG2wK32cLdu4q49h+nfwHqrmDxAqgJZnqOGB4j97yLe/xby+CNkOiRY32L99T/FsbrDVN7EyJ25372FwkeJEZF5RDf7kFXxmN3OmJurKVEkeDwNOJgEzPxtxmKdJNihiK+Th9v4espmOGUrTGnLglAYdKY5H8xI8pDU22XCJkkREsYlsl3i+oJiPUT0Hb0gZzCNEanjJ/tf43PhKVtYjsuY+0XMg7LFE7HKsVrjQvXZVCnPdyzPtzUvdA132yUvdC1dr8Jcjx494r/7e7/Ak0mXib3O2O4y1NsM9RYjvY1E0+ExLXdApB8S6YcExX3aHOJJ/RTe+sSrZwnvAU+9buKa2tIwiqLK2tDvMWOXmbzO2O4sdPhju4PFA1FV6PYk/LO/8oC9tfwp5rxer7FfPRpQY6aa8a7J2PPzc87OztBa02q16HSqXLTaLrEeZajfq0nfZmXRusBRU1IznU6ZTqcLYjSKIrTW/IN/8A++J4D+PVcSradmcmiT1V0u+lMD0iYDXf9++UAtR2HLyaLLv112bmlGTM3vNU+Cq6Qxy9+tA4blwkTLOvP6vWU2vDmK0GxT/d2F73YjwLiqnz9Jp78ctNTWPr1ej263y2w2+5gWP8sy3nrrLW7fvs2P/uiP8iu/8it0u11u377NN+ng8jnILSasPv4n7HQsL93ss9N1BNI8te/NPpRCLvyTSxEsErEkjp38MaFOKiBuU2KTEdmU0KR4c32BRfL/fvX/jFWVZ9daPsBJRSk9Si+gUCG5micaaQHC0TYJkc1p2YzYZrR0SlCUBEWJKgxSO4zxuZfd4HG2BU4QigIpLBqPEg+BIERjhaCYF5wJhaYlcwZBt3IlUY4uMxAaa0qs1Vhr8LyAWbkJJxVrFamyAka2nl01m/mxmrOLzYIrohrrxtXrTlYwwInqvwOBC8GFXDLJNcMczNdrdr6cP0ALV3ktz9dF6SpZTVFW69riSlvFC9LhnAXpECEQy6piZeywkYBuCFGIiwUmFLhI4uJqJMBFsqpS6EsobCUByRykYJIIk0vMkSZKcyKZ0SZBFx6T8z5FHlSJeYIK/M4rSpoAUNDrDlnrndONxnhoLgarHB7fJEtamMJDZBAXCbFMkcagZz6z0w7JaZsyCyl1OAfJ3UrCISt7RwKHC0TllOIJLAo9B7xibqFXE8n48yBnbjUnA4MIHMITWFXNfgjtCDqRQASWmbLMFORKgieqJFavsuVjzpIu/NXrYJF5AFVQDbU7ULFBtUyV86sVtlRVUGbnAYOtDn0xLXCFX4G4FatFy17giTMsUiqzqkLdF4pAa0/osgJ9Zm5jiYOWnLEVH7MWndMNJpy6DU7tJols4xCXo0FZNTLhLNX1lwJTEL4mWMnwO3klX8k8XKpwRtKPL1gNB7SDKZ7QFDogz0PyNKJIQvIypJAhOUFVJEpQ7bxwSGkJVElMjjJ27o5SVRnVTqKrcTI0HjiBokQ5XY18OUthLMYLcTKoRrGWE16bgzl1ouqCnXcoaSv3lHkgIwRYqmJLhah06bZUXLBdUeNTxyzpMpJrxK66ryks0umq0q/VOKexlDhhsK5Er+8g45Bn19/l9uxXaYUp8XpM1FfEHUMcF5SlR5LGJEmLZNYhOemQjFokoxazaYs09UlnOcV0gp6eoUcHeMUD4o4lWFuBVoyNQ5LVl9G9LcLuA0QM9tMvYdd62LHF+y/+Kepfv4/2Nyn8HXR4jbx1gzLeRXtrWBeDVdVF4TXIBS3BhmCDeYLwnCoQm2RyE+Snq3yYrgcBpJ/5YxBr1OwJwXgff3iAP9ynV37IF8MJL0R/QEfkc/IBBrbFfXmNB2KXQ7HNidygdCGdYsRGeYRfJDxRz2JcCIVm8+K7TIN18nAVI1cw8g24/kYVZEg7DzwE4sk58r0Pkefv4RcnyK0O6uYm7F3DvfJDmD/9ZyjOLCYJmBAzKcd4pw8Ij79egfh8iohD2Nqk7DxHHj/HzLzK6SDjw5HDDEIElq48Ic4/oJMe0J/+Pmvph3R4gpKWYO1Zbr3+Vcr4JgOzwrnucSI6nPV65ISsqgm3ogHr/gyXXDAbDhBHHu3pBjNvm/cu1qEQuD3417ef57d3XuPCi9k2GaujY1aGx7zhDvjc5ojrGFIX8/5U8bjo8xtnPf72QZ+HqcdmYHi+rdnSlpPNN2j7D7mRfRsv/x2MVwHaNM0Zlz2GeoeJu8aM65x7P04a7pGLdUJ7QmweEdtHhOUDYvOItt3Hd2fVbe4PAe3LU5OobGJBn5R17xHb3iHWWs7GZ4zHYzrdLlHvJv+y/L8wLNeQAjrR0zVSmjisiVVrJjwIgqeqhdZMeBiGCx261po4jqlNORbtmlspNiU1tdRlOSCoteh1ddJaOlQ7230v0380g14DxCaj3WSVP4k9X5a7NEF2c2qy2/WBXAbZNQBvylzq7dadVXdk0x7nqqGQ5TYtb2854msy3cvBRv05XEpdmj7oy1aOn6RjX5b4NAODus+zLGMymbC/v8/Dhw/Jsuyp7Ugp6ff7rK+v85M/+ZO88cYbHB4eVlnZWck/v3iDk2FG9Af/BXo2oNPp8PLLL7O+vv5UddRm+5YDqsIKfmfth9HC4yvDf4XEMvbaTP0e06DHJOiShH0mfpep32US9Jj6HTIV41lNt5ywXgxo5TPiIscvNKrUSC248PocRjusTMaowjG1MTPXYkaLnIjAZcRuRosZsZvRkSmxyDlx1wiF5kvtd+irGV2V0REpkdRIKShkwC/Lz+Fj+HHeJJDwjrjB18SzfIX3uONOFoUM7t+/z6NHj+h0Ouy+8Zf5MP8Mf/zOhzy7er44V+vo/OzsrCoINRwyKmCqWlwYnzzs463uotZ2obeFaa+j4xXSoMtMtUm9NspqonyEn49QswvE5Bwxu0BMh4jpkI7QREWGmQxRWUKgFJOkIPFD1NoGcm2dPIop2y3kygq61Ua3YljpoeMQ046xnRjbjrHtsJLAzgpUUqASjUw1YlZCoivt7bz4jZ0zzAaF8ALwA2zoYyMfF85zNuaa3SpBcf7bEqS1+F5OoAoCUSC1weYeRRKQTyLKzMdqdekKIqqgRMUGLyoJ4wK/nUEAhQkpTYjGwwWOUBZVeXdZJaXl45hiGmJyNbfKqxhwGRtkr0oQdb7ABQLbEtiWxBclGCjLADC04pSgrTHCI9EdWt6MrdYRW+0R6+0p51nMeLZCNtniZNxDO8WnVx1fXFM8s1IpB76dwW+NYb90rCoNqiQPy8osZeijx0GVh5BTAXSoQNAQxInDnQm8viZ+scD0HXkZYGYe0lrsVFZVLI1FpB
RuIIRwiPDVpIg+O/OCF/M/kF39G7HKHsUu6ZpC/nB6IX88KdaYjFfIyy5MLZxLfFvw8tZ3+Ozm13hj5+uorubXyz/BN93rDP0VdOajU59iEmJGQTXCxFx+IkUVEJagPI3yNU5W8g07VUjtWJfn3Aj2udW9z27vgPX4jN32ITc6j3BGcjFcYzBd4yJb4yJfZ5CucTFb4yJZY5j3OVdrpKttNtoT1mdD1qYXdC/OCM7PkWdjdC4o/T6Ji7l/NMK1NljZuUMmu6TEXIgOE9Em18F8BILLQKlcmmsHHjc/D+c5A8ozBCqjxTly+j7y9Gusy1PK7/+LtLoxny3fxgiflICUkKnzmBqPqfWYaFW9L0JyGWFba9jWKggFRYLMRohsgsxGVVXNfIryHX7bw+8EhH2foB/QWvNpdXK60ZBea8JKe0S3NaHVSojijCILSKYtkmlMOvHJx5APMsxgiDk6wJwckMV9Rv+rn6E4TfD+yn+JG1aalmUiqH5OlVpzMbVMu69RPPujFDufQa/ewoXdakinlNW5OwNSV4F3OY+KpF/1o8elbEtqUAXSy1Eti9ePyNsRKrBsMGLNjlkzF6zoISvlBf1yQDc/Y2YUh2qbQ/8aR+E1nkS3SFWHOB9xY/gm/eGH9Ef3CM4/4izrcKieZ7j5RbL1FzCtDfCDqi318RUaRFm1LYpAj/HSx3jyhGClJLge4/Y2sBs97PEEc5hhRmB0Bx1uYMMOweSEcHpM5HKCQCHimEStMC5W0dOA4GSKG5TYWYixXQJ7Rtcd8Oxmwc2VlJ3OiN32mLU4wfMUOREj1jjJO5wUHU7yNid5h7Oyy1nZIZCGFS/jaNxBGcOf2P0mX3zGsbsKE7/krYsJh2GP6fo1zrqbPJIRvtFsTM+4Vkz5zGqL1/sht4TG5oIHWcDv3B/wbx+OOA+3Gbd2CMop3fE+ndFDoosHhOf3CE4/xEsvoIHPDBGZt0eq9kjlHpm3R6ZukXpVkm1sHhGZR0T6IbGpmPfI7KPIn8I8TXxVA9yaPa813DX+OD09ZTgcMp1OieOY7e1tdvY+hbjxF/nj39fltVv2KYxUy5ubNodNR5rxeMxwOHyK8S7LkvPzc4bDIc45Op0O3W4X59wCXNftrHMA6/bXhbpq55f6/VrmMpvNFkmotVvO3//7f5+Tk5O6T/6nS1x+7ud+7mMSjqYkpQkcl5nzJhCv31sGnstSlU9iuZfB+VU69OYwR/35skb9Ks3SVU4ry/9Zf29Zh34V492UudRsd1OL3gwOmpFb85jUmcfLQLnW/ud5zng85uzsjAcPHixOsCZIj6KITqdDp9Phy1/+Mp/73OcWpvn7Bwe8/fbbnJ6cLCLEu3fvcvPmTcIwrPZ7vh2NZOp3mAU9Jn53MU/9LtNgvh70SL0WUTGjnU2I84wor0C3LDRSgzUSYyo/4cS1SIhJaFPiV2sipS0SOiKhRUJkJgRmTOymRGZCaMb0/QKSM6bDs6fyDZYv+jo4qd1pmom7TQeY5WMqhFhczPfv3+fevXsM05KXvvAVrr/8GfKgy1hEjFx4OduAoQsYEzIVMSCIygnxfG6bhB4ZfVmwIgpaekqYjSgHh4yOH3GRJUyVx8wPSfyQqReg1jYItnbQnS551Mb2V9CdLkXUxnR72G4fURao8RBvNsWfTZCTEWoyJkyn2Itz5HhEXJaIshqNMMajNArVXqdsbZFFa9juFmVrhSLsooMWwhq8LEPlOSIrIS0hs7gMlAkQZVgBuDxAFxESS+QVtANNJ7L0YujGjm7k6MWOTgTKE0y14CyFo5nmrEyhr7Bh5f9staJIA7JJizwJ0XpecXQeF8vI4rU0UScn6qSIlgYVYEVI4fnojmZlZcLG6pD22gWldSQPtpg82uD8uEsROLo3Uv7/tP15sC1bfteJfdbKOXPP+8zn3Pm+9+6b6tVcKkqUkDAIGjEGuGlamJYd0IAgmAxhE9EOuzscbkd04LDd0dHdNBZghOQQICiJbrVoiVGCglJJqvfqzXc687DnvXPOXMt/5M599z3vFu0IyhmRd++7M0/mymHv/K7v+v6+X+sgxjiIYbMknngkC58Mm9I2MC9L5LGGI0l2bIFjILoauqBbErlQGIMCeanRV6DSKlxJlXLJKj6bDAPk0oJPLQeCDAMcAwypKaQmF5pyXW4Cz8u0CqpRCiUQHY0IStTScUfoKvSnsqLUS8AvEO0Saz/T1ssJwiQtnly5jEb4dsrWa9tMuwGx8knjgFg5GKlCRQLfCHmt/w5vdr/Fg91vc+jc5Ot8kafObXLDpMjNKlnTEHBiwBSKqUM+sWEuYSyQmwrjbo5oq8rpIzVRhYEwNNJQaEOgDAOjLLDKAkMVWCrD1yEN5rStKR1/RN8bsOVfsuVdIooSlRkUmUmeWUxpcuXvMFMt8rlJMVIM3jsnmJR8ut9mJ1nQySKM1e+zINUGofb5sLHPe+1bHDmbTHWDIjOr8xZpmC9HCmowV+vq12sK6sLe5SyUwqLEI6IrxvTUKS11gp0+Jp8+Jhk95Ozo45XmFUAbNsptot02ym2hnRbabaG8Nqp+77ZXn2mnifK7KK+HdptgOqz0IEIgZYkhciyR4IgYVya4VorrpAR+lS3Qak5pt8Z0OmNa7TkqLcmmBfk4JBsnpOOMbJKTzwrCy5Dx6YTwKiJSm8T2beS3/hkiy8FwyFoPSHufJt76LOlLX0LtbkCQLm9sp6pJSUEMY+RgBrMCnbrowkMrl2dpxaxGMaRR2USadgnu0vs7cKosAEa08xGt5IpmNsTKFiTKZGZ2GHr7DP1bRE6X5vwpwfBDvMt3sc7fRp6+Q5oJFs5d4o3Pkex/D/nWA3RzowLoWkKqqvZqA6FTDJmCKSgcr9K9GxPMRoi1KzBvupjZGHl+hn0xxpwJzLJF7m4wd/vMnD6ltLDTKSIPq/IOxyM3eshJinsVYY0K9MImShqgBRvOkC13xHYwZSeYstuYsR1MabgV8LMdj0nh885RyEeXJVPRJ3H3CM1tRmWbaWaz4absBSkHzZSbrZKDVoHl5UROzj+cbvCO6rC5q4h9yUhLbsmC3uyK/MNv0Ric0hyfU04j5qLNyN1h2thj3rxB1LmFkcd448e4w0d4kye4w0e4o0dY4eA5cZoGcrlFYt4iNm6SWrdJjFsk5i0yYwu7vMJdAnYnf1K9Fk+wygtsy1oB81qnbVnWKpXz5OQEgPPzczqdDtvb2+zt7fHVr36Vz3zmMwRBsGrHujphncCt5zzPWSwWjEajFWiun/m16UaWZbiuS6vVWhWBXpe0JElCmladjpolz7KMJElWGK5m7GsStZbUuK7LT/3UT313Afpf+kt/6Tngd3160Xaug/D19dbZ43VN+HXgCs+scr5TpW491Rehvkj1PtZTpep915Kc62FF9fGtr1u38fox1se3bo6/vmzdonAdqK+7sdQVxnUBw3w+fy5gqLbraTQaeJ73HBtfn6soihgOhxweHnJ+fr5KVq0n0zRXAN22bXq9Htvb2wytDr/4W/9TcsPmxk/9ZexohOrsENy4j7d3l7Sxw
cjqcWLdQJmVF5qTxLhpjJOkmFmBkZerlMaifBbiEeMD4FOB7EBEBIQEIsKnAuDVZxFNIyEQER4xkmd1A3V19PU43dpe0rIsRqPRSntf3xfVMF2lCau/XEEQcPPmTW7evEmj0ai8XoVBKH3mwmMuXObCZYbHXHrMcZgsQfdY2SyES2nYSFmiXJO+jnk5G6Gk4FecHSxZ8gfjt9lizt/df5WP2xt8Zvgxn7r4gMTxiRyX2PEITZvQcphLk4VpE9kOieNROi4yDjHmM8zFHGM+hekIOZ1gxwuCIoPJmPzqEisKcXNBNlfkuoHR2iPzNkjdHmVzg8LrkLsNVNBEe1UxqHYsyEpEqqpgn0RAZkJSIpMMGUdYWYyTxVjxDCOdorIJvl0SWMuHv0xxjJTALggcsFwb12/iBS1Mr01h9/k78gGXmctbKmS7VEwTg2lqMoklk9ggKSSmWyAbBRtGhje9orVnMNvNsLXF3XiTSCjeT1PMzpRGK8a0crQSzMZNRqMtQtXA3iox9lP83Rnt5pwdf0TTmRCXkvFkC6vYJEwDRkZMM5hy0Jpi6YTxWYA62mFx2uXowqV/KyXtJ7AX07ydEmuLNPYJc4dcVx3GNlOaZQgxnM/3mIUtzFRRJiad9oib/cc82HqXdnvENO1xPt5hNOsxuNhhPOyTZC4NCnqlQcs2sG2Ym3C6lGC8ZMHrLjQUPJor3plrziOJzvUzy8eSiq0MqQr7tK7kX01dObMUupqXziEVsAcsrYWbChkUGF1ZMZ/RMlRHLSVPElRpVAmfpVj9fS0fYVmfodRaCnJdk7GUiDzviCOesdSWRjbKyq1HiFVyKKbGMBW2KLBljjSq5YUwyIRJLsylTz9VEq0D2hRoUyJEiWXkOEaKa8UEVkhgzwmsEMvIkWaVXmoIhakUblngJgXNOKW1yGjOFGaukEKjLclhd5Nvbx1w2NviMuiRSwtQEGoYCRjI6tyaGkpdFSrPQM0kZPKZhKYeWVD6eSBfasgzRBYh0ilGfIkRHmLOPsCcf4A5/wixOFwWqf/PT1qaSxDfQnkdis1XyO5+mWL3dVR7H+22WbnBlEWVNKwBYaINo+ol6spT3qDElDm2keIYIa45w7MnBM6EIAgJtlz6nQEv3/qQbJaRjFPScUY6TknGGekoJZ7BIt1mXtxkLl5h4rxFaHZILSg6ATRtnGaMcEoyw0MCW9MZnUmInJSkE4tw5hDNXeLUIS/NqsNb114s7ze59HvHUSjPQLkGjgjxGeOpEQYR2lCMnLtE3jY44E8e4V29h3PxbczTb2GefxuymNK7Sdb9FPH+l0m33yDt3kD5QXWNCxBhAkkO2kRLH6EiJCm4RvV7atkYfoaxXeIEMY18gD98inX8CPf8ktkoImvtUXQPSFr7xJ0DssY2CJAqQxsGStoYswX2cIExSBAjTR61SHUPjyld65I7GylucUg2fJuuOaBjTwkXM7a3t6tEzYO7PB3DWexXspmsyUXicxZ5nIQOaflMivTwTx6SGCYPlcnPvvMx//pyyqSzzaS7y6yzhVek9GdXtMfnBJdHOBeHqNGUMDGYeDssWjeJOreIurcRqsQdLYH7+DHu6DHO8CHm9AxDPlNKSClROCTmDVLzNol1i8S4SWLdITVvAQZueUSDI5ryjK55QceqClYtkfLw4UMATpPbhJnF/dZHbG1u0uv1+OxnP8sXv/hFNjc3Xyh3XndyWfdGj+OYyWTCbDZbgWkpq/DG09NTFovFys2l0WispMI1A14HGV3Xp2utV84vNX5c90qvO+m2bfP3/t7fYzAY1BjzuwPQVz8Q3wGor7PQLwK26/u6Lu+4vt46s7zOjl4PF3oRM76+zr+NSb++rN5PzU6vrwt8QmcP//NWizVor+UtdTvr3lZtZxTHMYvFgtlsRhRFq/3WPS7Xdel0Onie95z1UBAEOI5DGIYcHR1xdHS0sgFab6PneTSbzdVwDMDF9/5HnP3AnwLDQBQZ9ugIa35FkM7YsUu2LcVxcYN3wzcgkwgUraUji08FuD2eAe110O0T4ZIg5fPn5kUhTOvnN8syoihisVishovqL0Wr1SIIAprNJkEQ0Gg0MAxjJS8Jk5QFDqH0WUiPsbKZaoeJsriINRNlUwY9aG+T2k1i4eDrjJZIaJHSIqUtqlffruK8ERlRsuByfMlJlvLwh/5w9RBGI3UFKlYPkRq/KJ7VoSlY+U4vnUgquzpdDQ3Xc7RkQDONWLKxui7Kk6Iq+DQrjXrlDqMrzXSmq8LXrFyGBWXLOUamC4x0hpGMcMs5VjmDbIpZzvCMmGJ2galjuq0tstxhPC8w7B5ec5tc+yTKoxQNhNOjMNqkBKSiUb1qD0sUNKyUtlPQ9hSGJ/lVs4W2BdJRfLqZcDvIeamR81oz40Ez57yb8p/ujSglmAq+dJJz3L7AbS0wtOJm3KJ1dYO/8rX7DM56OAb88JsJP9/NWXxugWEperMcMUzJcoje8OAc4qcB0XlA+S9sJIrG/ZDugwH7n32C20/JMpti0SeabzAtJL4/Z7sxJp5pjr55wOmv3kAIjeFp7r0RsvdaQriZcCTBznzi1GOQm7iUNPSCgDFeFjGftBnONgmjBqKEZmvC7sYJL22+x/3ex6TCZRLvcDHZ4XzSZzzYYTDpIzW0KCG1iLQkskC3AA/8GDZy2ACcipjmca7RBeRJXtlzKl0B9YGoLBNNoEXl+GKDyKrRDqYCUWr0WEIO1v2Mxhszep+eoDZACcHmVUzrPKHMFfPAZtBvscibzKdt8tRCjDRmWPDqxjt8rvtveLX3DrNGi3+2+CofildIcchGNkVqoVRFoupSUZzZlOcWXMqqnZcC2S2xX4kwWyVFZlNMbIqFiWun2DJDKUkofKQCp8iwigKVCYrUJE3MKim2rmEw175zULn3SFXp/1VV+6GVRCuBUqIqdlyFHbGqCdCqCv9h7TkpZGX7qA2q7951F5nlV7OyK+SZ49BrJVa8oJjY6MiF+vmhliBf62XRuFh1QDCWoF5pKEpEniKyEJGMkPE55uIpRniIMX+MEZ8hkxEiGSHyRaXfXiPAlFIordGbdylf/j7yV76f/MZnUM1tZDwHpVBOA8oca3yEnI9QGSjto2QLLVvgNCtpiLlsXykwL0Ja8S/TMh/RND6mZT+hEcxxOwZOz8HruThdG6frLF9dLN8gWUiSrMEiavEvvv5VnhzeJvd9TF/R9+b4XoZyBQvXYei0cMuUzWjCRjgimM/IJxCOPUbjNrO0TVQG5OVSA18V71TF7Zqq0FvLZTqvgM+Am18i87gqjrYa5HYbZ36Gf/Uujct38Afv05o8xCFD2E0S/yWG/muc+S+Rbdyn7O9UxbCmQCwSjPkMIw6RpaY0u2RmF0PkGHaOsgXKtlBYCLfE8hY4xQXe+An24Xv0h08xi5C41GStXeLeLbK911g098j8PqXpL90BFEYSY81DgkihBgl52GSx6EMGfnlMq3zC3c2cN2/bjA//Fb46wjPzVaiObds4bsBfmf3HTMsG++6U/+JTv4jrupimyS/90i9xcXGxsgU8
uH2bYuuAU7vFid3kzG5y7ne5Cnqkpk17ekVrdIp/cYR3cYgYT8gjmNsbLNo3iTq3ibu30cLAGT7CHjzEGT7EGTzEHT/CnZ8vzR3W7JMBZW5RePco3fuk1m1i41blpa43cRhjJh9jipyJ9T1Aycv6b/OZ5s/jui737t3jy1/+Mrdu3VrJYV6E+dZ14bUOvHbAW5e5aK25vLxcFW/6vk+n01kVgdbEqW3bQFUsmmUZ8Dyznuf5ar/1qH2N72qi8e///b//3Xdx+U565HWm/Lot4nU2vJ7Wizzr7byo8PO6nGUdONcXoP6b69KV9QjWdQ359W1fZ9evdySudwbW2fLvBDaB5/zDa8CulCKOY+bz+WqIpSgKptPpJzRMUA2fNJtN2u02nufhed4qcra+OXzfp9lsMhqNeO+9954LIKjbVcs9XNddtSfefon3fuT/hTZM7vz4n6H50S+tpDj379/nzp07DK2b/DX9v0Yj+Q/lT3BPPHrRffKJTko9XT9P11/r65TnOWmaEobh6ksjpMRsbUJrE93cRDU2yLwOsRkwEx4zKonJTFdAfI6NpUuaOqapYwJifCvHMQssq0SVCYPxBcPZGK8dsHnzAKPbW8lJqtkiMiwMrWkUGUGR4kQxejAlORtxtfc6hdlmY7LgrdGIQe7x6/kWIoVXwyFGqvhA9skyAzspsKKSXEtyaaBNY2nrptYsBamYUHtZkCVglSRae4obmpVlukvlC+2ICr3J5d8IURWNRiw7AfpZSubSy3n1/6UUgmT5Hv2sGLUuSHVeNOvVq3BAms9wRkUeahaqkl/YKGwNpRKUCoplh8M2NKVVYgUJ/UZEz4tR/gzhV0yol2f4KsUgQS0kV09uMD/Z5fy0x3joYd7M2Xo5w7qRMdvIwNB4hcBRmkgqilJQ/JpN9rZL9rFHcW7i3w7Z+PIl/TeuaO1NkIZClZIkDohm25wMW4hUI0eQn3jMTpoYVolja5LMpHcn5v7rCeZewrGvCHMbI/cZZjY9CZ+1oaViTuZjTiaC0azFbNFGaYkVJGxvnHJ362Nutx/RtSaEyud0dpOr6Q7DSZ/paJvxrMV+I+eVAALTYorkaQmHqmLWOzHoCI7PRpR+UCW4KqHJtEJgUKKI0Cy0ZCoEBtCgSk0tqNjgUCAyBTHoiYRtMF9OaXxmins3wvRLlCkJkpRuGNGIQzIlOTE3mOQ95pMGZWxgDRZ0dMLru+/yhf6/4mD7kPeNB/xS8hVO/X3UQBJ9HFCcOZSRRDY0olOgtECNTPRAVqz0kcDcy3C+mGBsFxS5RX7mUkYGOxvnvNz5mDvOU1wyxkmPi3iT43ibM73Nwmkhc4WOQM+N6hjrnx5FVYCbg+lmNLendPoDdCaJZ010YtK2Inr9CV47RvmaxDLIpEGhTYrcoCgsktIlyhuEeYO08DBVhk2KqYsqxVSBKiRlXvmj56Fd9araGuGU4Gm0TBFXlxgfXMHIRcVdVNkHPwCvqDzkFyWM5jBPEFmJ0Cba9JZWiTaY8pqL0HLURGsoMkS2qMB8dIGMzhHhCSK6WgL5MSIeonWG3rpNefMNijtfpLz1eUhC5PgEmUWV7Ka1g27vIScnGIOHyOERxeZnKI09rMPHqMxDO9sosw3CXP5WpRjFBCM5w4of48zfxx5/Ezd5giVnuF2HN3/0Abd/zx2GV1sc/sR9pumrfJzvcGU08f0MXIgNm66es+9c0XYXmE5O4pgMvA4Dv8vUbdFI5/TmA7rzK4LphHJSEE09FmGbqdogpEOiArRats0Cy1hgighDRggzQ3kGWatH7nUQukAohZI2Zr7AX5zRnj6mcf6rhF//u2TDE4SUiMZNyv5nSG58L+n2m+T9m5StVpUDgMKZTXHnI6zFCJlGFDRYePtk9jZCKqRbgi+rGo1SoCVYjOnkF/Tmp+TnH+DMjnEWpxzsbXHhbXPevE289TJJe4/EblMIu+pACkX7ySMaH3+Ir9uY5i5HY49Qb2HrKU1xSkuc0uCUrnGJJwcot8VG+QTHUKto+pooqzXRd+7cWbHFZVmuRp7zPGesJOdui3Ovy2XQY9TaZNzeZt7ewI1mNIenNAcneBeHcHXF5HxCqALyjbvkWy+Tb76Etn2swSPswce4o0f448e44yd4s1Ncy1gVadazFg5PBybjfIsL67exCL4KSG4Z/5Tvbfw1TNNka2uL3/AbfgOvvfbaiql+kQz6+lwUBfP5nNFoRBiGFMWzdPQ6L6YoipXMxfO8lX69JgzrtNDro/s1i14TrzX2Ksty5YmuteZnfuZnvrsA/S/+xb+4OvDvJHOpp+sMdf3+uuSl7rVcZ8DrbVzXl6/PL9KpvwjIXy8cfRGTXoPE9XXXAeT1TkYtxVkHptf18utDHDWortetNUn1sIdpmiilGA6HK3BdS1lc16Xf79Pr9VYMuGEYqxutvhHqeN733nuP0Wi0asd6sWq73SYIAkzTXLXdbLRBmhSz4UpnZZom+/v7vPzyyzQaDQrhoBB4Mv/E+VmfrltPXruXEFKSYjIXHpHZYC48ZjiMS5MpLqERkFhNYrtJYjWJzACNpiVSGnalb3YthW2VGKbCsKqgi9IU5KYkMw1Cw2RmmISGRSotnDDHC3O8sMAOS5y4oBwnzM9GJMOQvt+l7XShMJY6V0mWGcSZSZybRLlJUlhIFBYRopzj+tDtGLhNieEqSlcgHIGyoLAEiWEQGSaJNEiMSi/qpDPMcISxuIDpJcZigJNOaOsYsRhQDE8IigV2OieOUzLRQNldEgIMbxPpb5LSWH2eyyaF0UbZHQqzQ2G00NJFlgtMvcAgRKoFUkQYMkUTYZgZjgdKpmDlWA0T4QlSQ4PrYrc64PoUpo2yHbTtoGy38ke3bJRhUhomhTQohaBcDqNLrTHQS3MMvbIwllqvnCTr0YSirNI681KgSkFRZegghcYwC/xmjBtEuEGEFyxoB2M8EWOXOa6OyScWZ08OOH+yx+S0S1bamLdzzDsZ1kGG0cvQhUSoylFFyJLsPZ/0HZ/8Ixt1KAn2FnS+OKL7+pDuwRAtJONZj/G8z3jeJ479irYeAJfADOgCmxo2gXZVBycNjTbq/EyBonKz9AW0DYENRIVikmvSonIrEoZCWiVNd07DmeMbEa6RkCuLRdokST2S2COOA1QpaTslPVvSsKoyqw8mEboo4fhjuP+Z/4kUk4JtFHeWViclGpuChBhNVGHK2jUHwcqRhRnPtNeWhl0NtxTBmzOa23PsRkbhGjgqxRA55AXlXJMWHcKky2LWRMTQWITsc8KrW2/zxf7XUZuaf1N8gV/Tn2Eum+SPbaKHAfmRU1n8xaJKWbWo9qupAHUokM0C406O3FRoKcgnDkJArzfkXv8jPn37Gxy0n1KOFMlIcnlaMhi7yOZnOc/v8Ti6xUD0KX1ZdTQLnhWBZkCuMYyC9s6I3tYV5qIkGjSJxgHpEBgc48QfsdmJ2L27g9zdIO9K8kZBGSh8UXms+ypGiBIlJbmoHKIiFTAuekyKDvOiRVg2SJRHjo3QZZXVgEZJiVIlYr5AjOYwVRBZqCQAs1U
BclNVUpTJDHl+hnF5hBycIsIMigaluYkKDijdfsV4286a7aAAUVYFkbJcAnoJlgMIRDpBJCMoI7Ak2gvQrT44DmL8BGN2jIwGiDJGN3uU/ZuozXsgDczhI6zhI4zRESJMUEWLkm3yxhsoo4kymiCcJWlQIosZRnZFy79g27nie/ccXtrI6FoLcmXyKOzyKNrk42iTh2qL2LLw/IzCqRy99tQlt8sjdsszfHNO5JgM/C5Dv8ew0WfU2CBxAxrzIa3JOa3xOe5oSDwJkIuMMO0xN3aJ5AaZaFYOMBIwFKaIMQkxZQyOpgxc8iCgcP2qeESViHiCOTvDufqI4OJt/PFD/PAcr8xQ7VcINz7PpPsms/YtkmYfHZjgCmQU4oxP8SeH2OMjZBJTtLeJuvdI3QPyoltZpfolQuYIXaIxKC0Xu5ziLJ7Szgf01IQ9O+ayNPnmq38ADMXB8T8mbfQJN++Sui0ao0Ocs4d4FyN6pQdzk8nCZ653meldclwaXNC3r+gY57TEOR3zgo68wLfSlatb7VqyDtBrqWkt6agDi5RSFEIyafQZd7aXUpkdxp1tpt1dtGFgnjzGOv4Y8/ghxuU5JBqtfcr+XcrtV8g27lF6HdzpIcHkKc35Md3ojG50hr844ejRxxiGwenllNmt/yte64CvNP8mDXOCYRi0Wi0+//nP84UvfIEgCJ7DhOsYsJaxXJe5jEYjZrMZWZatiNQsyzg8PCQMQwzDoNls0mq10PpZMns9ol/XANZa9Bqb1Z/VBGrNoi8Wi5XT3s/+7M9+94KKbty4wZ//83/+Ewz6ukxlHbRdl4YsG/EJPfr6ui8qKl1PBF0H8ddZ9OsdgRdJV9aB83eSzazve12usz5Msi49edHy+n19Uwgh2NjYoNPprKQpRVEQRdHqgtfbTdOU6XRKkiQrcO77/kqe0mw2nwPXWuvnvjhZlnF1dcXZ2dnqpluffN+n3W4/Z/NjmuaqXXWRKsD29javvPIKnV6Pb7U/Qy5tPj+r3E7qISEpJZmWzKXHr9z5NLHwObi4IJIeoeEzlz6R4bOQPnMZMLOrOHbXSHGNBEtmSDK0yCowZUuEa6Fsm1zYpKVNXNqoVOJGBU6osOMSK9aYsUImLFlggUoEZSbJUkmWSZLMJC1MpFB4Zo5n5Xhmjm9Vs2tkZNGA08MP2GjbvPbqLdyWC55JYUlyS5BIg1BIpkowzARjZREZDTK3hU1JR2a0SUkXBg2d8UrjAj9fYEVD5GJAfP6UydkZo8GcUQiLwganj93cRVkdYuVTWh203SWTLRJnBy08tHQRZYylZpjFDJmPkfkEW0dExjaOmtCJ/g0iGxPSpmzfp3f5s2yIM7K0GoWpi2ySJFmloNWdwUajQRzHCCHodrure6leVuv8azlRt9ul2+2uOniO42BZFkhJYZjkomIfUyTfUA3eVS5fsSN8qUmRpEKSIUkQZEg+SC2OC4v7foEUMC3hKpUMUpjmklCZRKVBqcA0FI6f4jRDnGaCF8zp+UOa1hxHZdhlSnrlcnG0z9XhLuOzLrlr4NxLcW7nsJOCo7CVxJCaTJbIQwf57YDwXZvwkYHbC+l9YUDntSGdW2OUECwWLaLEJ448omlAfNYkv7Qprkx0JjB6BaKj0YGgtAxMV2M6Gm1qCi1Q+plswqQaBNGVhJmyli49+3WsHHVEZc8oxVIfTWVHWoc+6doDWqhqY4ZRIhAIJGIJQwWaCqbZsEzkqtOIdIlhmJUWXLKUWvBMP/2sppLaex6xZGrrnpbQlRRIlkipQFABTm2gysqNx1AlgQhpWxMa9pzcNJnqDnPZwC4K5AyyhUVS2mhHVOE4CnQGeiargtNUPEtWVcsTKHUlFZEaYZXYMsLjgu32lL3NGVsb57TaI0ylMSKDbOoymXW4HPc5nm0zLntMZZvCdJ75fkuWiaYKx49pdWZYKkPPDLKxRTZ1KBYOWdfG66X8jsU3aRoxiSUZuE0uvSZXXpNh0Ea4mq30Kd3yjKazoNtM6VpTOmKAa2TYOsekQBgwybtcZtsM8g3GZZep6jBXLaLMJUlMMmVTCBdtO1CoZSdDPLOOzBWEMXI6xhgeI84/QF4cIq7GkDVQaotSb6OMPXC3UHYfrEZVYGqWy4yEeqihSi5GFuCY4Lpox6u0Smp5+5QRMp0gixAhSrRpoB0X7TdRQWd1bwQf/TTW1YeIRQhFi1weUAR3Uf4NtLNJru3qlhLgmgUtK2HDnbPnjbnpD2gYIbPC5STp8FG2w7HYRLoayytIbBtb59wojjjIDtlND9mInpDrnKHXZdjoM25uMmluMO1sM+vsUJoWjfE5jeEpwfAEf3iOGEdkYcBC7xHat4jNLVLRocCr7ncThI5Bh2AWCNdAN110I1h6wVdfFDsaESxOaCyO8cJz3MU5ZhSR5U0unLvEW6+TdG9QNNvVSJYusaYD/PETGoO3CYYfUpqS7NZrxBsvkfo3SIse+dRFFwLTSjBlhkRRCknmBghR4CUneOEJ+3ZKQy8YLsYkpkmxfcDVjS+Rej5WHtG8/Bj/8iHy6Cm7sklTdpmmfQZpn3G+yUxtYouYDWfA7X7KfmvBtj9h0x3RlAPQ+YpFX08SrQH7i0jUOI55+OgRlzkke7cpDu6TH9ylOLhHfnCfcnMPY3COe/YY9/wp7ugSK9Xowqbwd0h7d1i0b5G6XczRIfbgY8Tpu3SiM14KCjbyIabKuNp6i4vdL/Lb/RP+wJcfsLm5+Rx+vE7oXnd0ybKMyWTCcDhcBQnVmO7k5ISrqysAPM+j1+utAobq466fr7VUWSm1KhatawqzLFux67Ztr6S4aZryMz/zM989gH5wcMBf+At/4TkAW081kK1B7fWI+vWTtnocXWOurzu4rBeNvmjI4jtdhGWbV+B4/YJc9zZfZ+vXpTD1cEf9uVJqdVHqDgOwKt6sizTXI2XXiwS01gRBwO3bt2m326ti13pbtV6pBsdZlj3HZNdDPvX+rqe51j3cuqBysVhweXm56tWtT5ZlrWQu61aZ9fDQumRoe3ubu3fvcnLvt/APt38nSgh2s0s65ZyFEVQ6b8MnlQ4WOfmGhK7G0wm+TsAUlJagsAxyyyK1LPhYwLsCMy5wkgyiaridrCpaU7lJWZgUS2DtWgW+VRBYGf7yvW/leFZBYJcV2LYLfLtYgu9qeeCUuEaGNDULJKPSZJBJprjMhc9cekyXEplRYTIsTAqniadTujKnI1O6otKjN1SEE49JBhcMDk+YD8Zstvts797BbOzwy4c3ee+qDwLafowgJSkNMixK4QIaWUbIYoHIZ8h8hlku8GSCWU4hGaKTAbPbv5H4zm8EC3qH/5DG6T9HJGM8MkQyRaQzrvb/FFft3wNoNq7+PwTx2zx98H8GITGKMTc/+E9oiDkiPkekI1zHXllEXS8yllIShiFBEKwsNWt5U7vdptPp0Gq16PV6K5/9urq+LniGZzUFUkqOtcMPLe5QIrgpc/5W+5QdQyHlsxGVR4nJb3t3nxK46+b8vVcHdK1n914cx7x9VvLDv/g6pYBb2wV/4vtj/stveB
wdW9ACcQf2OgWjqwR5rtl+dcyDL56QOBMGX9/EMRIa3ZD51RaPf22f6ZMO4p6i+dkU61ZGuZFhLoFzYSjUlYl4JyB73yV6aGB5C/pfHNJ+Y0jnxohCm0ziPuOwzyTqkk5cOAd9LtHnAmyNsVNibBbQBpVbKC0IWjluqyRzNDESrUxKLdg1Na8agp3MYB4J3p8KHoZw09VseRnKCRnIjFRm+PaULeectl35w5+Huwxnm0wXHeZhmzR1Ma1ca0NQYgoMqZAINBmKBYqU4fkefgcMC4MK9PtktIgxVUGsbebCJRXW8kcdnsvoMdbmFYhfe6Zo/Wx91v5utarCFAW2mWKaBaVhkAoHJSSmLrGKKjwsL00yy0TZlRMhhq5sO2NRpbSWLIOf6o4Dy2H/tX0uJSDCqGYpq1ELYeiqINbQ1LnBpVha7DzXV1oC0qW+mbUXe5RgTQpUKCgimzIx6DJjpzxiY/wt1PnHTKZXXGiIN3fJ925R3nmZ4uY9dLsHZQnSQKqMlh7Rs6Z0yxHddEBXz+m5Y3qNId1gRM8Z0rVG+DLhPNvmNN7iYtLiatJgOG4xDvuMi13m5i6h2SexmhSWWdWlSL3UYRtQlBDOkLMRYjpELGYQpRCVkDnoPEAXDXTRRJcNyAPIvar4tdDLkDPAKJf1LwqhZhj5EJleYmRj4pu/GWwfrJzm0x9H+Q2K/l3y/l205WGNn9BJLvnUps2dQOOXJenC4GjW4njR4jJuMsk84sJCiCrh1jAUDUAaHh4AAQAASURBVDOmYy1omgsMWZBok0vZY2x1cP0U4WoSy6WVj9lPHrIdPWJj8Yh2eESZpxRlSWT7TNtbzDo7zDo7zLs7zHu7hL1dRJnTGJ0twfspjdE51jRiep5zvOiQNF5CNe+ivR2U0apuMFOALBFGXmVWeBbYBZZcIGWOMi0Kt43MIpzFGXY8ZLL3RcjAGU5onr/LvH2brLOFDhxwwYhDnMkZ3tV7dIe/Srt8QnmwQXT7LaLNlwmdPdKkiToX5GOHMrewRYhnZJQqpbTMytXLcZcySUF//g73Lv4hxY03eGjfotjYImr0aSyuaE+O2Jif0puf4U9SXHrQfIXLuMv5osVZ2CbMbTbcKRvOiA3niq55Rce4INDHmOVkRSyuKxdqwvDp06ecnZ2tcEyNgwCU5VDu30HefZXixkvk+3dJ926T7NxGaI138RTn7Anq6UP0LKRMBDLYI/78D1GIAF0a+MmI0O0DgqBc8DN3vsGdO3eeU2RcV1usg/TazWU2m61UCzXuMU2T4XC4krnYtr16FtZYK8uylUNL3SmpcaNlWQghVpisxqK2bSOEWEmbv/a1r313GfQ/9+f+3HOMeQ3Mr4PwWn7x3EVZW6f+fw1Ur2/n+old7/nUy6/ryV/k5nJdg7QO8l/Enl9n0mvgvG6xUwPo+jhrV5Qoip7Tj18faZBSsrGxwd27d3Fd9xP7V0qt/rbuddXHt+4CUwOrdQlJnueEYbjqyaVpuio2vS7VkVLSbDZpNBorWc36tur1DcNgd3eXGzdu8PSl38HPbf4gWkjupId8IXyboIxoqIiGiglUxNdfeouf/fRX0UJw9+qEzx5/iJ8l+FmKn6cEecqvHD7gf/jwy2gEd5tHvN79CIsYWyZ4Zk7bh17LpN+yaDgKx1QYhrzWIYGFMirdufSYapeJdp4Vgy7n+n2KQZOMtkhpk9DUMV62wCsK3FLjIjBKQTJPef/dj4kLi707nyKTTRa5xSy1mGU2i8wi1ya2DjHVnIaZ0PM1/YbgeNLmcNgGBVv+FLu4YDYek8YJKI2UJpbto7RBoSSlNtDCAmGjhbl6r6wmetusCv1MsZbqtyxCM5di79GyOLB27VDyWfKfqJnO+ldRIVSJUDlGkWKWMVY+x1EhvpoR6BlOOcUxS1xbEngmgWfT7TTotHyagU2r4eF7Jo4lsUyNYwpsC2wTLLN6dSyBYwmeaJvfu7hNjsBFL2vgNC8bGS+bOa+YGb7S/O8/3qDQ4AqNQrBhlbzm57zuZzxwUmSU86P//S6FFtiyemDbShOeCUQChlK0GjBCVmxXXey3AcJUiFSjQ0lzJ8bdmpAf2yyetJCGonkjwmlJ0r5NtKPpvZTCQUbmFbSQKKlI5oL9Dxv4H3hcfWhypubc/+KI7hsDyv0RKjOZXbSZLhqk2oTcILtymB+2iC4CrG6BtZNi9jOEp0kjnyy1sVolbq9EuYJESMrSQGmBA/gIpBJkJcRaVJLqtbrFOvtTUWDKEltmOEaCIQrywq7mzCJNq8AmaZZopFZaVhr15TmiALNUqEKgCoFcBgJtljNez4+5l14wzF3eVnd4XG5RLiQMBcxZuscApsJ+OcH6fIbqmKTCwrEzTCNDaIXOJOXcQIagk6U+O7HQ2VIrrgTSLPDcGNPLKR2T1LaXwVAKyyjxdIapShJpkdg2hWmgDUEZS9TQgoWoLCUDhWhV3xFVyFWomRTLpGEBGole/17Ubiv1pDVC6+XIxTKoiGqkAL10sNGyekUizMpRxLUi8swhy5xqEKRQ6KTy/yedI6eHWOffwnzyLaxHv4bO56iD25Sf+iLFq5+huHkX1d8C265ObFku2XFgUkJoYaU53caA/saQXnNI172iaw3pWjO6ckyvOKUnh3S8CNNQTEYW40mTUbjJINnjorjFVbnHQPWZZB2mU5NoVpBHc2QxAsIqeKzRQjc76Eaneg1aoJbm8aGAX0yxf/nrlP4BytlAG361rD6vAtAlMh1iZpcY6QVmeoylLrCCgptvPKD70gOGVpcz3eRCBTRExp6YsSNm7DBlS8+w04w0lDxd9DgKu1wkLSZZQFQ4z66dAFtmWDIHqUktm9yxsP0UFUi0ZdCLjtlavM/G/GO6049wkwHotXRJDXGzR9jfqwB7f49Ff5+wt0vY2UbMRsijj7HOn+BeHGJfHGKNZ8jIJWu9RtJ6hTy4Q2ZvUcgOGuNZgrAFmCWGCDHEgmxjB2yJnw/40t/991cy1tsPvsDTZItD+2Xm/VdJezfJmi3wQWiFPR/RmB6xHT9iTz3E3swJD7rMb91h1LrJXPfRhznquCS7MsmyHgUBGBAUl2zPvsWl/RKhvYeg5Icu/3ckjSbhxl2mvZuMmvsM2zdQhsV2OuCWnnFLTdjLRjRnYyYTi7NFi/OwzWXU5SrtMUr7WCKlY17Qlhd0jDPa8pwmZwScg6qcUGqzihepKYCV73mtiY+TBOfGXcSdV1ls7DNsbVbM+42XKHvbq5G82+99nQf/+p/xP771F9BC0ijm/I3uL/DWW2+tZCfficBdx5N5nq/c72o3l5oFT9OUx48fM5/PV24u3W4XwzBWOA9YFYvWpGpZlliWtcJWcRw/x6LXVozz+Zyf/umf/v8PQF9nw9flIPV0nT1/EYt+fTsv0om/SPD/Ip35dda93l69vO7BfKeLdt2ZpV6nBrp1j+n6hRZC4Ps+vu9TFAWTyWTVC1svhFxnqW/evMnu7u5KUvKizkLNrF/3eK8Bds1c1sdcy2XWb5K6CHX92OpzXReU1rG168tqBrXZbLKzs
8PW1hbt3ga/vPFVCmnzm+f/Ektln2TxpeQfP/gChWnyW9//Bo56Vrhbdy5yJflHh59DYfDbbv0KtrE8LiFIDZ/M7xFbTaZ4THGYaJcZztr/K+BdImiT0iKjqTKCosAtSuxCYeRqGeEOWQKLWBHlNrH2WeQO89wmLixsmRMYKU07o+UUNO0MW805f/ouTTvji2/dpx9ofJngsKCMrhidPeT46AlJkrCzs8OdO3fo9/tEmcE/ePcNLBHz1b1/xWh4yT//5/+cR48erTpyrVZrpWMbDoekabr6vLZ3MoIel2/9MWQ0ZOPdv4VlOsRpSZIpbDfAdhtk5ibnm/9LUDHt/Fvgtpk1XiUO7uBkpyitKa0W2m5TigAlPTQOSluAXCkd1r7grLLkNSs3bqOCNMtkeo1UGqNkmVxaOWIUpSQvBXkpKJf+31Jo5JJp9Q2FY2pkR6M2BGoDip4g7QmSlkCm0Jgr2guNEyuMEgopmLuCiWNQAs5Cs68StvIUEVk8PndRjmABxJHATTUqEohckJew0VbMSokSEKfgNCA3l5KSGNxGgeuklJEmPPFp7Mxp3xnj9DTa8xmbAfmGovVySr6VVXW5EqJUs/nQo/dBwOxDk9Nswd6nR+x9ekixP8IvHNrjDheHDR4NHGapSZFKossGycwl2F3g7Yd43QipFOmVx+S4R2EY9F5b4N/RhIFkUtrkuc22VfIp4PbCYTE1+OWh4DKH7+nB23/vr3L59Z8DQ2P9337iP2sY8y/59vxTnh1vNsy54VsRSeYxiDaZx22iNCDNHQyrrECslJVUZiljEVQdoBXIqqUuyweiVLoKSFKisr7LqOaUCkguQb8wFUanhIZGW5W+3PZSDDevgLdSkAvIBWVhkC488omDHcXsqTN6eoBpFIyyHheLXZLIQyygiKogK9PJkH6BbCqkWyA9hQg02BoVGpRXBuWxiXBLuKcRvkExsygXJmaQ46kFwekh7vgY04LU9ig7e8T+DjPdQuUCN80xUo3KIC8MCmWgtcR2U7xGhN8I8RshplWiJbTUFC0kqWNRtkEFJsqTlLnFYtQimgekqVsFa5lL28C8hGSKnJxiXD3FOH4fefQeerNH+cprlA/epLz9EvgNUAWYS5a/1BCrajQhlaBLhKsrDbNTgIzxVEjXS+kU52xcvUN/8ZBedkxXjOkEJZ0th+6mpt2JyXKHUdRjHPcZpz1G0yaTgcXkLGd6OGD+4cdMzoac/OWfpGhu4PzrX6D3n/1vnq8vEgZF+yXiG18l2/wKeeMBpbkDUY5ICihrMsJY3lcCUyhco6BhZfhOjuMWGF5J6Qsi32Jg+cSWxbaYs8OMbT1hW4/ZKEZYccI8sjmOehwlm1xlPcZ5k6j0l/ewrtKKpV6CZY30ClRgYBoZ/fQhW4tv05t9QHP8AWYR86Lp9PKKx6km3jzAuPMAdXCfYv8u+c5tit421ugC5+IQb3CMf3VEMDzFnUwgsgj9l5m695m6d8ndfQrRqKyAVqNQCkOE2NklG8VT/MHXcacfY80Oee3uPp7f5FtPYwaNN5lufppF5y5JawvVcMABK0nwF5c0Jw/pzb9N0LyiuOEyv7WDeu0e4c4uPNXYH85QTwtOwzfJIx9ZFPwHxX+FN/k2vu8Tx9Wxa2Bm+Mx7twm37jHr3eIq2OHC7hGohJ3kiu3ogs3onP7shMbkjHHkMkj6DLMNJuU242KbablNrJs0xAA3f4KavodYfIiMPsKMHiKL533Ta4AupWQ2m+H7Pr1eD6j8zmsN+MHBAd3f9cP8jz/4xwD4wi//XT71rV/kxL/NSe9N3gq/xfe/us+Xv/xlfN9/zmTkRVLodUVFkiSMx2PG4/FzNopSSp4+fbqyQfQ8b+WgV+PBml03TfM59UMNxKWUK1xW1wjWjPtiseCnfuqnvntFovv7+/zZP/tn642tlq8D8xqoX9eEvwjU19OLbA9fpB+/DtDXGfXr/78O6L+T+8uLWO6aPda6qsS9vLxcGczXbVnX3huGged5CCFWtoDrSaH1tuspCAJefvllOp3OJ8513aZ1cF5LZa6fF2A1xFQP16zLapIkYTKZrHp668dq2/bKpnC9fbWuqtlssrW1RafTodvtVkWlyxtOrnU8Psm8LwsGpaBEEBkBCyNgLn0WRsBi+TqX/tKBxWMmXGa4SKVp5hmNIsPNC5y8xCwURqGrYJ1cUOSSdFm0Oc9swtxGCE3DymjaeQW27ZyGldGyM3wjJl9copMB/QDu7rfpBwLfiNFF/FxNQd2TH41GfO1rX+P111/ne77nexBCrL7E9bCdUorNzU329/dpNpurUZY4jonjmCiKOD095dd//dcZDoere8q0bdAaa6n5r5PJGo3G6voblkWR54SLxerLL6RBliar89xoNJ4raJZSMl+EBL6H67qrZb7v47ruShfneR5CGpSmj3ZblVey2yf3D4jsG8x1j1nRZFE4RLlFUhhkhaRUog5VrKbaMlJUJKRpaGxT4duahlPS8hQ9r6TrK7quouOWdG2Fb2hsNK6QaDSn2uRQmhwbFiemwZllMjIl7VzRjxWNuEQkkGUw04KRbRJbklamaScaa1qQjUrKwiLUBrNlHL0Zg0qgyJ4FBSkBKztgwbNh+0RXQHMZTCPMKixF2pW2WksDZUjwFEZDoW2FYYI2NFYmcUMD5pJY5Ti9hNZejOhG+FKya3gEmcd04PHRvGSUQRpbCEPT2J7hbMVY3RxdSoqFSTL2COcN7N0E96UcNgSxbZJoi7ajeEnAQWTxi1/7JWYbD6pq1M72TwK/APyi/l08Et+kB3yqa1995aB5+Ls73vjVvnvV6NpDposeT0e3uRjtMhxvkuYubjtG24IMh1KbCKXQsa5AYH3CFCA0plFSUqJtG5HliNxExbLyZa/Pn6PRS3mKNjSyqxEdDT5oWyxlJ2rZOagcLZSqNPbrQz9CKyyRYRoFWghyYSF1iVWkGFGMkRYYOChlkxcmeWmSFwYqk+ilD7wwFIJKz49VyVtUIdHZsh1aU8aSjWzBvh7TFyGeVSBsTeaYjJwGF1aLC9GmyAy8aYaeQB4aoBSWyJG6RBeCPHFIUxfHjWk2Z3h+jOVlmF6BGeTYnQR3KyIuXSbTLuG4TTwPSHKH0jTBl5VsxxCVK8t8hJhdItMZGBq1tYm6catybSoWCNtAO/4zG6WcZYdJIaQCVyPtEi1iRJJVNpN+G0YDzI/fw3z4Hp2Ld+jPP6KnQtq7W7TuHdA+aNLdhk4vptee0A1GNJ0F06jJeNZgMjSZXqQsjobMj4bMLnNmA8XsSjG7KommGm3YZDuvk9z8HNnNz5Hc/BzCDLgnr9hNMtyZSRlKRpHHMHaYZQ5hYZGVBuu3gSVKLLMyAxA2FK4g9m3sIGfTnbBjDtnWQ5jnnJ13ecV+H1uGXKY9nmY3uSq2CXWLWDfQyOeZfkuDLTDMmEZ5TC/9mK3522zOfx2jjDg5OeHk5IQ0TfF9f1WnpbVGeAHF7i3Kvbvku3fI9+6Q794m3jyg8Jq4ozO8i0PM08cEo1Pci0PMyzOy0CLe+hLTjS+S
eHfIjD5aO9V3Z5knIIwSU89wkhMa2THN5CGN8CHN9AKjyDja/l0c7f4ghsgovAZZowWBROgSez6hOT+hF35My3mKcwOyu9sM79wlvbWJjAr6Tx4TPDqhc3xF6+k5wTzGkHKlsV4sFihVuZR4zRZx94Aze4Njq8eZu8G5u8ncatKNrujPTujV8/QYLxqSa5dxvsnxNOBoEhDKfTLnLoV7B1SKlTzCjB9hJQ9x86f45RHl9INqxMzzaDQa5HnO5dWIRPSwyws+9eab3Lp1i/nuXRLLof/03dVIiFKKIAh45ZVX+OpXv7qqp1rHUy+Suqzjpvl8zmAwYLFYrOTWtm0zGAw4PDwkz3Msy6Lb7a5cbtZlLrVzy7qcpdacp2n6nO1iza7HccxP/MRPrHTu/84A/eDggD/zZ/7Mc7KLmp2up+sSFWDFKNegbp3Rrj+7rg//t4Hq9f286G+WB/vcOtflL3X7XxQiBDyX0Hl+fs7Z2RlJkqz2c10OUl/QWmJSS1Hq/dTr1tve2Njg3r17qxjY68z+OnBcD1Oqp/rmqC/6ervqc1yDyjAMP9ExMk2TZrOJ7/urIZr6S1kXAtaOMa1Wi6m/w3+9/cPkwuQPTf57OiqsNOhGVfg5lz5XusMH+U2UIXDNjKR0MPMSL09xsgK7KDEKjcxBF4KykOS5SZKbRIVFoQwCM1uC7YymldKw8uX7jJaT01wua1jL9awMzyxWPuvrmvp6yrKM4+NjZrMZGxsbbG9vrywqa9lSXXVd66vff/99vvGNb/ADP/AD7O7ukqYpV1dXPH36lMmkqh7vdDpsbGyshsNqYB5FEfP5fJVUdnp6ynw+J7v3ZcZ//McRyYK9//r3s6nmuK67smlyHIeLm69z/Jf+G8x4wYP/y39EfvSYw5v/HbH/JXZmP8bNxX/7XM89yzI0Bk9u/DVC5zNsj/9bbsU/ttKZ18dT1y/Uw3+O46yuba/Xo9Pp4DZa/B+PfhPvhW3++N1DfuTuxXPfCQClBZPU5Gju8NHY4+nU4WRuM4htxrHBIpPEhSTLa3ZticMUFUJmTX6zHAq2bU3HUwSupuUqmr7C6kLRhA/aMQvDRCqfWBu0s5LRY4OgUPzWBxlRKfjFhUmeSroWfLlVso/GiGAyFbx/JnnnSNJSGrcQjCeVOcbNLY1hCU4nMM8FvRYkCiYL8AwoS00214hS0747o3tviL8zQWUW00GPKGsRmyayVyADhShBlyDHJt65jRwYxHmODlIavRTt5pi5RVc5NAqHaOpwPhWM4yoHRSzda6RZCb9VbqDyKsxHl+KZj349mUt2UCgQMgcUJlZlKcKcggkZQxQhkCEomP5P/55oN2jd6tLc87HcGMvKyEqHRd4kygLizEc6CsMrUJZBIUyQlX5clKAKAasiVY1FRkCKFIpEu0SJDSNZOcPE1XU2NwrMniK1TQhBKoWxlSMOSsQ+yLbG7YRYjbSSJi00xcIhSX3iyKdYmOxbJ7zkfkC3OyQMAj4qX2ZEj04+pV+OaRtjbJ1TJA5FYpPGFpPUZaybzO0umWyglVEV7ApR9S+FeFYgW8tdtL4WLlRJqSxVYqkCiwKLEiE1mTCZGAEKWR3rlMreNFEYeYFfxng6wiGtgtdKgyy1iSOfxbyJEBq/EeI1I5xWjGGXFNogWXiEsyZpbqJ8oGdCU4BYxptaDlgulCmIEnwXVIIoQzAVOvDBdita1DIqK1ZFVWRqaISpwCgQaYKOk2qMrNlBnB9jPnwf4+EHGB+/j/HwfYynT9DWPuLm52k8eJX23S1aBzbdrYJub0q3OaJnXdFzLun5Q7rNKVKqCrBfFsyuSqYDRTw1eP0HbzCy9/nl46/wzY9+K0dFm1vGnAfGiAfGiFeMIX0i5qnF1cLmZNbgeNbiLAwYRh6TzGOROySlRaHlio0X6OqrYQJdcO0E3wgZpV0oNa/Lb/Kl4h+QRwmTss9Vscm5fJWhcZtI9ClZnqv1AmlDYagZInyMPP86wfxdnPQEKzwk9/YZffo/x55/yP57/wmWURXW+76P4zjMtETt3yPc2GfW2SbfvU2ydZN46wYYBs3xOfbZE5yLQ6zzJ/TDEXqRM7ZeYtx6g6jxgMzaRudGlW9RGpVscTkaULn7CDBKvu9bPwpK882Nv4iwJQ3zjHnrBlFzkzJwwQEzSQgWV3QWh/TMI/ztKeWdBos7e8xvbiOLks7RJTemCbcWGZ2jK/yrCdnS7lgIsSKv6ufpQhucOxucuZsMGrsMG3sMmnsIrenNK7AeXD1k9q1fIvvwm+hotvSg3yVz7lB498jde+TuXQrvLqW1hZWdYWePCdQxMnrEZeeHUe4B7vjn+e3t/45+v4/v+ys/8XXi1rZtbty4wfd93/exv7+/YqlfRNy+iEUPw5DBYMB0Ol3hrZoVf/jw4Urmsl6rlSTJqrB0nUWvLRfXvdJrM5Bao15bLv6Nv/E3vnsAfX9/nz/1p/7USrJQHzCw6nWsg8XvJDtZZ9KXDfuEjOVFrivrALz+u/r1+vr131w7jucCg+qLUIcHXQ/PqbcxHo85PDxkMBg8p5dfZ9LrHlPds6qZ6LoieH29OhTg9u3bbG5urrRK/za9VK3pr4+5tgiqAfq6jKS+DnWF8ng8/kSHyDAMfN9f6eeBVSFEv9+n3W6vdOq2bfNP2l/h51tfQQuJoUv65ZSmimjomJaKaOqYp5Mtvn10GyKJFIqWFdE0k+fAdMNKaTlVsE3TqtjudcBtrBUS1m2tr/H6CMf1+3V9vfr/69upv0y2ba+KbNfvu/UC5Vpj9su//Msopfie7/keAC4vL1cdtdp6qZY21eA8SZIqLCkMSZJklVY2mUy4/IP/D7LP/94KBGQRRjTGLLNKLpQniCwmOrhHfmer8hjPM2QUU47b8JMCHI00I6SZIAixZIipZqgsZGF9FSYmoGgWH+JxhS9G+GJM05oTyAkNc0bXi+k0zNXISLvdptvt4nkeh3mfP/Hrn6FAItF8eWPKvp+x76Uc+BkHQcaBn+ObzzqC9bwud/q1Q48//FdvUJQSw9B836sRvVaJ42kyCcPY4vHc5LFlwZcETEBMNOZEY0xAz6msFwto3pnw2p/+VRZPWkRHbcaDDpN/3K+KBvcF3NXwioZ9BXMI5uDPBSQwUxIhYNfUbOYaN4VkJjg6F1wOqiwfFcEsFOxvQq8J4xhOptBvVXh4OIOOXeHgyVAjfY3/2ozNT59x6/NPMMmYHLe5muwxUy3SLQPrZl6lZqLRQ4PuBz7Njx2iOCLdGnHwmRHWzTHd3OcLRp9Piz7x0Qb/4iOLf/ghfDTU9LsZ3s6Exr1TOncvyZTD8HiT6DwgPguYT5qYt1LkQUnhGuSFjS7AmOhYD41QTc2CUvbIxBkhbzPiXf7+H/tLFCW9zV1++7/3u9DSYoHLlbCZOpC6GmHmlLlBHPssoiZJ7FHkFlJWumitJLp2ESmW87XY+9X/n3veLL+vdeqoxTM/b2AV9uNohAvSLTG8AukohNCV9huDQpkIpdm0L9kOLnDbMXMn4NTcRxgaO82wphG92cdsmI/obGQ
0dgy8HqikqCRHeAzMXa7CXaYXHeZXbYp9ibyhECZoLaoiRSVRiUERmqjEoEwNVGGg1FJuYqplLoGAgWZ3dIpVJsyFTeg2yLoN6FiVzaNiLY9A45QxHXNAx5tgBzm2zJCFQmUmWewwm7eZzVuE8yZZamN7KaZRoJQgS82qrxtQgXcvAblYZiO4YHuV/6cGVKVFErZEW3YF0qUExwBPgbesXUGDoRBlDmmMSDOUMKDRxDh+jPHoA8yH1Ww//hDz9BC0Td5+g3zvS2Rbn6LYuEvZ28HbMOhtTujaA/qc0DcO2XFPef21c7Z7ZzhGivpXf4BwfIsPyy7vF13eK3p8UHTxRcEDY8grxohX5ICbjDH45LMxKzTjyOF87nI0a/N3PvwKpTAQpuJTOx9wnG0zzHrgCaz7KdqTSJ3TiS9oRWc0Fic0w1O86SHu7JiYLufu5xjarzG275GYG2gll/fyshNHuZQDiuocqozdb/9v6Y//8cpG2bIsJpPJigWu7fiEECit2X7wJtHmASdGwLy7Tbp9C3XjJabNDWSW4F4dsRVPaE0uWBw+RWc2meiwaLzK3LtHWfroTFbFu7lGZhmKSroodMGnpj/G3fDnKhxkNpi6L3HR/jTD1n1mwR5po4X2JUIr3GhGOzynJ87obA7pf6rF/Gafk15AKWDzckr3ZEDzyRnBoxNaV1N8110laNZyjhp/lEqx8HoMmnsMgl2uGjtcetvE/RuY4zOs0w+wT97HOn4P6+R9xPlDpK7YbyU8YvMGKngJ5d9nbr5E2PrN1blWGb9f/ocrRzHf91c2hTXWMQyDXq/H933f9/HgwYOV4UWNCV4kha6lwTUROx6PV9LTmqw0DIPj42POzs4oy4rhr1n0moytpSt1XWHt3FJjwFpzXqeh11jLMAx+7Md+jMvLy7qd/+4A/Ud/9EdfyGRfl5t8p8JP4BMge31764D+OnBatyy8DlavT+us+LoOen1Z3SOsL8R1wF23J45jzs/PefLkyarS93r7l+dpdbHr/dU6pHVWtwbIrVaLe/fu0Ww2V8d7vYOyLhla11XVQzPrloj1D8H6zZckCYPB4Dkten1O6tCiIAjwPI92u71iVBuNxopdNwyDc3ub/2f/P6AUkj8+/Qe8Uhw/B84AjpMt/ov3fzdKS/7cqz/Hq52LVednvaOyLv2pz8f1/18fcVm/rusdnvXRiRcB93rZ+vv1v6/Pe1EUq1qDTqeDbdtMp1O++c1v0u/32draYjAYMJlMAHCXP1S1ZdK6N2wcx6tC3To5bLFYML79ZaZ/9G9AntH6q/8rjNkl0g3wupsEvU2EEzC99QZnv/H3gyrofuMXKIqSxcZvQhsNpJpilBFaeajCQudWxbKkcukpTeUrHVMNd9cazPr0lsvZAMNWWJ7G8UuajZJOO2ejlfONeYuFsvjM5ozPdRZMU4thanEVW1wmNleJRdsuKsDuZ+z7OTeCnBuNat71S7JC8nv+77c5HFn80FtzHuxlfHDm8P6Zw+Mri71uwf2djH/TcplvSb7/RsytoODt3OGj0iJBsJmVBJHiPEwgKWi5MfbenOatGe5WyPhXNpn+ygbzj9pElw7sAp/S8Aqwv2TpY4Ecg3klEFHlO1iYoIyKjNRTKkY3EugZ6AjMDIqoIimDRkWqTuNlTowBYQgdEywFozFs70L/5Rz5xhXO609ptoaImWBy3uY83CH0G6htMLeWMfaRwH3Po/Oxg4wTou6YnU8NcW9N6KQBn5N9PqU3uHzU55cemvyjhzBL4M0dRbe/IN8ZkGyfk7dmRIsG45MNwosmi8sGWdvCepCg+4LMsCmVwFqUiIFR5GO70DkuZ0f4h2/zw692+Yp3RWDoVRFTLZcrLIfTdpurHYP5To7upYSZz9Vkm5PRDa4mWyjTwA5SlClJCxdLpxWIjc2V04owFZ6V4JcxycRmcRVUaaJLdl3ainYrpNMJ0a5ggcNceJWLjLGUI5SVLEUYGmkWSLNEaoXOJSozyFMLO8voywG+GVFKg0nZISp8ZK4pUpMykhAXiKzAKCpJigFoYaGxUMqkLAzK4lkys5AV29z6vgnZXQuzX2C1MgyzQFBgeQnC1GhtkU8tioFBPrHJQw8106hZWcmESpNS+Es7mrJiP21RgWmX2gRz2bHRUOQIHeP3Ujb6QzacQ7r+lMAtsNDoTJLMPaazDuNJl+m4y2zaIg4D8txCCIWWlXSDALAycBU0rWpoiLByYnFsMD1WRQhq6dRiqGp9T4IrVm4wQuWQJpDnaNsCy8Y8eoj98H2sJx9hP/4Q+/FHGBenCK0pzB7p1pfIdj5HsfkaYvceRbtL6lpVcFCqaOYpW8TctOa84k94szkgMS3eK3q8W3R5P+8y0Q4vGWNelkNekQNekkMCnT6XClmWJY9GXb5+cpe3Nj7kRuOUKIO/+eR3Mkg7/J7Nn2ZTPmZEwKXscmX0GFobTNxtZt4OsdPFSwY0Fqc0wxOC+Qn26DGDp0dM8y6L9mdQN75K2b5TDZNpE3K58tU31BwnO6aRPMJJDinHH7DlzjHCx5DPVzhDCEG73aYsSxaLBVrrVUrlYDRmEXTg1sv4Dz7NpdthGHSJN2+QdHewwwn+4AR3PEAngljuIDJNJDfJZBfCCrC/NPoaO1f/FGfwEQ3PXskpak11XhTMdZdB4y0umq8yatwiDPrkfsW2O3lOJ5uxwYB28wrnYE54w2W01ydzbRqH53RPh2yejyt5zMkV5MVzz9kaoyRJwuPHj7mczIj7t0j3XiHdfYV09yWy/Qcor419/hHO2YdYS+DuXXyEm8zIi4Lzm3+FsPNb2F/8ON+/9U+wLGuFVerOT339hRAEQcAXv/hFPve5z60w1YvA+Tp7vq5YmM1mXF5ertz4aiJ1PB7z6NEj4jjGNM2VuqDWl9eKiVpLvy5pqb3S65rA65aLf/2v/3UuLi7qtv67AfS9vT3+xJ/4E58Ajy9yT7muRa8/X5+uDz/Un60Dp3Um+bpufB1M16B3nSGvAWsNyNdvotpK57ocYn179fs8z5lMJhwdHXF+fv6cJ/t1cKnUMyeWWkpgWdZzx7We3rW3t8fNmzcrP+m1bVyXpLzoPKwXktbbrdetAXztjz4ej5nNZs9dA9OsmNRer0e/33/O57ru+dWdFyklGQZCGrii/ARruioC1SZCGLhmuQLl9XWoz/uL3l+73164bB1cvwjAr3++3uF70fbXl9Wdv/l8zmQyQUpJv9+n0WiwWCx48uQJrusynU7JsmzVkweYz+dMp1MWi8WqN13P6046te9pbjjoIoc8ee7+cRyHXq9HEATklgtFhl5qztMsR9hdbKp91LKoLMtWIwKGabFILTxX0dy/RdbcIvL2SL1dcmeb3OqTizZp6RHFLllokUeSIhGodKlf1VWBVSW81s+YzjpV1AcaAgKN9DSGo5ESRCbQKRSZQJXgOpUOvWkr+n5J3y/Z9kt2/JKmWTIbmVxdGZxemhxdWBxdmJQKXtoteHU/4+ZBgbEL06bkvcLi7cwmVoLX7Yx7IqObpDjOjIUfc9VJGTUjJscek6
9vM3qny+LEgz2Qr4N8oCn2K2tnmQiKUmDMwZ4KdAhlDrkQ4IBYgDkFGYKaQrGg8tdPKomO5QFm5VinoWLVMqriyKIqnA02NJ198PcUajui9OZId45UBWnqspABWcdEbpWYnaLSQT+2aX3k0EgS0uaE3utD/FtTOnGDt3Sfl9Itzh53+WcPDX7xEWz4mumv/h3G7/4czYOS23/5R3/a6cWfNYNsdz5p25OTPvPzJlERIO4ViH1F4VXJmFZcYowV5cQhTy3cOKczSziIh9zOTtkQI3adBZtOhCVZ1UiUWnPuOZzvSGZ7mrBjMc/aXEx2OBvvM476WH6OcBSZspFliZXm5LFDmZkVALXBaBQ4MkFfSNLHLupKVs4wGuhq5GZJrz2iUYwIR2PiwkR5m6T+NmXgItoK0aJKYyxkVS/glAhToYWgKEw2jSsetN5hZ/uUtGfz67xFqQzMaczVaIdI96tizTBDDsbI0xO8y0e0Jh+wYzxk+yUL//6rNLouW71Ler0xs7DFKOkzWvQZTDaZ6jbjvMNw0iW8aGAaGqs3w25cYLlzjIaB0XEwei5IQT62ya5s8ouC4jyjHEAx9ynLHqUIVrZ4lRQF6gTMyqNdgyiq966BKCa03Qs2N8bsHIzZv3nBZmeIXaaUc5MschgMN7k832Zwuclk3GYRNohTj7xwUfmyI+IAjgIzQxgJ2CW6aYPvV/e2JcCtpSSqGuFweZZ2vCx2RGcIlaNtu5I9nR3iPHof98Nv4zz5CO/wIfu2Sa/bxbZdZtYeV8EbzJovE7ZvEXo9FrZD4QpECV5W0C1TdmXIrh3iOgWRY/ARXR6VbXZEWIF1MeC+vmCjHKOu1WvVrzWIq5+VWZat6oRqIJVog7m/w6Kxz8zbZeJuMW/sk3ZuVc/Vq4eYg48xR08xkgytmiTBW5Qb9ylbfYROQFro0sCYTmERIQnIRRuzmOBmJ7jZEXZyhJMeEZRnuOkxppqv2lxLJOrneI2hXNdFmxZpf49Ff49484Bw44Cov0e0cUDa6OLOBhiLHBnniAwi1SZXTdqjQ7bjY/bLC5p+QbrRYfudf0p8/HiVJF4/D3NtcphucRK8wbj9CmHrgKzZRQcGaIU9n+AuTrH1McHuGPuBQ/7KPmW/jfP0DO/hMY3Hp7QOL2gcXWKrCqccHx9zfn7+XD5MjRdUs0++/4Bs/wHZ7svkB6+S776MTEOag8f0F2d0xodsRRccqBm+KVaEodZ6RSrUeEhKyWuvvcZXvvIVtra2nnvOX7flXieRa4yXJAmXl5dMJpNVez3PI8syPv74Y+bzSpLaaDRWdsM1AVc/w+tk0RqMK6VWOKgme2ui1LZt/ubf/JvfPYC+u7vLH/2jf3R10C+So6wDyfX11gE7PAPr62z4dXBev6637Tq4Wwe76/+vb/QXMa3rU+3Xvn4xrzPoWlfFoldXVxweHjKbzVbtug4U6xunPqZab7S+n3o2DGPljd7v959r+3o7XyTrqX+ArhePrv99zYjVP0qz2Wxlpl/34lqtFru7u2xubtLpdFY91Jr5r3ve62C8/qw+7uujFevHVy+vr8GLJCvXOzrX338nEH/9nKxfi+sAfX2/16/5+t/MZjMWiwW+76/cVeoC0KurK6IootFocJbe5FsXB7ze+haBPmIymbBYLFbxvutOOuPxuCoexmDy1h+CeILzjR9fPVTWz6PX7JO/8kdx9ILe1dfQSmF/eh/3CzeJ/4f3MQYxwmpw6P0QxJd0xl+j1+0S3/sis1uf4+DDX6AVD1fhQrV/eb/fp9VqrQKvro9qCCFIcsE0sbiMLI4XHo/GLmczl8upzTg0WUQG8TIAqixYqRZWenKLZyDe1whXIx2BMDWqFKi8IhJNF0xHg1Wx2aajUTlkU0E5lDAGMdXoORgNcHoab0dh74PoQebBXEpKBH1KdqXiwIvpd2aI5pRLc8TZwCF8Z4PwnQ2uPg6Qt8B7E/LbEG6BGYKeCIpYVMyhJ7AicBMwcihVFa4pYnDnoIeQDqEIWXr2U1k7mtUxKSp8pbMq/0Wn1WZ3dmF7Hzo7MHcVFypjmpekuSBLLIolWBK2RjiqmrXGSASOLjCdBHczxN9a0NQGd02PvaLN3/5rv8CQG7D5BhjuvwZ+HvhHN9569I3t2+ffKwz122Sz+E0K+cp81PEmpz0xnTTJNyyMuzlly6AoTMxIIWcaPTFRuaQtp5hhSXJp4w00GzJk256zaUzYMCdsmVO23RihKteoUxVzsiWIbrlEu3vMyy3OJ7ucTw9ItIvl5xRGVbhppxkqNihyqwoycgpKZ8lEHgNPJUyABKxOids8wbRPUM0+UWOX3PKxFiOIQkQuKIwuyg8wdnOM/RLhC4RWqMSkiC1UaoAG20zpuGMajTl5YDDx2jTlEGM6YPw0J4uaFI0d1EYPWktXq0lUgfeTI+zpMYFfsLnhstHx6LhzNg6G9DYqr3KlDEZxj3HaZTjvM7jYZHjSY3TWY6w3ILjCFu/hiBGmKzHaHnKjhbHdxujZGIFGxYJioinGJtnUI5/4FKFFEVmVpWRpsfJqr0F8pitHnLJiuZE5UmbYbkqjF9HZmNPpViFRTXtK25uy2bmi0x4xuNzk7HSPi8tNhrMek2SDRdYhTgLyyIRYL0dCDLBKWHZOV77qTlmBe7+WywCufpZiai5nUSxHCQqs0TnW0SPcj75N8M43cR+/z91+j0/9vh/h0/s9Aq14Z9rl2/MOj9IWJ6XPWLpEtom2wEg0jSwnUBmOKChMwdDxkJ7mZXHFfS65U5xzo7xAFunq+Vj/HtcgrJYh1iB9/TlZ64GDIGC+WJC4Pcb2FovmPsXGPYqNe5Sb91HtfcxogDF4ggxnkEkKc5s8uAmOi+lkFI6LjBPcwRH2ZABxjuP0iMQWC7mFUUY42TEdOcCMnmInhxiLx3TNIb4MKYuC0tnmpPnb6eYf0o+/8dxIv2maFLZL3N8n7O9VVpH9PcLuLvP+PoXjYS8WiESTiA5EIKc5N578CzbjIzaiQ/rxMbau6rDq5/+z/BjNOPN4LO5x5Nznyt1n7vcpAh8ckFGMOTnHLc9xW1fY91N4cwtu7GA+PcV8/wnZr7yDevsD9DsfYcTpJ0bP62dyjUcMy6bYvEX7re/FevlzTDo3GTZ3mbs9OvGAjfCc7eiCreiC7vyUf3Hj+zlv3+Y3vfd3uDF4n729Pb7yla9w586dlef4OmMOnzQhqZdlWcZ4PObq6uq50CHbtrm6ulols9fy4FreWqsTalZcCLEaNa+lMnVhac241x2KH//xH//uSVx2dnb4kR/5kdVJva4Pr9+vA/cXXYgaGF3Xq6/LWYDngB3wCUCxDgrXC9mug8D6/TowW99Hvc91T/brU82GXu8RXmdwaza27j0BK0C03llYT/Dc2tri9u3bKyeYF7Hy149nvXB0/Wa7HhBV99Br+QXwHPiuU0prqUvt+FGz5utzfQ1etGxdPrQuIVmXtFw/tutSlfVr8qJpHYDX52P9vF9/rafr98H6Nq5vf/2+XW93fa6TJCFMJX/yZ/8AhRY4RsnvvP9vK
LKEPAvJkpAsXZCnEbpMKbKYcDEmjeYMXv99TF/9LWCC/+ifYp18ExXPIJ1BukDmC5Kbf4h0+3eAhv7w5+nmv8LW/+l7aO6F6Dhm9Fd+jrH1OzkJf5A8stmffI0ub/Prv/f/AIGBm835Le/+v9l1NDd8yX4noBEENBqNVaHP9e/RizpH6x2n7zRrLbiYmTy8cjgcuZxObM4mFkdjm/cuHJZpOJXcQS8lyUtGXvigreXyAGhQrT+kYuc2NW5HYyuFmYCYQ34liS4FaGhsKvz9EmNPozckqS+ZCUmhoYnGKwtca0rQXWC1FuSLnPBRwOjXNpk9aqFvgvwUcBdUG9wY3LnEmAnKsKqqTGRlLy9yKk9rRQWQsgrgyymUUygXQFwdo2tWgw9JtjzmHERWAXckCBs6bdjZgO0ONHxF6MRM3ZDQClmUgsj0yHwH5VV1B+XMgCsTM608urWl0Ajy0KTIDDQ2aF0uzb4FlYjkHMFTBIcIPfVboWl98JN/NnhpD+vgJlnRYjENiBwfcauAHSiUjREqxEygphaGKNjtnLFrXtBMQ6Khz9nFDucXWzTzgi1rxqY5pS/GdPQVQXaGGR4TGyGjmx7RSztMd+8wLXc4ndxgEG4hHA22psxMZFJCBiqtTlrDCfHdhNnAI3nkw6WoPNddkHsprXuP6Qcf0ZplyLRBmvSYmFuM7A0iM8AoMnQhUJaBtZlh7OSY3QxplRSRQzZzkbGmJWaU2iTMA7LcQToljpMgZASTK8R0gEpzShxKt4/e3kL1WhCYkCnEJETMosoXfBHgTBU7u6ds3BzR9cb0WwP6nQFdf0RghUyjDqNZn+Ggx/CsyyjqMxIbjBZzisOPMc+fYJklouHC1ibGzQPk3gbGpofZKDCbBQgo5ibF3KIYS4qpQZF4FLEkL12KVAIaTBO0AamuAHyYIrISoQRCm+jSRBUmlpfiuAmuHeM5MYG7oN2Y0usOabWmgCbXBpFuMMz7jOebjCddFiODbA4oB628SloXi6qjIKnA+OdLRCsBx0aXRqWXLnkWnGQupTN1vsNShvebi1M+G4/4dDSiX2ar3ySlFFeJw69OunwQdznMm1xoj5npkLmy8vSPFWZeorQglwYb1oxXnGMeyGNuZscE+WxlrBCGIdPpdDWqWTu0CVGFGzqOQ5qmzGYzgBXzXrPxWmvsoIWx+woTd5usd4d8GciU9++iTRsnmqDDhJIGhdlBiAIhC7Tr4SwuaFz8Gs7lQ7oCPLPJeeixENukzg1S9wZCFbjZMYm1QyEbQMFvvPiTNMrT1e/zi8jIuqhfCEHqNlj0dhnt3OWbP/BHAIk5injjH/0dxq17jJu3mbvbtJMLtpJjXmskvOTMONADHJ09l82y3sGZhylH2QYfco8L/y7z5pJtb5igFPZiileMcJwB5t4Y8YYN9/fg+Bze/hDx7Y+R336IePdjxGS+AsjrhOr29jZvvvkmGxsbVUEmJlf+FlfBLsPWHlfBDmf+LpnhgBB0wkt+5F/+57TbbT796U/z5ptv0mq1VvfPdfXBOmaqn/k12L64uGCxWKxwQLPZJMuyldlDLWep6/NmsxlxHK8UE3UB6XpwUT3neb5y+gP423/7b3/3ikS3t7f5I3/kjzzH4tbApT4R1+Uv9efr4Pu6pGW5j9WNVr+ug9l15vY6qKj/Zp2Vf9F0HXhfB+frF/B6+2pwNhgMODo6Yj6fv5ClrT+rq4zrXtg6+L4OQH3f586dO+zt7a2Gua63+zpYXL9e6zffiwD+eqdjXX+/7u5Rg/JaGnRdvlJ/6a8X1L6IPV8/d9fB+f8vMpX19den9fW+E+t+ffvrIyLr//9O98j1bVxvl1KKRWrwIz/1Oyi1xJSKr95+RJZrsgLSXJPlmiQryYpKnpct5zTYo9xvQ0sgdIXgtCErva0pK3s1Q1SWdbFeFt0JbDfmlR94F9tPcRrVPHh/k8njHsnEJR75JJFHcctCNDT4GlxR+VADZl5i5gq3UDRyRYeCliro6IKuytkSOb5UmFJjmRrLAMtYvpoaU2ocS2BKjW1Vy1afGdWrZVRhRYaEJyOH3/1Xb5ErgWtW5y+wFbd6BVuNAldqJqHBP/nQe1YouJTC1pKRWiuPT+Vg0dXggSzByjUy05CCSioWvIxANEEfALcFxo7GaGpKWQVbtS3Y9lN2OjOajQnzScbJxybzb/eYPOpQ7Esan9UUNwWpBy9rwX0l2EwFaQQnqeA0gcsUpgWoskpI12UFyLVatl8sj0fzTPO/Fg5E8QzwewZsBtD1wbcrS8gkhShWzCLNLIQ0qjYmrbKyzQsUwlaQamyd4wchjYMF3u5C55EzDofBo8l5/yKaNDpo+anKY5AnGJzx9k/+NkwHt7fL7p1XWGiLUFnEpV01XS7dMGyqe8isjkco0KVEmiWNxoKdjSP2uuc0ywwdGqRTn/Goxcn5DpfDDToipC/H9BjSVlc0sjM84wRxY8H45VuMNu4yyA44nR8QFk2kW1IWBiQgUtB55czRske0igmzwy6z0w56LKtzuQHi5RLrNyQ0Ggua0Yjm4Az79JzyPCQee0y4ycTdI29to4MmCJB+gdEuMBsF2oRSGfS54l7nI7QrudTbnCX7yIWmWBikuYsqLIQokGqOsbhCJAsoFIWw0e0uaqMNvaCSgsQa5hpmErIqUMqICzY6l2zvndMLhnTkhK5dJYX2ghF5YTEadhmNuozSDYb0GV8tWHx8xfzbT9Hnl+ioRPX3kK99Ae6+hNxrY3UVZlBgNnLMRoYRlJSRQTExKaaycrMJBWWckSuTQjYpzaDyHI0zmM0xkxhb5ziuxvEkhiXIQ5t0YpMvbIrUJs8qAwHXSbhz82P+/d/3t0DCOOoyiVuM0g3GSZ/x1GUQHzCJ+iSWg5hl6MxFZnPM+BTsFPY6qJ022vHQiUSnRlXwWEjoKVxPkUtJKQRSa3xd0C9SDvKI+/GU15MpN9IF7TJDL0FWXmo+WrT41rzLw7TDadlkaHhEjo02l3U5KRiFoqFCdtUZN2bfpHX885TJZDUyWhd51iPxdRr3+vO1NsIAVqPfNdBagWUhkO0d0u5tBmafrHeHpHObqHuXrLGNyDMoBFpbla7fqPrU/ughnatfpTf5gH74iGQWkdgHvL/9l0nNTQQF33v5p2kUR889x68D9DpnoybjyrLEcRyevPYbeXL/87zyr3+G7pN3Vs9xbQdMmrdZ9F4m23mTE2OLMzps6hk39BUH6pLd/JSd7AwzX6wcx+rRiXTp9JLnOdPc5arxFpetNypte2OLvOGDDSKMkOEAU1/hbEzw3gLz03uIwQje/hD19oeIdz6Cdz5CXwzp9Xq89tpr7O/vEwTBc0x4fexzu8V/+ak/jRaSVy5+lR989ydxXZcHDx7whS98gY2NjU8QlteVH+tgvcZ45+fnK5lLWZb4vo9lWZydnTEYDFBK4TgOjUaDIAhIkoTZbLayYqxB+mKxWDnorTu61CYSSqnvrs3i1tYWf/gP/+HnihZXFbzXmPP6hNbrrE/rAGldd7UOyurPa2BYv1+2Z7X9emjkRQzy
+r5eBMxeJIW4Pl0fBpnP55ycnHB+fr76cq5PdRvqiNe6ZxgEwSdsF+v1TdNke3ub+/fv0+l0nuukrK+z3u71c3UdhK6D5/rY10F+DbjXwXb9+foXf310or4ONfu+3ob10YG6fXW7rncsXiRpWV/+nY7v+vsXXa/r21jvdF1n1F/0d9elVNePab2T+fb5Jv/y6R6/5aWn3O6On2Mb6h+tOkL49PSUs7Mzpqli/KX/GKcI2fn2T5LG0Wr9mt1JtEv0yp9D5mOCpz+B5Tbp/y++iPuFB0T//IxiIiDY48L9DRgWdBojjF5A0WtS+DauLNBIyrLyjM9ym6x0yAqbrLTJlY22JaKhEB5oR6ANgSgVcjVryKuCNBWbyAxkritwqUWV1lkKVFFlqJS5eObsUYKBqtz/yiqoyBZ6iVkFhRCkSpCWgnJJpvW8ko6rQGvOpyZKQqYEaSawVLXfIq+Adj18Ll2N8CpGujRF5fxXA7yUZ0AfMNsae1tg7IDuQ9KG0oSG0LRaCxq9GUJFpMeCxYcBk8Me+ZaJ8WmFviUQpmATwU0peFkKbkqwpGBewiiHixyeJPAwrOQxzRzyGUwugJFGzEDF1bnBAeFQtVdQ/bME9XYAnQ60W+D51WelgFRBVGoWpSYuIa+Nouv7tdRVcmbtqlIIUBohFCgKXYqUUkChAtIQM77krQOP2+6MvpXQECEeMWWZE6IYBA4PrW2Osh7jrE1mOCgPKJfFyKmo5A8ahNLV/aAkhlFg2RmOlWHJAkOXUAjy1CSMPLLcwdUxXjHBS6/Ybcbcf91n2s04czXn5S6DeJuiTqZMRHUdl5advjMjmESEjxpElw2IQDQU1m4KbxRk932kVFh2huWkmPNLjNPHGA/fRz8+Jk+2SMp7ZFtvog8OwHfRhQQtkF6JMDVdZ8R27wxvM2TY6ZK6Nv44YvbIYnoqEA0D3W6hzCZqDmVmgwYZj5bgPUc7DqrThk4DWkZ1k8e6qmcIBaIsMZwcw0xpJBFtNaPXGdBrDOi7E3rekH5jgOdFTGdthnGfUdFmcl4yeTxl9u4Zk7ePSS4lef+L5He+j3LvDuaOg9ktsIIc008xgxyzUWI2M8z2koWfCMppQRFBkdvkoklBkyIxKHIbohgzn+GZCxrdjOZWRtCMKaY28cinmEN5BXYo8YjpbY/obg/pbo7pdYZ0mhMKZTCOO4zSTSZxh3HaZRT3mYzbDKdb5IsCa3qII+fouzuk97cQTYXUUJYGTqZpZSVeoSm1ZGGYzA2LVBgIDRJNs8zYzmNuJHPuRRN2kjmb0ZROvKDMKpOGaWHyfrzJh9kOD9UWl7JNbNtodymrijQijDDHZ7gX77Axf4+d7CM61hBdVt7Y677WNdaoycLaJGD92VwXZAohGI/Hz3mnl8Igax2QdG8Rd+8z779B3LlN1txEG+ayM185xcgiJRg/pjX4gDzeYGv0L7kR/dPVfq6TYrUhheu6xHG8eqbZtk23210BzizLVqPkcRyvnuftdpvXXnsNx3GIS8Gh6vBE9XiqNzgSG5yLPm01Yyc7ZTs9YWspkTHT6QvltvVzcjoP+WDkcNH5PPHOZ8g371P2NqFhQ1kiFzPMYojVGOO+XGB/uY2M5lgfPGHjbMz+OOTOoqA5j2EN49TXYVBaXJpNdsaPMWV1XQ4ODvjyl7/MwcHBys1lnS1fx6nXlSBZljEYDLi8vFwZcDiOQxAETKdTTk5OSJIEy7JWygOA0WhEkiQIIVbZI2EYPjc6U+vn61GbPM/58R//8e8uQP+Df/APPidJWb8o8IylfhEwr9nZZWNWN9p1/fj1Ifh1eco6u7l+wut91tu+zpb/26QN6+teZ4CvL0+ShIuLC54+fcp8Pv/E8vXzUgMvrfVKSrIOztfb67oud+7c4f79+/i+vzo3652T64zzi6Qg14/vO8lFXtQDv852r4P4Gsivy1uu996vd4Je1CFaP4b1Y1lv64tY8PXr8aLpO52Persv2saL7psXsfLrbb8O9q/fL0qp54ZEx9M5ZydHPHr0iMvLS0zTxHEchBCrddKiIF4sVprIdUvMOpFsa3ePbqtKfq3rGhqNBr1ej42NDbrdLs1Ol06zGnZbL6Z51lhYoLkoDR5HNieRzXlkcRHZDDKXWe4QlTaZtsmFWUkLmlVhmLIq/atRFpi6wFAlli4wlcIogNhAxAZF5KARlBJKKcikIJdVJ8AwNLalcaXGQmPmGrUQFAvI5pL/L2t/HixLlt/3YZ9zTu613qq73/fu2/t19+vu6Z6eacyCATAYbOIGkKApi5JlSxggpLAjTO8wHUHSMANBWAyTomUFqVDYDJqiZVKSaRGygotJGSRIAIOZacx093v99ruvtVflnuf4j7xVXa/6vgYigIzIm3WzsjJPnnOq8vv7nu/5/uIxpLHADwyViiHwNZIyCBhPJMOBRGdlVlOdCfIMXMfguWU6do0gM4L0Avx/0jB8YgH4qS40932RlI4V0iBVeU6jQWuJsQUiMBgfhBK4shwxsCVoKchlaeyQiFJhIGfJkQxaXMiHTSl1MQml5V5cBjUCysy3MRc2fKWe3QvArkAkS63+ShVW67Beh2/92n9Nr/+cYLVg5b//R/6bbNl6J6k4a3nFUoWR6H0b1dOoXCNtjRGQjFzSkUWRKYRy0EKUyYQkF9mmzIv/G4PSGstoJOU5ciEopMK4lEz7RcBGJhB2gVQFltbUspCW6uE5EYVtSJXCxJJ0aBGPFUnkU+gaSe6WmTqnvyUYLFEgrKJkuZUqk/ioi+EJc9GGCmSRoc4LijML3bNAgNcas7Jyn2bzu+x5G0w27lK0r6BtD1uEWBWNbUZY5/vI3UeIZ/uk/TrZ5rtk6zcQjQpGK4qxjQnLwNMPItxaTFKzqQRjfEIGhz7jj0e42R6q2kNt1zGbVynqa+huTtHNKWILTQVjVyCZlB3MccC3yhEK/yI4ywwiN2WW1YlBnwiwJW5twJLXoa2GtFWPljqj3Twrte+tAZl26IYNumcW/f2Y/uMu4ycJ553XOHF/mHTlFtSrFxpwjQw0lpdiVXJsLyzlM02NVQmx6hmqJtGRIY8UWV4hj1QJ5FOHXFfQ4xynGBNUU2rLMX4QQgT5eUF46DM6XGKyv4SvU5a2urS2uiytnNJqnrDUOGepOaJemRDnPr24Va7jJXrjNsNwCRUHVAuHrGbRXVKc120SSyByic4VzTSnlaRU05xYWvRsm47lkQgLW2s0gkJIGlnEWjRifdSh1j3FPznAPtoleXyfD377W4zCiPjKl4he/RnSrS9SNK6A45b1ZFEa3UwS3FGXYHRIdfCE5uBD6ukjPHoUF/LWqfXxFIDPP1OmcpBFomn6zFh81hZ+k2H7TXrLX2C0dJeouU0eVEop0EWfV1lEMDpgqfeA1bP3WT1/H4v8hfMZY14g3aYWztP3jCk927Msm4F6KSXLy8vcu3fvk2BibqJtnudM0oI93WCXFQ7VOkfOBmf2GkE+YiXaZTks1/ZkBy8dzAjNMAx5/vw5g8HgBayYiirh+leJt79KvvkWxfI2ptEoPfv
DGCsdUK2EeNsp6l2JW0tZPu7h7SdYB4a3uh/R7A7ILxj9qZREytLg4b333uPu3bv4vv9SgP4y6ctwOOTo6IjJZDIbNZkmI9zf32cwGADlpNBms0kQBDMTDmDmNqO1pt/vz/rBVKkghJjNdfvbf/tv/8EB9JWVFf7kn/yTM7C8eOMLn/kU6L5MpzzdPy8zmQdU03PBiwmP5gOB+WPn9cPzrPF0me5bBFzzIHv6xVtk46efn9ruHB8ffwrcz9dHnuczyx6l1EwHPL/Ms961Wo233nprFvnNL/MAer5OpudYXBZ/FBblL/MAeL6dpvc/z5ZPg6hFZn1arvm6mb/G/HUXj11k1OfrbxHEv+weF/ct9pvL5DQvO9fLlsX6mn7+sqBi/h7SLOf/sPtVvh+v80P2+7xz8Dd5+vQpwMwmKskyvv2L/2uGb77B6n/5/2L5//Ifz/zUpxOXjJR0/9rfI337S1z/h3+XL/3a36dWq818zFutFo1Wm//9536QB9Ul/q3OPj/fO5gFvdNy5Vrw7/+Te3zUrfNzb+7z828dfKo/ZMC/v3mV+67HN7sd/vjJgKPI5iBWHMUOO6HDQexwmnoMCofQ2GRSIQKDqBiMJzCWROoCW2Q4IsWVGbbMUDqniAX5RKLGCidW2KmDndt4tsCxDX0kD1MbJzNskxOFkuFYMBlLbAeCusYOSsCbZoJoIMl6fAK+EwGuwV6BYNVQWQW/aiDP0VqSF4okF8QXUpIsBZNesLQxpT6+BrIBVh0KH4wwtERMkIdkfU14LpmEAXngILY0pimQIwjOJc0xtGNBoATWxQRSlEHLUrU00qWkepxBlIlyou2ij/gUgM6/d7EKcyE3uVhf7KgX6nMQCGOQGGEZgWUEVilfkVKjrBw7SHFqGXmqyEKXIrEoEgsD1GVEQ0VUVYoQkBhFaiSZcMhQJMIiRZGhKIQq/SYdytUVn0wOVJRZMR0zkzEJdHkDUmKUDQIslWFZOZbIsUyBLEyZxCyzSFObLLcotESK4kKCU2p2ZmD9BQ/2cuRgVncGHCemUT3Br+dkSUJy8Jxx0KZYWcW02uhaDakSrCDFbubY6Rnq9BATCoq125hAoYVD2nepDGM8E5FkPqOwTjZxsJwM203JMpt8IhDjDlb/OXb8MU7jAOv2Kty5Q7FxA93LyHeGmG5GHnrk1iq60gbb/kQaNXVNcihvIKcM2hJTdgCZQJIgRi6NLKJVnNPyD1luH9Fa69LaGNFaGuFbIYORT/fE0D1QdM/XOB/eoaNfoZNvECdeWUmWBNsq661ISwBfB9s+w7HPcFsFsm0hl31UowyC8omgmBiKWJEVAVlewSQFFjmBn+DIFGJN0dVkHZ/xboPJbg0TW8hKQWPjnFZrj3Zzj9bKOctbGfVmRsPrUXVGjNMq/ahFP2wRhQ1EElCYKh23wdNgma7nIrShKCycFFaihOUoxk8TQqU4dX1O/Tq5kFhhSJEkFJaLqdRQ/Q72wXPso13k0T7DP/4/xHg+1vvfovZ//qukd75GdvPrpI1XEQVllljHQvs2ZCAmBns8wRudURvvs5rtccXs8GH8E+wmb/Ka+vvcyv/zmfx3OldtXv47T2hOl8XM42XrC8LgBt3lH6DXfodh+zZZtV5+p6ySMJFFhhd1qI92affu0xo8oh4ecbLyDg9v/gw3Dv4pr+7+t5+cc07hMH22K6W4evUqr7766kylMHW+mbLKMzLpAgzneU6SG45liyN7kzP/Kmf+VTrBFbx8QnuyQ2v8nHr/MeGDf0F49BguMNGUbZ9ipKkTi5AK3b7H5MqPYK5/GXnlHnGtha7YCG2ws5RCSZy1mPoXevjLIU4nIz53ufP0A649uY8Xp3zvZ3+YtWqNvyhWWa83XwDli/W/CNKNKZ1WTk5O6HQ6s/LWajU8z5slrSyKYpZxvdFokCQJ5+fnM9erqSxmNBoxHA5ngdNUUjzNj/I3/+bf/IObJLqyssLP/uzPvgCCFyUA09eXTSK8TA4xL3l4Gbia7n8ZQF9cPksGMXdPL73WPHs5f80paz+ZTDg4OODx48czrddl15rO2p1OIPB9/wWpy3SZDxiuX7/OO++8Mxs6ma/bRUZ3/j7nNeaLdTG/Xayv+baaD5imzO30/fkfkMW2/qzRicX6vuw+Xtb2859bBMCXgfrPqp9F8L5YR4vHXxbMLC6XBYDT408Tl28+/HGm4uoKIaQTLJ3gSo1NihYZp2/fwP6RuPTTHYaIMEGECYwnMBpDpgnf/BKF8srzTIZUsoRmkbKiU9bJ8WyLv7NyFXORcOTVNGQ9T1ktMlaLlOU8Ix9Ifvkf3UIPJQJ4b2PAZjVlq5ZwpZayVUuJWoY/c2OTXEikMXwtnLCdZVzLL9Yip8WLoxUGSS+yOZvYnI4sdsaKJ5HFXmpzmtn0tMXEWGSWQlY0BKDdkjm0yHBUimcnVNwYaaechqvkhUVThSwXE/xJTtRxOD+p0D/zyEMbZUo7R4CKa6j5BW45P4nBSDIYSmxAZyURvLmac2c75o1XLW7ccMiEICogzAXjTHA6ge+dwUdDoEYplYlLnfjMnWYmSQFEya5LykyjhRYYJRHVkmG3jaGWCSqhQA0gDSGKYBJCmggCy9DwBA0XPBekC4kNfQlnGaR9uMj/WV6zyicuGhblA9rLQXQgP4Z7b38bQxNNg4IGZYLPiIyElIxUG5EZjyitk9uYWJZ++ResrZI5tpWjU0ERKYrUwmQKKTSBldCyQlasCE+XgU6hBVkhybQkyRVJoci0RVxYhNhkxkJr+emgwzBXiWXvmWaVlVKjlEYojbAMwi63UmnkhfWnMQJtJLkpg4NCynKooQa8etFGL7TTBWC/KIc4KWhWO2yuHaLDhNHQZRJViPIqqaih5YVfudQXVoKgXs/wtsfYVoxMx0hbgOdgcsVKckZdDBnpOoeTK8iRwQwk0SAgifxP7jGNkcNTnOFzVPgh9tIQdXMJ9dpNuHMdHaXohx2yI4cs3yIzyxS288m8lOli8QnLayilTYayo0Yp9CT0XTiTeEmPdv057ZVDlrb7tG4mtNoxbfuUpWBAklp0u026/RXOBxt0x8t0z2t0+3X68TLarpSTOAVlciMjkaNT7HQHyz3Hbueoa0vIrTVky8N2Qiw/RwYCHZXgPS980sQj7gTYZNgiRaYFeqhJzxzi3SrJQaWUMAXg1QpWliZc2TqldqUHrRG2O6bm9Gl5XZb8LoE9YRTXGaU19sbbPB5+jtCtc151yaQs3X0Ki1qYUD8/J3z4kGjYJw98sq1tiht3QRtU7xyynOLqrbKfRCGbP3F3NvK4vLJKuHSds8Ztxptvk2y/S1bdwO+eoKIYy62QWD5ppUZhqfL7OhEwyrm2+xdpREfUwyOW/RL8CiFmuTGmo6xTcPoyED/9jZ0nMbV0GXq3Gfh3OV96m/7yq+S1CsItA4kyihfligCj+Zl/9k2cfDJ71i0+q5RS3Lp1i9dee232XDPGzJzIpk4380B93rZyaoihtUYj6XlrdCrXS9AebNMNriLTEO/sI9zjD7CPP8A5/B709zEXdT
Fl9KcTKR3H4datW6WEyG8xWvsyJ1t/hNPG2xAIVCXDuApjg9XMaL11Qq3RJalXMFIgteHnzw3/Y2f1BcywSMjOk8vzQUOn0+H4+Jg4jmc6/kajwXg85tmzZ4RhiFIK3/dn2cSPjo6Iomjm5jJ1xDk/PydN0xmmmiY06vV6/I2/8Tf+4GwWl5eX+RN/4k8An+iYFwHePCP8suyc0+UyicF0Ow/cpsB40TVlvpPNn3MRYC2C10UQtijLmAKQl5U7jmO63S7379+fDd3MX3e+LFmWzWx4ppHVVMICvDBMBmUCnPfee4+bN2++cK5pZDsPCBdHGRavvzi0ddn9LLbTlClf9JGft3eaH+l42fkvK8/vRZLzWaMBlx132bDhZZ9bZMDnz3NZP5w/38uChcvqdHqdwsD/5vFX+Dhq8bXaLj+p/gUfPnrOSW/MyuZ16u11Rhn8o8+9y6jepL67R+PxMxLPJfc9Mt+jCDxM4BKtrmAchbQKhC0wtkRbAiyBkeUPsc5LHfAMnYgLUlXoC6mzoRAXbHEHvD7YXbD6IPqQjSSTsYVxDGZVs74Vc6sdQ1sTNwS9iuTEtQi05lqecb3IL7YF14uCbV3gXjICMq3DtBCcjy1ORorTkc3O2GYnstlLFceZomskIxTaEdAA4ZtykteFCsAmw7MiqrU+7fqAdiXGmQiSo4DxfpXzgxqd4wrh2KFay3BsQ5FJolCRhKIE2xfabOVCtQ7ry4Ir66WbiufA3z2EsYI3tqCrINLwI0vwqoDhIXz3KTw+hrMcWAWzDrSAmsG2EuxehhlChkPuW4hlg3EFbmxYLmDdSGpCkBmY5IJxCmEOUQGxvsjqPe1HFxNLzTzDXlCCsjlWvWTZBRgyBIdU+Ges8BtUyXCROChcbGxc/taf/cv4Po2NDe79xI+zH1ToCJ9E2+hCYows+5MswTsppRQnYZaURWiDY3LqxCzpkJV8wFbeZYUhS3mXIB0iwhFpGHLWn/DdfJmHb73H8OYbJEENJgo6AhKDZac4IoXMkA1csoFTarQjg4oMIivbUGuFbafYTobtZSgvQzk5ls5JsjJw9aod3OKQnrrBWK6Rp0FZNw4lI62BgYGBKF/XQK1N2LzyLX7wyq/y7t09vLrN0ccFew80O9F77AV/iG7RIvr2/wOxvUFx6zWK9etor4pVDLH9ArsBysqwVUpD9qk4E2LbY2DqtIddTEfRO2wy7DaI4yp57nDhtgNZjBieYnUfo7Ln2C2Du12Fm3XEazdAWegjSAc1isQjHbvoVCECDZZE57JMVDYNgqYjF4JyR16UnXgg4VDBWYYIuyh9SnPjhPZrIc1XHJbaKW1xQNvrs1Qb4tkJvVGrnLzaqdPrNzgbb9Idr9Ad1olNDZzSKlPEI6zRCWJ4gMUQXZfI29cw17ZwqilOO6MIFeaffhcz6iGuXiNbuUbqtkl1gGPHOCpFFTkiFGRdi2jXRx9YkIIIoFrLaTUS/HZGupJRLI/x7TFRErDTuwUSlNZU84iaHuPYEXFTMlxySZoeIsnROJioQO7uIR4/hs4ZQknSL30Ns3UNJMhBH3/v6SxbavPkgM3JgEDnnJ2dUVTaJNvvou5+jdH6m+ypVRr5gOXRDk9Ob1NkFZTexZYPyVdukbeu4eYhV6wJG2LIStGllZ6xFJ9QTc4xefZCNsopUJ1qny/Tdc+zv9MR/kQ1GHiv0PNfpdt8i2HzFrrqQFD+ePqTU9r9D2n3PqTRf0BtvI+SzDTUQRBw/fp1VldXZwz7FKhO51VN1ylYn18XZZnz4DfPcw6OjjnIAsbtu0TLrxKv3SNZfR1RZNhH30cdfg+5912sw+9hD/fL/GQXDPUURzSbTW7eusP9+s8xtq/xg+rvsXqlxX+6/T/FYFiJDviZD/8Sj/7wl3n8Uz8ACP4XOxH/5tLVFyRIi1Ls6et55UNRFAwGA05OThgMBrNjW60WAE+ePKHT6SBl6ebSbDZpNBp0u13Oz89n0qFKpYLjOLNEkdNgyPd9XNdlPB7zV//qX+Xo6GiKJ37/DPqf+lN/6gUf7Kn8YR7ULgLhxQ41v8wD+6neZxHkLrLl84Bq3iFmkWl+mQTiMkZ0WrZ5acC8HGYeuGdZRhiGfPzxx+zv7898zy8rozGG0WhEHJdJZ1zXnflrw4sTX6fluHbtGu+9994sGc78fU2PvwwQT/dPy77INi+06QuM+e9mrfgyIDpdptd72XXmX7+Mcf+9jnjMX3P+vctY9M86x2IAt1iWRevOy/rfZ5VVCEmoLXxR/gg/ePCA/f19wjDki1/8Io1GgzhJ6CQJ0enpzEcdyn7gui6+71Or17GaS2zVa7MMr5ZlYYRFYhQjbLrC4VTZREIyEBZnyqZj2fSVxdhShMoiUpJYKjIlKIRACzHT/pYCaVNqMAYC+gJ6F9u+KPclBrGusdcL7JUCa0kjljR5HTLXUI2hFWtWYs1aqtnKNduZZl1oAqUJbIOvDIFtqNilFFeKF9vvNJQMQ0VnrDgYKB5NLL6fOjxVNl1HEnlTcHMxNnEBSByt8UWGaxJUnGPGmmwoiM5copMKQT2mvhSjEIRdj9G5S54IyMEPDGtrBdvrsLxUBqP9GA4zOLBhUCkxEOcQDKGtoeKUWCjPYRRCL4bYAmsJzBLoeoFdJMhxQW5ZZG0bXjOIKljnKc6Rpv3Y5ebY5kpLsNWCzRZsLEGrCc0aVANDCpxP4HAAj/qaj7qGD04ND3YTTOGXPpCIT9xw1AurwSLDokydqWMfS4FlgzHYaBxR4JLhyBzjGBJfkHiKQglUXqBEgWXnGCOJBz5Z6qCNLCc+OnrOMo+SdXZN+X+uEWmKiiOcNKFGQVNpHDQD2+M8qBApF1NICCWoAqcZE7SGVJtjkqjCuFsjHgeYQiEHBZVORLM/ojnsU0165CalQ5tIVCkQZEWFJAqIRx66kFjWBGWn4Fnk0qfQ9ifzEWZf9It6Cwysp7jtHa6IX+Oe/RvcvRGzddsgpeH04wnH94ccfzTieEdzHG8wrN8huXob8+pb5Nu30UvrSJFheQWun2C7Ka4TIdwCT0XU0jEqKYgGDtGhy+iszjhcIslrpfA/v7AEUgoRDpC9Z1jFOdKPUasB6sYK6vUmqi4oBhb5wEFHEmFrjKMpEk16GlAMKyBkyYBL8UlwN3NLupDOTICOgaMJnB1hpSfI5hjnNY/Ga6u0lmLa4pSW06dV6dGunNOs9ogjh26nSWe0Trdbpdevcz5ZpRNvMUiWMQbEpIeKzrFWLYwOKWJNvnENYSnsvY9wD+5jHX5Ms2Xzxk98ldHmDQ5kha7xmWQ+ceJjyQzLZBBD0VfkRzbmIrGVdA1+PaVSyxCtgklLMg5cRFJO2DDT74LRqFEXlXTAyzCrdcxmA+FqyAxa+ojzHurxY+znz/GlRluKuL0Md14nXN0kGHZRTx/SON6nfX7Ee40Kt8j5+MP7fBT6HFVv0Gm9Srj1eYyUOLu/jbv7bWqnH/EDr97gzpd/nFPZZC+vcpBX2Nc1QmOxLsZsMGDN9FkpOrSyc1rJGW4RzsB5oTXPxBpe3KGW9
QFX7J7PA/N3okWcZ9Q+b9ksL7JYV7usQF3+e16BmviO+xbH4HEYHUvsz7vbf4Z/tf5epoiSAWKGkpFw2PbxpDvmb2eVqKuW1NosQcqhlGLHGgqCSZwn9zP+Dkc1VVcfzbDVTeew7Y3x+XEYFXzTGtIMTpCdxRZA5WJoA9XY5Z7KdccH3uSFVadsp/2n6KEn8GyHMiqShvKQL3ooykCNBzPbrrukfCL+bHyI/p+z4HBwesrKwwPz/P+++/PwXnxbrNSm+LsuFGo0Gj0UCSJEzTRJblKViGSfbRfMzM5+Gij5aiKDSbTX78x3+c119/fRqM4fczB+TjXZqmHBwcsLW1NW2fNE05e/YsrVaLR48e8cknn5AkyRRXnj9/no2NDd5++20++eST/Hx/cID+K7/yK9OLK07ixYrljTXL5BbLcQN8cb/j2NfjmKnZibF4k1804RX/n+9blKfklk4e87zIkOalqNnKO6ckSVNAbxjGEd15zp7nQDwPAXhwcMDh4eE01nN+bTkoK4LtYkr6fL9ZGcUsQM/fJUlicXFxGisUPgPSRalC/j2vYxGYzzLuwBGP6eL9Kx4n/98sG36c496sYZH3peJviqKwurrKxsYGlUplqvPO71uSJOzs7EyT2nS73am+O39YczBdlLDkD0/+PZev5Ax63idyoJ7/twjwZ4H5LODM+1k+mOSGWu6tntcjv45iQqz8mooRhNI0pdPpMBqNWFxcPOK1XmzjfID7MvB43OBYZEtyJ9eiU1C+SpFHJBqNRlOmPA+RWGTKiwbcLGt9HEB/0TjyInlJcezJBBFqaxOw3twkfQ7is8Ymaf0EQjD+jG3vPfyMhe8+RAjHR9pk9vzF32a3/7uA9FmAn7dNPh4UjcV79+5Nx4vfL0A/efIkf+fv/B1c16Xb7bK1tcVoNGJlZYXTp09PHdCL7fuisXR2/J3e44yp3CPfFqYST8c1vt9Z5dP+ArvjMkkgoMsxG9UhF2uHnKoOOFUdMme4bIlVvi8s85Vslw1GR8b0/FxRCteyeX43WeW9eIkl0Zkw6/IOC9Jncrd8/wEat4QmN8UmN2iyg8Vphlymy6WsxyVxiEl85Bz59T8cVXhoV3l3YR9Tjj93n4rjbZZljAOV2/sTwH5zr8b9doWFiselpSGXl/tcWhqyUvNIyXiqlbhq1rlq1flUrVLej1l8EiBvQX9fZatvUdUidDHB9mSCQGSt6jJXCfkTl7a4sjJAm0kyPfRUPn5W58OndT61TaQNm5OvH2KdGeEtBNQCgUZPxnla4eHteR5db5EmIq8sDHllYcDL833WyzYwGcv9IOH7T+t89+kCd3sNemGJJBOQhRhNB18VqTcjXjo54OTZA8SzHXYbPuVQ4uRYwQx0hoLFPdPkmmMS3dVQb6mM7+u4I5lLCw6vrYx5ZcVmZd3FkDOeegYfBlXeD8rcjiw2FW8K2F9VHXQxJUTgtmJwVbO4qlp8oppoUUZlN6V/W0V4LPAjlTFvbw4pr/vc7lV5cmDwg8Mympjx1bkxX58f8UbDZi/V+J+GNX57XOFJoCEIoIopLxkeP1Ed0VJjnqQaHwYWnwQGc1L8nGGfvBbEgLEs8nFJ5YPngB3gTX/Iq9ltLsvfQTVvIIcLeKOX+M3Dt/lnh2/x0C0RJwJ1PeGK4fLTxoA39CEPyim/W03Z1RI6asYpT+CCLXLBEdl0QDuqFCVJUh74Oh+My7w3LnPVKdOQI14xRjS8iFFP4IaucrgM1hWXbDlhsZvwiu1wqu1x7/tNzlSGvLK4f4Scm5XC5hHJihK5YuS0HKAXicAcZ/m+jyAINJtNPvzww6mDZdEQyM+bpunU/y4nLjVNo16v02g0poRFXqIomhKr+YpjHh45Z9Lz45RKJd555x1+5Ed+hHq9/oXk7QvG0+l+4/GYBw8e4Pv+NFJcHs2l3+/zgx/8gNFoNMUJ8/PzvPzyy3zjG9/44QP04oXlIDL/nlsoxXLcID4Lto+LCHLcwFfc/zgG/suWJWbBKzDVj///yfvTKMmu7DwM/W5EZGTMkUNl1owqVAGowowGGgDRDTTQ6CYpUXySKNGyZIuS/DSQEknJNi0teaDe8tLEZ9mypWdby9KzJS9bS8OTRVl+EjWQ6gnNbszzWChUFWrIqhwiY47IiIyI9yPx3fpi5zk3bmRlgU29vVasmO499wz77P3tffbZRwESQ1t4cqc1GMhsDPdQDydB3fb2dgjMq9UqNjY20GjsKH1am5ubm7h06dLNrAzY7UFmfQeDAdrt9lj8ts+Lrn1PKhaLYSgE/1PvqAIbBewKzF2f2Q+q0H3hLy6y/KL3cjy0PTMzMzhy5AiOHj2KAwcOhGCWqxbb29tYW1vDO++8g/Pnz4/ll9cwAbsBlMBc890TnLvSKyqI5jvraseA/Gbj6zQHPcdTeUC91kEQhGE2yo809GZnZ3HkyJFdqxHadh9Zoch6aVw5w1g085B6yQnOeY1ump4EZJVPXcaevcb13RrdPgrrkEhhVD7+mdddQ2ZOYzh3B4LO5nis+2fgPVn5BEG/FQncvc+cQFbW0GvErEHkz/fee2/PAP3EiRP4hV/4BVy7dg03btzA3NwcTp06FZ48/FmZY/V2OUe0viSfPHaVM0SAd1uL+LW1U3itchDYAnL9bTQ7M0gFI5ws1XBnuRqC9qPFJpLB7rnF771RAq8PlvHdwVG8tH0IJxJ1PDNzDU/PrGAxMS6f2M4GZvAeFvBuYhFvjxZwMSjiTjTwIDbxYFDBvaMKSsHujfGmX3cZmvb6IAiwtQ2cWy3teNmvlfHuyhwSwQj3H6nh/s+87HctNRAkgY+yJbyWncPruTl8kCnhWLuDExfbyH06RPP6DD5cLWGtNYvCzDaG2wE63STuWmjg8eObeORYFfcfbiAzo/J4hCvVLF77dAGvXJrHG9dKWDpbxd2P30DpbA2dI1200yMsVpMIruSx+tEC3nv9EPr1WTy4VMHZgxs4dMc67u+so/9ZfHEqlcKvvnIDN7Jfx8fNE/i0lkNnO4FEAKRmRujPAKMZIFvu4Hf/1nPI3reKT0t9lLaSuKsxg1J3Fg0U8HE+izcHWWyczyD3Thr1cxlUb8xi5o4evrZUw/PLO2ExmewIr/cKeHmrhJe3Sri0ncH96RaemK3j8dkG7k+3kAp2VmHOpTJ4Y7aA19I5vJLKoz8IMHMZqH84A/SBYwtb+BuHP0UjSOK7q0V8d62E92tZPDjXxpeWG3hmuYkThS280c7hV2pz+PVmAWv9nRWEfHKAx/ItfK1Ux1x6gI/6s3h1K4/Xe3nMJ7bxxdkWHs928HimjSOpPi5kk/heLoWXCim8l0ninq1tPNZbwSOjV3E8+x2MZm9gpnU3KvWH8c/Wvox/vnEfrnXSGAXAUqaPxzIt/PZsFffPNnCuMAxDYq7NjnCyE+BsM8DZVgJ3N4HMYGdOrKeGCEYjlPoJvNPK4aVGES83S3i3s7Ph9KHZOuY6PVTWk3grl8X6MWDm/i6SR/rYPj+Lr/Zv4IvNCobDIX7lzEGcXanhax+sjIXDKjjXzaJ6Wr
eGyAAI99fNzMzgrbfeQqVSCf93YQyuOOs8Yy70AwcOIJ/Ph048Xl8sFkOnJYF6v98f24NHZ959992HH/7hH8aJEyf2DNCBHTx37ty58AyVTqeDbDaLBx98EEEQ4LXXXsPFixfD6IxsNotHHnkEP/7jP76/IS4/+7M/G8ZIT/Jy2zJdWVwIjAn0FeQTVFjvmgINl9L2CVFLCkRpVWlKRW4UVGWp9/EUSuBmjmzGqwMYy3dOLyeAsditZrOJS5cuYWVlZQygW+8Mn0vrFLgJuggc9T6XF53H53JDoU4I6yn3ec91I6neT4taT95kW1hPfbd84VLmylMErATYy8vL4RIZvdn0Xm9vb2N9fR2ffPIJ3nnnnfA0Tg1F0awrNHAYX66gnNdoCIx63nVDsBoSUfyppMCYbddrKHSYC5+Wv4YfjEajMF3jsWPHxjaTWp5wPZufradcvSKML9eMLLVaLTzkqV6vhxs+dZXAPneSp5k85ftvktB0tdNSHDA9SqQwnLvjJmAneF88hVH5OILW+s0Y98/edzLOfIJgu+MtN069LM8ztIW8mMvl8Oabb6LVarG9UwH0paUl/PiP/zhyuRzuvPNOLC8vh/ykMs4Fyl0hhrzPGlk+/eBykmwPR/iws4jvNY/je/UjSG0PcWdQRWnQx2Yrgwu1ObS2UzhZquPOUg2n52o4Va7hRLmBmcRgrJ4A0EMKrw4O4tv9I3h1ewl3JWt4JnUVX0qtYC7R21X30NhFCu9jHu+M5vE2FnAOJRxDCw+gggexiQeCTcwHvbG26Ji5DBKrv25+Bq5U83jn2hzeuVbGu9fmsN6axZmDdTxwpIoHjtRw3+EaZrIjvJ0p4fXsHF7PzeNCOo+zWw3ct9nAwqfbaF1L4r3rZby/VtqJG8cQW1tJHC3uGHA3uhn8e1+4hD/06GXpb+CDlSJe/czD/sFKEadPV3D2yRuYv3cT3SMdrJSBdDdA6tosPl6dx42XD6JzsYhHcjdwT/EaguvfxtMPLOLee8+GeKCxlcK/+nARL1yYw+vXStgaJoFghGRuhEQW6A+B/EILv/fHP0Ti7go+LfSxsJXE6UYai+1ZtEd5/KNDRVwezmDw0SzOvJrA1ocZvHc9j8PlLXzhaBPLxT7+15cOIpgFfvonruPybBovbxWxMZjBF2abeCLTwBOzTdyd7iLAzt6Ei8k0vj1bxF/LLAP9APjlBAofJrBc7OPZ03V85VQdJw508UqlgF9fK+G7q0UAwJeXG/jychNPLTUxTAT4XiOPf1GbwyutPNqDBEYBsJDaxlOFBp4tNlBOD/HhIIeXuzm82s2hkBjiiWwL98108HcbC1hFEn/k1ApaC9t4MZ9CLwAeb7fx6OA8Hkx9F5ncqwgQYKZ5Hy5sPop/vPZlvFA/irVuCkESOJLt4clMEz+RqeDOmQ4+LAzwfn6ED/JDXM6McEcnwHxvhNfLQADgF84B9zZuxnN3hwm80Srg5WYJr7TncLGXw9nZOh5MV/Evv3sQ67UZ5O7t4PnfdgHXj6ZwtZQBggDJwRD/yb9+D4dq7dChqZlcuBrMcF9mVFOcQFC/tLSEDz74ABcuXNjhRceGVHUU0QHKVT0C76WlJRQKhRDLMDy4WCyGzjruI2MZnHvU23fccQd++Id/GPfff/8u59o0emQwGODChQtYXV3dkT+f7dG7++67cejQIZw/fx6vvfYatre3MTs7i1Qqhbvuugu///f/frzzzjt83q0B9KNHj+Lnf/7nw8qrd04bZBUNO10PGLLeLh9Is9fa+HVV2i6F4TIc+EyCbFplfBGg6QZMVSpcdqFlp4fVZLPZcCMEl/3X19exsbERPkuZu9vtYmNjA59++inq9fpYOzWDDZ/PCaDMHATB2KYIbaPtx2w2iyNHjmBubi68xgJ1n8dcx9l1HUG9Hqzk2sirmz7IP/rZ7m9Qo40ZdZaXl3HkyBEcPHgQ+Xx+LM5se3sbq6ur+Oijj3D+/PnQ+6zAmpOXm06Yx5xGFkNaNCMLVyuUV1yx5sor1qC04+ICsZa3R6ObKTwZpkMvActpNptYX19HqVTC8vLy2DxQvlXScbOClrGEfCcgr9frYQgLd+23Wq0wrtwlC+ICdOUnH5j1zXd7TRRN4+12lTVKpjGaPznudf/sfVQ6iqB5PYxx1w2ryc2LCAZbY2W5vK42XpKhTQxvSaVSeP311/cM0A8dOoSf/dmfxdGjR8NQPAu6rczjb0EQOMfHynNtR9R4uJwtwxHwQWcR32scw/caR5AOhvhy+RoemF3DsBvgQr2MC7U5fFIrY72dxdFiE6fKO4D99HwNp8p15NM3QXt7lMRL2wfxQv8I3tg+gHuTm3h65hq+NHMd5eTNA+DsnAyCAN1RAh9gDu+MFvA25vEB5nAQHTwQbOKhYBMPoIJlAfx8Zx/5vOq+3zfbaby3Usbb1+bw7rUyPl4r4thcG/cfqeLBo3U8eKSG3PwAb2bLeD03h1czZaylZvFQt44vtKu4c6ODztUk3r4+h5evzONyLQuMAqA9wj3LTTxyrIaHj9bw4JEqSpntsL3tXhJvXinjlYvzePXTedyoZ/HAsQrOfuEacg9u4L1HtzEKAmxfzuKO/3kbb60v4NrobqSSSTx6uI5HDzfw6OEaTpS7CD5b5XjnWhE/8/fPIpEY4Q88tYJ/dm4JK/U0kAfSSyMsZ3v44lINp+6oIH96HStLHVzO91HspfDpTAnDQRb/6bkBTnSBrT7w3vUc3rxWxD9+awnX6jthDf/Bkzfw819ZAQBc357By1slvLRVwEvdIrZHwOOZ1meAvYHjM33814Vl/MpsCX+ks4Hf06rhtas5fPt8Gd86X0S9m8TTpxr4yqk6vnSyiRv9NF5YLeC7q0W8tZnDveUOnj7YxNPLDZwtdXG1n8Z36gX8q3oZ77R3cmlvI8CR2R6+Umzh6VITxZkh3t7K4J+1yni3lwEQ4JHZNv7u4YsYAbiUTuD7hRReys/grWwSp7YGeLxbwRfwNk7OfgeD3EdIbR1ConEv3qo8jn+09iRea8+hupVEKg2cyGzhS9k6ftdsBQeTXXyYG+IfHRrgUg4IRsD/7TrwE9fG5b/q+up2Eq+1Sni1M4eXmgtY257FiWETX527jIdSa/gHX1nEp/NZBKMAf+6fv4lCuzeWC11XWXWlVVND6561+fl5dDodXL9+HZcuXRrbU6dx7/pOfcfkDkEQhBtFi8XiWPQFzyqhfqSDgxhAHQ2JRAIHDx7Ec889hyeeeCJ0bO0FoI9GI6ysrODy5cuhHuz1ejhw4ADuuece1Ot1vPjii6hUKiGGWFxcxJ/4E38CH3zwAcu4dQ/6z/3cz+2q4HA4HIsVBrALXPJ6FYTsVJuxxQXYbYfwftthttN8HhzdfKBx3Oqp0pglBQ8MNRmNRuH13DSYTCbR6/XCzCyMO+/1eshkMmNxvTwcp91u4+rVq7h27doYI1lDhH3qyjPKHJuTAFAymcTc3ByOHj2KTCbjBNm81gJzjU2nocHfFZgxO4TepzH8er0vDt1+5vilUikUCgWcPHkSx44dw
+LiYjgRSf1+H5ubm/j444/xySefAMBY2kOGpDCfOccul8uhVCohm82G3nbW227+pBVsY81dk1v/U/6217mAub7oTWAMHw/O0lWDfr8f5oR1kRpS1gvC9FgE5czPTnBerVbH8rXTW25DpFzGsGt8XUBYw2F89XfxSZznxb0/DvkA1ig5i+H8yV3AfbhwCqPiYQT1azc3qlYufBZC8zESmxeRGO4Y/Gps0iCl0Uij7NVXX0Wz2WSb9hSDbuWu/axA3Moj1z0ce95nnSlahi2Xz9NygyDAYDjCh91F/Hr9GL5XP4x0MMSXytfw5fI1nJqtojOYwcX6HM5vlvBJrYxPamVcbRSwlOvg1NwOWD81V8PdCw3MZ3poDlP4Xv8gvtM/jHe2F/FgagNfSV/HU+kbyAfbu+pn69MbBfhwVAo97O+O5jCPHh4INvFwYhMPJWo4hA4A/54KIPrgLK1Dt5/AhzdKePtqGe9eK+OdlTKyqQEeOFrFg0cbeOhYHcWDW3gzP4fXMmW8mi2jFyTwhW4Nj7Rr+Pu/fBxrl2fx+JEqfve9V/HmlTm8ebWMD1eLOD7XxsNHq3jwSBUPHa1iPtsL599aYwavfbqAVz9dwJtXD2ArBZQfquCBmY9x56Vv4ke+cj+WlpZxuVnAG9fn8Nr1El5f2cn489iROh47XMejRxo4ku8ikQRSCeD91Tz+1D+9B7PJIf7G7/4Q1eEMXlwr4cW1Et6v5nFPuY3Hl2q440QFyZMVXJrbwpV8H0vdFE43ZnG6mcadzTQ+vlzEH/v7dyMIgP/53/sYDxxu7+rD0Qi4sD2LlzoFvNQt4OWtAgrBED+UbeKJbAtPZppYTPbDsRiNgAsbM/jW+SK+fb6Et6/n8NCRNp493cCzpxtYLPTx0kYBL6wW8cJqAb1BAl9a3gHrX1pqopge4P1uHt9uFPBvqkWc784inRhhaxTgzswWHi+08H9259BJJPD/WFzB7ylVd/FXCyO8lk3ipeIMXsyn0EwG+GKzh8f6l/Fw4kXk869iMLuKmebdGDXuwwvrP4Rf3nwA73dzaPYTSKdHOJ3p4gu5TVw4u4r8zDZ+4VyAA1u7dYuV2dQHK1spvNaZx+vdBbzZP4DBKMDRUhVfCG7gS/3LWBw1QoCujh2CcxdAHwx2DmhkKkRmr7tw4UIYBqPx7TpfMpkM5ufnw7TE1J3pdDpMtahzKpvNhgcH0iPOGHR1+AEYi0N/7rnnxvKhxyGrVzTdIrDjRZ+ZmcH999+PmZkZvPHGGzh//jyCIAg9/L/wC7+wv3nQNQZdhWuUElThbAWWSxjq9RTyNm+2z4tmgY8tP5FIjIFkDWthzDHBmIJKZWIu36TT6bFTJJmbu9FoYH19HdevX8f169fDeKSZmZmwHf1+P8x5vr29HXrRq9Xqrj603hjmIldm1g0Rtj+tkcQNo4cOHQqfxf819pzP1r53xZm7gDqzzvA3BbJW0WtdWR/1nNvxTSQSOHz4MB5++GEsLy+PWdDsn2aziStXruDixYth2kwNZykUCuGLQN21ckI+0DhzhsnoipAla6zaNlogryBG+0aNJ/YrwfT29nbYhmQyGXq2y+Uy5ufnx8ZJ+VczseimHh40wfhyesyZhUi95QzH8hklcbwPLq+riy98ZcYF6PtJOn5x6xP2TyqL4cKdIWAfhAD+FEb5JSRqV3fAe/UCZqoXMVu/hHJ/DcXeOvLZdMiXw+HwlgH6L/3SLzmdJ5YPo8LPbJut3PcBcp/M19/tCmgikQCCBN5rzuHX60fw3dphzHzmWf9yeQV3ZWtgkf1hEpfqRZyvlvFJtYRPqmVcqJdQmOnj1Fwdp+fqOD1fx8FSBx+nS/jO9hG815/D8X4TvyV3BV8rXEM2GOzSby5+HSKBc6Mi3h7N463RPN4ZzSGLAR5KbOLBoIoHgwruCDph3aIcKJZseCews7pwcT2Pd1bKePtqGe9cLaHWmcF9hxt48Ggd9x+poXx8C+/PlfBqtozXM2XMDEZ4fKuKxzpVPNqtYWnYR7efwLvXCnjz6hzevFbG+9dLWC508eCRTTx4qIL7D25gPtv6DIRt42Ilj2+9m8YHG8dwpX0KB8s9PH6yhifurOHREw3kZwcAAlys5vDqShGvrZTw2koRiWD0mXe9jkcP13FybguJxO6ED41egNcrJby4WsSLayWsdtJ47EADjx2s4diJCrrHavik2MPVXB8HuzO4sz6Lk/U0znRnkR3u8Oj6TB8zwwDlwfiepSAIdkKo+jm8vFXEi90iXu/mcDTVw5PZJp7MNPFYpoVSckd3dYYBNropvPNpHt/8uIgXLhQwnx2EYP2Roy1c7sziu2tFvHCjgNc2crin1MUzB1t45lATDy5soTsM8HIjh2/WC/hWrYD17RTSwRBbCHBftovnSk08WWjjn1dLeKuTxX92dBWP5G7uLxsBuJJO7GSGyc/gjXwKx7cGeLzdxBeG7+Ou2e9hmH8XQIB08z50avfjV9aewr9s3IFzn51Um8sMcc9sG8+nN/FUooq/NXMU2dE2/qP+p0gPt8f0tmsT6GAwxKfbObzZW8Jb/SW8u30AhaCH+xLXce/oGu4ZXkG238DFfhFXhkXc3Xofo15nTKcACBMgFAoFtFo7PEUwy1VAPYGdvFEoFLC0tIR2u40rV66g0+mEmIwbRQuFQng9MRwxGcP2NCmGzmFipgceeAC/5bf8Fhw5cmRqp43Kw16vh8uXL2N9fR0AQh17zz334ODBg/j000/x6quvhqeczszM4E//6T8dOhH3BaD/8T/+x3dlqWBYAT+rgLeNsQJavegu8EKQRIDhimFmuQpkeb/16vCzes9TqVToRaUStDH2msO63+9jZmZmDORlMpkwtGVzcxM3btwID6FJJBJhjDjbwkwYnAyNRgNXrlzB1atXw9AZts3GJ+sGDfVc67H0loEs5fP58IRRHQfrNWfbbZ9bT7veZ+PS9VodLwC7NsaSXB523TibyWRw77334syZM2GmE+2fVquFGzdu4Pr162FucXojmcKQXkm+65gzplzBOIG6hh9ou6yXUL9ru12gxgVutW/5nUtn5MVOp4N0Oo35+flwU3IqlcLS0hJmZmacoFw39XDDp740I4vmtbWHVt0KuQwU237fd1vGpOv2Qj4HwLQA3QVEnZQuYLh4KvS6jxbvwmjxNAZzd2I7M4dM5wZyrSvINq9gtnEZ1976FrqX3wI2L2E02L5lgB7l0VUedO1ziXje2GcX37uMOx+4p3MlCAIgSOCD9jxeqB7Bd2uHMBMM8aXyCp6ZGwfr4X3DANdaBZzfLOHjzZvgHQDunGvgg2oZ26UAiZPAzPwQj6fX8Wz6Op6YXUcmsXsl1jW/R6MRhghwYVQIAfvbwzkEGOHBoIqHk1U8nKzhJJphKkPbT7b/tA9c8hMA1hppvHOtjLeulPDO1RIubORxcrGFB4/WMDszxD+4chTphwc480wDHxYKOLi9hUc7VTzaqeKRbg3Zfg/d/ggf3ijirWvzeGdlHu9dn0cps4X7ltdw19w1bG5cxzdufBX3H+3hz/22
C/jgegGvXJzDyxfLOHcjj7OHW3jyVA2P31nH/UdaSCVHO8bEZhavrhTx6koJr68UkQxGeOxIA48daeDRIw28fKWIb386jz/y6AoeOdwK23S9M4OXVkt4cb2El1aLmEmM8ORSHY8dquPQyQ2sL7dxvrCFq9k+DndnUOgn8FFxCwGAn/t4GXd00mP9aPu1NwrwRieLl7aKeKmTx3u9LM6kuziT7uD/asyhjwT+yvJl/Eihju0h8ObVHL51vohvnS9irTmDL9/ZwHN3tfDM6SbSM0O8vF7AC6t5fOdGAfV+El9ebuGZg008fbCFA5kBbvSS+F49j2/UC/heI4/+Z1isOwqAIMCpzBb+v2c+GXOMKW0FwJu5FF78LDNMJZXAo60+vthdxSPBayjnXkE//zGS3YNINe7FxuaD+OX1J/Dt7hIudWexjQDIAInMCH8wdQ1/dHQZzUGAV3olnE3UMDfqjgF0V7hJfzDC+e0y3tpewjuDgzg3WsLCqIFVlJHAEPdtncNPrv9/Qg86UxpzQ2atVgsxC3OhM6yXzyRlMhkcP34ciUQC586dCx2XxIN0snHjPFcbi8ViuLLM1UbKNu535NwlQD9+/Dh+9Ed/FGfPnnXuI4tLw+EwPFVUHakHDx7E3XffjVarhRdffBGrq6uhM/XP/Jk/g4sXLwLYRw96nNhCl4Dl7yrsFaBr56kwUsHMjQJWOH7WwLHn6O+6XKvGBK0ues91Y5bWRzfNAUA2m0W5XEa5XA7DDHiE+8bGBq5du4b19fUwtEW9sEyxqHG73W4Xa2truHjxImq1mtObqkYNQ100hgvA2OFIFjBqGYlEAsvLyzh8+LDzhFEtU5egVBnZ5SjXshknuoI7G/KiE0bJ8o4C9FwuhxMnTuDEiRMolUqhlZxMJkODZ3V1FWtraxgMBqEBRmDOFz3metAQn2PTJ+rzLbnGyfXZFbpl55E1yCwfsz9pANXrdWxvbyOTyYS71efn55HNZncdJqGpsAjOmQZUQ1gYX84NQfsBypWUl339MOl33zjcLpoEticB9Ell88WlWoa2pEsH0CvdgVb2CDr5Y2jnj6KSWMBw4TSQKQOp9EcAzgH4+LN3vj4d/ccYyDNGAHDq1Cn8pb/0l3aBS2A3UHc5O3zXa59Yr7nKId9GU5eu8PWhGscjBHi/NYfv1g7j12tHkAxGeGZ+BU/PXcc9uXoI1q0cHI2A1U4WH2zM45e+9yCAAAkM8Xd+8gV8t7+Mb20dwqeDAp5Mr+HZ9HV8cXYDs9I9LgNFZe6Oc2WET0e5EKy/OZxHDwk8mKji4UQNDyWruCtoIhmMb5y1/WwNBPtc7dN2L4n3Vop4+0oZv/zGYdQ6aSSDIf7k187jtz68gvdmC3gtO4fXsmV8mC7i1FYTj7QreLhZwT31daSHA2z1Bvhko4w3rpbw8oUczjfPYIQEZpID/D9/4iM8daoW1qG5lcIrF4p4+WIZL35SRqU1g8dO1vHEnXU8eaqOE4s7cemD4QgXNrOhd/3lqyU0ekkAARazPfyrP/TOrjYHwWdhTtUMXvwMsL9ZKeBEoYsnlxp47FAd8ycr+Bd3VLE6u41gBPz2lTKe3Sg58YDqGe3D1jCBV7s5/K/VA3i5m8cIAb6aq+OvH/p0F+9dqc3i2+eL+ObHBbx2JYf7DnXx1buaeO7uJu460MenrRl8+3oe37mRxyvrOdxZ7O2A9eUmHl7oIJUIcK47i1/ZLOJ/ur6IEYBCcojftVjDU4UWvlhoIZ80KzUGsF+bSeDlwg5gfy03g0P9AZ5odvGF/ie4Z+b7CIrvYZBeQ6p5F5L1M/jfNp/F36w9hlE3gXR3iAOJPprDJFqjJLIY4B8Wfh0zo+0xgK4hJ0Fw8yDHMMy3P8KvDu7GP048gSESOLJ9Az9942+GqYNrtRpyuRwOHz4cOi5ZZqfTwcWLF8PfdKU+CIIwCcTKykoYCqMOW+IcRjzQycoQVe4hU5nKQwx35uXNM1AWFhbw3HPP4fHHHw8dlnslhu7wlOd2u42ZmRk89NBDSKfTeOutt/DBBx+E2PPP/tk/GwugT2U2KJCzAMoCNl7vErw6IBRONg7R3mMFP39zgVGtKweF5euxsQw/IThztYOeRwBjearJHATMBDz1ej2MQdLTRVVJ6WbIVCqFYrGIYrEYhr7wP9tGtl1zZLNchu1MsgQHgwGq1SrK5XJ4WuC4YrnZZxxb7VP+poLDhiPRQqXlymUsflZjyMalq/HGSekCXzbeNRTqnwkWesiLxWJocTPjjh5koGVw8ujmUC3b9oP9rOQCIZPIx/sW8LDuo9EIlUolnOil0o5yKpVKYRpQLjkyTIWx5fV6HZVKBfV6PQTljB9UUD7JW3qrZD3pk8DwbwTdSh/4PPJKahhS+SQSCST7LeQqHyA7ej/k6/bbb+/EOmbKwF+s/n4Ad3/2egrAH/jscz74q/gEBOxP/TSwfg692S6GIyCB3Ss8Si6QaJ0uUaDclula+bSffePu6reb8iLA/YUq7stv4g8ffg8fdRbw3doR/IULjyIVjPD03HU8PXcdZ/J1Uz5wKN/FkeINAMA/+/gYfuLMZRya2cJPpq/gd+cuY2V7Ft/pHcL/3jmNv9J8AF+eXcNzszfwhXQFSezOlGXrmkgEOIkOTqKD3xGsYDgcYWWUxZvDObw1nMMvD46iPprBA4kaHkrW8EiyhjNBA+nAzW8ucG6fnUsP8MUTVXzxRBX3HGzgv/g/70N2ZoDHT1QwMxrioU4ND3Vq+EMAmqMAb84W8XpuDn9r+S5cPf4w7mlt4sH6Ou5Kfoo7Lv0afuT5u/BPL5Xxf71zCIkAuPNAd0welrJDfPXeKp6/b8exdK2axkuflPDyxTL+398+gtmZEZ64s44n7qzhydMN/J4HVvHvPriGzU4Kv+1/fxAjBFjK951ycjgcIhEEODvXwdm5Dv7QmVVsDRN4c6OA76+V8DfePoZL3zuNM0tNDO5bwfETG3j4sw2blldd/EnKJ4Z4JtvA3TMd/MFrp1EdJPFT5Y1dPJdIJHDHfB+//4sV/NTjm2h0A3zvYgHf/LiIv/3SAvLpIZ67q4nn7mri332yimEAvLKew7ev5/GLrx/GxlYKTy218MyhFn7vwSp+50IVH3ZnkQuGeKWZx/94/QA+6hzFg/kOniq08KViCw/ku2P6bzgc4kh/iN+x2cPv2OxhCyO8k0vhxfwM/ub8fViZeRBfaPXwxUoNDw/fwoHca/h3D/0ifucohWbtPixVT+Lj9Ufw02tPYRsJNBGghRnMYXtXexWgq0cdANLBAD+Md1AbpPEJlvBjjV/d8bR/tgeM4LTX64WhLSTu41LdzvFi6HC/3w/jzq0OIO7p9Xq7xlKdWNwbaR0JxHg0FjY2NtBut5HL5W5JzzETXKWyc3p1KpVCr9dDrVbDoUOHcPDgQVy6dAmdTmeXPIyi2Flcfu7nfm4XcFNAp+Sy9hX8urymrvstYHZNZAWNFqxryAX/4yZBZl+xHlSCZ93QubW1FSrOQqGA+fn5cEC3trawsbGB69evY2VlBZVKBYPBINx
wyPATejQJ6LkTmqn0rl+/jgsXLoTxpRq6YRmVYQt6CAyAUMG7PDHsP/b9wsICjh8/jtnZ2V2bPbWP6QHXsed3a0zoUpV604GbcVnqfdd6clJZsiFQ3Ch69OhRLC4uhh5uTlyC0OFwGIJ0AvPZ2dkxDz55kMtxBOb87lKKtn2WL/W79rcqjagyfb/xeXxxT0Sj0cCnn36KK1euIJ/P4+DBgyiVSmE4i6ZIVCOy2WyiWq2G/K3Zgexzfe2LSz6Plu1H3zWWXM+fIMcmXuMrzxpHrnKi+scnvwCMGYYMh2PYnJ78q2W98847E/OgB38Vi7gJ3O/G63/vF7F4F4KDZ5CazeFgqoXDM00cmWniUKqJwzNNHE41MZ/YiZl2gXBX+1zzeNJ1bLf+HjUvbL/HMXSGI+CD1hxeqB7CC9UdcPnM/HV8Zf4G7snVkEjs3h+lfWzB99XtHL7VO4hvdg+hMkzj6cwOWH84vTmWn93XHl9/ro4yeGs4hzcHZbw1nMPqaBb3J+p4OFXDw4ka7k3UkUlMjvm3c4i/bfUTSARDpJLukDn1mFaCJN4rLeGV2RLeKCygmy/h0X4Tj3XrmL86xGPpBg6VduR7M0ggOxoiFTGvRghw7kYeL10o4cXzJbx5OY87FrdC7/p8aRsfVnJ4/lQN5czAy2uWD7QfKlspvLRaxPduFPD9zzZwPrHcxA8dbOBLh1o4nNt9kN5eQJgNwbT7qgZD4O2VDL5xroBvfVzA5eoMvnxnG8/d3cRXTjVxoDDA5dYMvnM9jxdu5PH9tTyO53t45lALTy838eiBDtIJYLMf4PuNHL7fyON7jQJqgySeKLTwQ4Umnio0cSKzjdFoHNypTriRCvBKMY2XCjN4rZjGQn+Ix+pdPNq5irPJV5EqvYth8QKarcP4zupTyFbvxHPtLIb9ROjN1pS7Vt/wM6/Z2toaS7m7ubmJbre7Y8zccQdyuVzoKWcZ29vbuH79Oq5duxZuNqUjL51O44477kCv18P58+d3ZQij55v7ydLpNLLZLEqlEvL5/Nhp4JSfXFkffeYM1ciNbDaLM2fO4Otf/zruuOOOUOdP6xQKgh3HIA9/Y53b7XaYzaXX6+GVV17B1atXEQQBfvEXf3H/QlyOHj0abhKNEpIKvBUo87u+A7uzBZBcYF6fq/f56sP7bb5zpi7jcogeEEPPpMadM985N4QWCoUwtKLf76Ner2N9fR2XL1/G6uoqOp3OmCeMz2C4ge6AJsBmDPGFCxfCvOh2pUL7UmOLFVhTydsDfmQ8w+/JZBLHjx8PU/O5gBkFufWoqkWvn204hNZN66xAnnXR613EyVkqlXDnnXfiyJEj4ZG/1nDkrnLNgMHQJV1lGI1GYzHmLs+59cRYI9BHPg+YDxho39vPtn/ZlwxbuXbtGi5cuICtrS0sLCxgbm4u3PyphwkRpHNTqKbF1DpHgWZX2yaRbVfc58QF6JME6rQA3VWnuAA9qm4ugE6eZMgdYyj1VGbScDjEu+++G8qkKMFu6jcCgDtPncJ//hf/Kq4PSljpF3B9u4CV7QJW+jvvIwQ4lGri0EwLh1M7oJ3g/dX2QfyTxll8tXARPzn3kdeAsWNtPVg+Z46Vddp3PgPIt5Kn1w1HwIftOXxn8xC+s3kIiWD0GVhfxZn8Tsy6XSV0yVz+dmmQxzc7y/hm9yCaoxS+MruKZ2dv4IF0bVd8uY8vXDweBAE2hjN4aziHt4ZlvDGYw9VhFmcSDTySquGRVB0PJOvIBUNcGmRwfpDHU8kNzAb+UDEr0+2Geg1n4P6dc+fO4Ytf/CKGh4/h1UwZL88W8dpsGUMAX+w1sBUk8O3ZMo4OtvB3Kx8iN3LLbDuW3X6ANy4V8NKFIl78pISL61k8eKyFHzrdwBOn6rj3SAcvXyji4xsZ/PZHNlDKxV9R3/kOXGzM4vurRXx/tYhX1vI4kNnGUwebeOpgA48vt1BMu+c/x9+3183uv3DVS2mlPoNvfVzANz4u4OVLOdyzvLXjXb+7ibPLW+gPA7y6kcV3buzErl9rp/DUUhvPHGzhywcbOJLd2Yt2eWsG32vk8euNPF5s5lFMDvFUsYUfyjfwZKGFi1tp/FqtiB8rV3FPJpQJGI1G6I9GeC+XwsulWbxSSuPKbAoPNHp4tNbEI9sf4GD2dYzmPgKyGxhtnsRw4xSGa6fQq86h399Gf9DFKNVEv7mTIpJla5jtlStX8Omnn4arr+yrubk53H333aHOUVzAVMiXL18OgT7xQCaTwZ133olGo4FLly6NAXTqas3ExiQPTNTBDC4McwFupnve2toKY9bVIDh06BC+9rWv4YEHHgiz4e0FoAM7YS2ffPJJuAGWm2Tvv/9+5PN5fPTRR3jrrbcwHA5jA/TYIS4+wWpBh8szvnsy3WRye70up/jy9Lo+u+J3Nb5J0+0xFzbvtXHRfGfsEsE8j4BPp9MhQNK0dPSQc0Jrkn2GTwA30xXSGOAz5ubmsLm5iVar5QQD7AcqdZsTnSE5Gq6j/aFlMT0fwz98Xh6NyddYdCvMrFdMy+KE0AMBOCk1VIflsWyXsOQE1fFTS58GGC1my5+kmZmZ8Jm6GVR52gWS9T+fsvCBFjuWFDpW6Ksi5bU2RpBpOsl7W1tbqFarYTYhHjBUrVbD2HLGoKsBaMfK8tp+kc/4mFYgTis8f9CJPKKbk/VgDbvUfEvPAlBK9lBObeBsphI+A9g5cXJzkMb17SKubxex0s/j9e5B/PPGaVzfLmBrtBMz/Peq9+OHixdRSuxkaohywGgbff9pH+h3V8ij1SEqe3zOnmQiwH2FGu4r1PBHj32ID1plfGfzEP7C+YcQBMAzC6t4duEGzubrAPzGFOlkqo0/WLiAn8p9gguDIr65dRB/pXE/+qMAz2ZW8VxmFfelGwiC3TpyUrsPJLfxfHIdXwt2QiyqwyTeHszhzUEJf7N7AheGedyRaOPCMIcERngsWcV/lXsv7A/fc1Q+usB5MpnE2toaVlZW8MUvfhEHDhwA+l38+LCPH2+vA0GAizNZvDJbwv9QPAoEAVaTaZybyeGRfmvsOdoujkkQBMjMjPDk6TqePF3HnwyuodqewcsXi3jxfBH/6B/eiWY3idZWEolghJcvFPHX/v1PxsqNMrJ3ngGcKvdwqryBf/+eCvpD4K2NLL6/WsT/8uES/vT378B98x186VAT2eQQf/vDAzg718Vff/pTZD+DINRhPr3mwh0uOlLexu97rIrf91gV7V6A71/M4xsfF/DH/v5xpBIjPPsZWP+TZ9fxpx9YxbV2Ct+9UcB3buTxV95ZwsHsNp5ebuKZg038jsUafs+BKvqDId7vZPG9ZgH/YGMB//nlY+iNAowA/KONRbxw37tQ73oKwEPtbTzY6uMPXR1iIxXg1eIsXiln8ffKTyG//SS+sNnGw5+u4T68jszcB0idfAHJYYDU2nEUly4iMdtB89x92Pz+M2N9PxqN0Gq1cP78eTSbzZCnaJDPz88jCIIwdzlJgbb2NUGz/V1DgXX8VTZQRlqsoDLTbhbVkNxWq4WNjY1wz+Beid
gjn8+j89mJu4nEzkFJjUYDhUIBi4uLyGQyaLVasfVYLICuYCEKfLgEZRyPkguwW6+KkgXhvM96mwhEqPy4JMINgrzPLmExxgm4eVKoerdGo1G44ZMeSqY/ZHl64iSZhP2om1EJVrkTuVgshgNs+0z7lWWQYdVCpTHiA938zA0cNFhYlstroYyu12i/a330Wfp8joUuo2nfs0x+V2OD39XTTeGgYTh8ht6jBiT73gLzqI1vk8j2cZRCsf1qST0V5EeGCHH1hfseeCBWtVrF2toa6vU6kslk6D1nNhZdbeFYsY/tCsm05Gr3pGst8Ir73L0YDdq2OPfvxYsyqQzXnFIHgaYCpVFv790Pg8klj9kvC6keFlIbuD+omFVK4D9e+RpW+kUUEz3kEv0x/rZzh+2O8p67ZJpvnPSzldUkdbL42pzAaAysn+vM4zubB/HnP34IAUZ4ZmEVX5m/gXsLbrCuOi+RCHBXsoW7Zy/gjxQv4IN+Ad/sHsSfrz6ARDDCc9k1PJe5gbtTTWi1tB9s2227y8E2npnZwDMzGwiCAI1hAr/SP4j/YetO9JHElWF2rJ2u/WEKzDVGlzIzkUjg4sWLuHHjBp566qkdcI5xvkgEAU4PtnC6vYYgCPDfFI7izu0uzvbbY8+xz41yYMznt/Ej92/iRx+oYjQC/uXbc/hzv3wCg2GAZjfpHH/XvPT1ZSoY4bGlDh490MbP3r+KZj+Jl9by+N71Av7xhXn0hwm8spbEy6t5fOXoeD51l0z2zcFJsiKXHuH5e5p4/p4mhiPg/euz+ObHBfz33z6A/3hjFk+eaOOrdzfwzKkGfvLkJvpD4I1KDi/cKOC/fucgPm2l8cSBFr58sImnlxr4o8tr+KPLa6hsJ/C1985iGwEs27vqM98b4OsbbTy/1kRvMMC5wixem8vi/zhyDH81fwpnah089EkN9zfP4/jcd5HI7IS75U6cx9oLT43xw2g0Ck/p1n4YDoeYnZ1FuVxGu90eS5+o99MTzjhy/s8VYaZSZAYx7o3ic9SRx/uCIAgTJeg88Bl0nA/dbhfr6+vodDrhnrxpSeVdPp/H5uZmGLLb6/WwubkZnn46NzcXGjVxKBZAtwNhwbBaK3ZSWu+PCihfJfVea8Faz6ICX/5GxiAY48ZOLovoKZAKBoGb+aa3trbCUBVmVeCmUv7fbrdRqVRQrVbDFIlUtnwW20qG0nZpPDaT5x84cCDcyKeM5OofjW3W/iRzuBSW9jlDa0ql0q4jv11jqcaGAmLX+OoqgYJ5C+41I43ylyXf7xpTRtBuwQ1fdgOohr24gIsVLC6D0tWvtgyti77ruFlFalMkMmyHGVl4Ym2j0Qi9X5ubm+EqBbO1KI+5eILPpNKOmpNxyKWo414bt3ylqPtvBWSr7HLJq2kMEXuf3qv7HtTTY59xq95zrVcUyFBD9ma9R/jLh76Jj7YWcXq2iplgt2d4EnDiZ5dDxfaPa35Fke3TOMZxMhHg3kIN9xXr+GPHz+GDVgnfqhzEXzy/s3nxKwur+MrCddybryORGOdnK4eCALg33cS96SZ+unge72+X8Y3OMv6zykPIBgN8NbeG57OrODXT9vb9JOMCAArBAP9OegW1UQpvbM/hj2cuePeA8d1+VsN/a2sLH3/8MbrdLr70pS9hbm7OyfdKv7ezjn+ns45kWLfJ9ebzfTwRBMCPPlhFrZPCh9ez+KPP3XCW4brXlm95h2NVTA/xtaMNfO1oA7mZEf63Dxcwkxjh7ELPWf+oPXKTyCf/EgFw/+Et3H94Cz/7zAZWGyl8+3wB3zhXwC/96kGcXOjhubsaePauBv7Ufav4j+5fxWonhe+u5vHCagH//fvLmEvveNe/vNzA3zhxES+0ivht5U0EwY4hbZ/rkkMziQTONrdwptHFv3NhDbWZJN4oZ/D6fA7/18kfwszwCTzcfg2PZb6LI2/NhKEm7GM6EujkIvYJgiA84dMmG1B5wVVw4gEeuEdv8/z8PA4fPhzqQR7sqPvLuAer1+uFmI6k+9kUmwwGg7Hw1iAIwrNo6vU6lpeXp9ZHOt5BEISnPjNjYDKZDDP3ZTIZLC4u4tq1a7HLnirExSVILeiy//sEpxUAPiFlQbpea5c1FNgQfNGTran1yCD2eQqEhsNhCLQZ2sIDiTS0pV6vhwcYATezxNAosOEKvEaVMNtH67NcLoenNbr63vYL206ix1XDQHzWZLvdxtraGnK5XNgnOq46djrJCMLtioeCwTHhZMqjFz2bze7aQEpwr5NODQeOM/mOk52Gl05KGmG6EZQWvPLgJADk4k+XENTvPtLYfL7bTTrkRb7raZ8UVJubm6jX66jX6+EGHVXEGp6kcZQWdI5GO0clc8NoHCN6mr6KArZRxo+P731lTVOnOHQrwtrOef5nX5QxGuLC+/YLlFuKMjz0u/JLZjTAw7m1z9rlX0nV32zb9Z2fLXCzdbEOFPI1cHPFztWGKKPKBRDvKzZwX7GBn/4MrH+7chB/8eObYP3ZxVXcV2wgEbiBISkZAA/ONvDgbAN/ovQx3u7P4RudZfxH6w9jPtHHV7NreD63hjtmbsYM2/rZ/hmnEf7I7KcYpS85Abhe7zP8e70e1tbWcO7cORw7dgxPPfUUstnsWH2ixtB1TJtLz/vGwD3OAX7vD23Ife49bCzPOgldfHWz7HHd8wsPr+J33lnDYrqH8uwAQGKXvohqm+1zn15wYRyl5eI2fvKRKn7ykSq6/QAvfZrDNz4q4D/85ePoDwI8e7qB5+5q4ofvqOF3HN/E9hB4p5rFd1aL+B8/WMb5RgaPLrbw8lIB6eURTuS6uNBM48NaBs8sbWI2sTtFru3TIAhQ7g/w9I06fuhqBb3tbVwozOKNxaP4P5b/KC4/UsDRwxWc/OQqTn5yDYc2amEmuCAIcPjwYWxsbKDRaADYSUPtWpW1+Ic6Wucwy93e3kYulxuLfAiCAPV6PUy7SKdSIpHA7OzsGEakPtXx6Xa74SqljsdoNAp1qUYJ7JW4SZX7hJiGu1Kp4OjRo1heXg7nWhzaU3Z2BYdWiUQxq74rY/s+87vtOAvUdfBZH3tSKL3fVIKu2HeCIeaW5n28F0CYOogb72jdATcHh5sUbIpA9pvGTCmzplKpcFdytVoNNxv4iAqefaTPsWkXXUCLz2Uu7MXFxbA+1viyylRBpms5Xp9Jg0QnEds7Go3CZxJgutqgHhEF3eoFt6sitGDVY04wr3X1ARSXQepSzj7D0t5j+ZSCxOUt73a74Xur1RozCG02Foav6HjbGF4agj7QORjsnLrKNKE+coEc224VjHsB+Cxnv2kSSHcBzWnut9e6yle5QwVkPegWoCuf7cVosHWwbdG5pStS9jqST4lF9Y8FNi594JPnVl4zXtUF7MfAsmMP0+S+AO4t1HFvoY6fvuNjfNgq4Vsby/gL5x7ACMBXFtfw1QNrOJuvIZUcn0+2XclghEfSVXxhtoY/WT6HN3oL+DftJfzx1UdwKLWF53NreD63jiOprrPPfP1qAY/dT6L8o78PBgOsrKzg4sWLKJVKe
P7557G4uOgc77iGl4t8MnKSMRJVFsuwzo2o65V09TcIAtw9x/DK3fHmUfWyY+HjK9/899U7MzPCV0638MypJv6L4Qgfrc3iWx8X8b+8uIg/80+P4rHjLXzlsxNNf/7sKn7+7CrWuwn8+moB310t4m+dW8Zscoi17gySwRBfXFzA/+ux8Ch5b5tYJ7YrlUjgznoHR9dr+Nqb57Gx3cM7B4r46NgSXv5dzwIIcOyjS+jfWMKnX/9jmKs18dx/83cwvHApjK3W3OUug9umg6YeBBB6xev1+pi+olOU5WpK51arNRa6rOUBN52BxHDATbnCOHQNS7kVSiaTKBaL4RklyWQy3B926NAhFAoFlMvl2IbAVLWxAAvYLRyjFLMrNIb3KRDUz/o/P7sEMgEdgBC40fpi+h1VgloHDiiXcuhx5+lU3BFMTyZjtzc2NsZiz+2x8joRrMd8NBqFjEOwqTFcuVwO3W7XK5BYdiqVGtuJr88jg2hbLVjkhNrY2Agz1ahQZV3tYUXajxoTZscVGPd66f+68sIDB5i/21rErIuCcT39leOrRB5Qo8ylZHwAXdvg6sM4ykqVqXoK2O8E5gyb4osbO5nCiqC8Wq2Gu+bJH8wtSz63qULZL9aLrn1L/kylUuFBWz4leisgdT+vt3QrBkGctk3TbtbDZQySB/U8BWtY8V4Ni7NpVW+VrBHpk+kkO3f0fr3eXqNyPwrk+cZA5QdlrcoErYvqBx8wd4FOHxC9t9jAvcUGfubkJ/iwVcI315fw5z+6D8PRDlh/bnEN95caCHBzjttVRQBIBcDjmU08ka3iF0Yf45XuHP5Newl/+PoXcHymg+eza3g+v46DqXHj2CezbTtc//E1HA7R6XRw/vx5tFotPProo1heXt4lF5QX9LOOiR1P3xjauruuszzh0ul2PFyOurhGgzWASVFgKS4gt+Xa/nNd76rLzv/A2YM9nFlexx/70jo2Wkl8+3we3zpfxF//9kEcLvXx7Ok6nj3dwI8dreHHj1WxPRzhn12Zw3/59nH0R0lcac+O1cPWy9bFZlKjvsz1B7jv3BWceONDdLe2cG0uj/cOLeDyT3wdg/QMNudLaD76AIrrlTBMhQcu6kuJeEf3lqmDKQiCsYMYNeMdDXMF4BrVkMlkQllpZY1mxtKwl16vh/X1dWxtbd3SRlH2J1NsE9slk8nwrJFisYgjR47ENgRix6C7hKYOtjKjC1zZyaRMzY73TTifsFWgxxhaBWXcFMp4KU5SDhSVEnORc5MBN4QyLAa4GTZCTyY9l/ZgnEKhEB6GQyZyCX8FwLTmRqOdncClUgnz8/Nhdg7eZw0hfqbVyE2XLFtBepRQGI1G4VHvzOhiBaFlbiuoXeE21sDSJS0L/nk/T2dliiKWR4HBOqglbpWHetdtdhYdhyhgrr+5FJdPqNvYWjUA+U5eoreAnzWEpVqtolKpoN1uh8C81WqFWVi0X7Sf1SCh0UdBYXnQKvUgCJDP5xEEAVZXV0Mh55rjUf02jeKMQ24lNrket/KMKCBp6+ACSjrHgfH5oYamjheJ88QlP26VJgEHK699fc85bQGp8qPPmLUhB1FkveC2fFunSe3W7/Z/rd/uto9wT66Ksyfr+JmTn+CjVgnfWF/CXzh3HwajAM9+5lm/r1h3lqvlzwQjPJXdxJdyVXQGH+Gl7iL+TXsJf7t+AqdnWvhafg3P5daxmLgJ1n3gXAGQDxC122288soruOOOO/Dwww+jUCiE7XT1g9Y5Su/6aBKgdf3nAus+gy2qbLuyb+Wjj+IYHLZffGVG9U/U/POVsZgf4CcequN3PFBFfxDglcs5fOt8Ef/ZPzuGZi+Jp+9s4Cun6njuRB3X7rqBlzcK+Pl7rjrbYFcL7Hznf4pPiKX6/T5yF67g7jfexXx6Fi//xPNIbW/j6Mo6rm9vo1AohPHn6pCz8oRhKfzdNVa+vtI6acgyQ4qpYymfGN+uBoEdn8FgEDq9yuVy5LjEIeJH4jFuFq3X6ygWi2MnoE+iqU8StYNJ4KUAygVSXJ5rvut/3ORnQyv4LJehQNADYBc4Z15wC+jU60uv5Wg0Gos7Zw5tBVTcnEfPOTce8D7m5HR5vpUZSbZfEokEcrkcFhYWwmUXMpxajXZMGN6hOcYJzjRtm+1//sb4LzLQJDBqhRXrYePSdSmKz1WDTEOSaDlzJUJXEDhWtNI1Y44FCJy0dhncGoXaDstbrs8+IKakY6S8qYcycJVAQXmz2USj0Rg7+IGgnMKDWYls//O5nIe6osCxJx9xLBRk6fxNJpMol8vIZDK4fPlyGL4Vh6IMnr2CdVef+8p0yafbRb72TOIR9ruGZwEI564NgdF4zqgQpWnrruEswO5N4VaZRYEpC44njRP5zRca5/OSqrK2hoHVM1H9ZOWZ8owt0yUfggA4U6jjTKGOP3bHx/ioVcI3N5bwFz46i+1RAs8dWMNXlzbwQKkBjIZj8tG2N5MAvpLbwFdyG+gME/h+dxG/2j6A/6l6EvemG3g+t4avZNYxnxo/FI6f9cXx1PGoVqt4//33cffdd+Oee+5xevddfeoal6jvdmx8Y0De85ELTLvmuU0G4HtGnPnCvlMMo2VZmRI1J3ztmSSTomSl/jeTHOGpky380Ikm/sxXV/DJRhrf/qSEf/DGIn7xXxzHw0daeO7OOuYgKZFHAfpDYCYxvgpu+YXt17THxDUEvjzts/iNF3Hy7Y8wl5xB7cYqMpkMFhYWwv17xFO+PqEs08gCG14L7NbLLJM6jhhNHVK2zwmSre5k+cPhMNS/S0tLYd32qkeSySTy+Xy4kkBZV6lUcPjw4bGkHJNoqjSLurRIUoAE+JeuogS8ku5+dXWQFQQEbQTXXDZ2gXMLEkejUegV17jzfD4/Fs/U7/dDD/Pm5maY85w5x3kyKWPVrWWoROZkOyzTDIfDcBLMzc2h0WhEbhbVcdDc6GynGhE6Fi6h02w2UalUwlM3XUpRx8bF8KrYrbDlfRwHBdksS5d+MplMCGrZN7qRUvtTAY9+jhJ2LsWp/7kUThQopMDju274ZAhLv98Pw6Ta7Xa44bPRaISnsXE5TFNVsUyfIB+Nbi4FcsOvvriawn5UHrBjVSwWkclk0Gw2sba25uw732/Ke1pPVz9PAzYngX3Xc3/QSMEsx0WNKCoRzf+rc+xW4yNZJuWOS6arfNK5aq+xvOOTK76x91EUQIwylH2OhCi+iQKUWi5lnr1355kB7i01cW+piZ85eQEftYr4xvoS/vwHZzAYBfjK4jqeO7CGB8s3c6O76gIA2cQQz+fX8Xx+Ha1hEi90FvBvWkv476un8GC6judzq3gms45CMJ7mUuUD+zCRSKBSqeC9997DmTNncPLkSS/4tJ9t//nGjve49IC7r9xnZdg+t/e6fnNt6PM5xawB5nq+gjIXnym/632+OkfpGV+btGyXM9JeHwTA6QM93LW0gf/giXVstgJ890IB3/6kjL/5/YNYyG3j0aMN/KuP5tHuJfGXf+vHeO5UxflcrZs9u4X1Gg6HYfx0t9vFYK2Clc82Q95x
xx1hWXHCIyn79CRRiwVoKPjwijoj9URxGhnEQ7pCaTEZr+FGUQ2hi0uusWUkgIbu0NmWy+UijVSlqbO42Mr4lDatXMvYUQLKluGyhqxg4u/0nHPpg7Hj2tlaXwJXjTtnvLqeMqrxTfRqttvt0IPO5PRMw6ggWfPNqrVqGU/bR+bK5/OYm5tDpVIJrVE7iXmfglRdTWA79fCiKMA6GAxQq9UwNzcXxtC7GEm9DRQm2iYFFhZs6DX2IAJeQ2OCGXEIbEm6EsKJrh5z61l38Rb7UMfFxV8+gKBjQUOIYJqfe71eCMwZM97pdFCv18ONxvSaV6tV1Ov1MCWT7kK3vKvfbQiLhlwxvz3nAK/lGGpZqqiSySQOHToUGhPcrKxzTu+ZpExc9Z40Lr774/weVV4csDiN4RCHdK6Tr3XfAMfGggQLyqN4etr6aIiiVdL8L8rbaj+7PF56j/KngjolVbC+52rfuJ7vaqu9P+o67Q8XryjQtGUHAXC22MSZQgM/c/ICPmwW8M31Jfylc2fQHybw7IF1fPXAOh4o1SPrAgD5xAA/ml/Dj+RWUdtO4IXuAfyb9hL+WvUufCFdxXOZG/jS7Bpywc3wOfZzMrlz8ND777+Phx56CIcPH96lB/nu6h+V0ZYfbT/a/6L61zUGUYZ7XJCkz43a8Mn/XUaB1t+Haex3Oxcm8dakuvv4Xduk80bHYDQaYS43wm+9t4rfcnYT/QHw5rU8/vbLB9HuJTFCgP/jnWU8f1d1TNZwPrqMXtXPxDOFQiGUYdzIeejQodAJpKuzvv7T/WPKu5b4v6sclVX6n664kwcUvPt4nhs5t7a2Ynu3o4gOWxorqVQKW1tb2NzcDEOB4lBsgO6zFH1MagGMvc/+bpW+luGbwARXetBHJpMJT/tUQEom428EH0xyz7ghHh2bTqdDQNPtdsNDYRgDrPHqs7Oz4VILn0VQboUn66vMZAUcvehMucjMGtoXrvHQZR79XwG09p9rjDudDlZXV8MNozqJVIGqsNClQTWAtN/ZbruszmvUu6KhGMPhMPTm671qkE3aCBoF/OyEtXWx92l7OMbsX276JDAnKKenvNVqhZs9a7Va6C1nekPNHWuFiY+Gw+FYis9MJhOOF4EgjS2to8blq5BjH+ZyORw7dgybm5v48MMPI2OG7VyP4q84ANmlLOz/cUB0lML9PMiGamgYkb44XhZcaIgWZVHc2O1JNEnWqpJ2zRMXCI96hvWyusC4Vcb0gvE51mjQdxdwdtXLp2e0LfZ5UeW55Aff7y21cLbYxE+fvIBzrSK+sX4Af/GjM+gPAzx7YB3PHVjHg6U6EhNYuZQc4MfyN/Bbc9dR6Sfxne4S/kXnMP672hk8mt7AV2au4Ydm15ENdpwbq6ur+OCDD/Dwww/j4MGDu/Sga6xcAFTbFHWdS05ZmTlpTGxfAvFzkfN+C2AnXe+qi/7mwzy+e31GTty6+OoTdZ9vPFOJER492sCxchc/9ffOYrM9g9/z0OrE8lhvyinqEYbxqoyik4Hpp23fR62ssEztK463Db+zfanlaYgm20H9ps+nfCVOUadVEAThgUXNZjPchzUNWZ5i0ot6vR560YMgQK1Ww+HDh2OXGzvERQGDNlgFknZGlGB0gSeSLxbYMjs9xbTmCM552qfWw6UkCGy4BMJDiRimQqBI7y03bBJ8kTkZEsO8neo1d7Xd1V82HosTI5/PY2FhARsbG84MDrafNZRHQyLsZlGX4aT14koBl2Jsqka91palewEs7yiote3guy2P9S8WiwAQbgbhaofGnym/uPrH/q+/KXh0CQbLexpuQ+DrSo+oISzMWV6tVsNTPlmW8ozPsCUR2CmYJ7gPggCZTAZbW1thX6ng013uPu8Dy06lUjh69CguXbqEZrO5y9iNGjPt/yiFp3S7gLRPJtlrtA6TrrUyyvK0AlH7bPUgMaWlXf3hbzYF5371kQW4/OwaM1v/OHXQ9gO7l5ZVqVKBWc+5hpXYZWerEKOMQttGNUx9vGkNfR+odP1mr00kApwttXC21MLP3HkJHzXz+Mb6Afzlj+7B1jCB5wSsJ4PdK3Ra/lyyj9+WvYLflr2C1X4S3+4u459uncB/13oAP5Sp4NHeOcx+8BGeeORhHDlyZJfs8rXHBSjtta46ueaCDwBbueqaN677dvow/sbivcQQaxtdOiSK/31tjgL4Pozka09Uva3c1XIPFrfxK3/kbWwPduLXh0P3HNL+1bmrQF0PXiTmon62zyW5VjQIkBkOTAxncSXLtHOR/+mBlMDNkFl9jjqnmDHL5ZUfDAZYX19HvV7HgQMH9hROaOdBsVjE2tpa6EVPJpPhIZRxaeoQF5cl47PmfAzns4ziWKyj0WjMC8hYTm7sZP5xl9VFJlRwrmBPT/9k+Au9nww9YJwvveeMW6fHXUGoBaPaP64lJvVCE2hx02az2RzzxrvAEpmAhoI+m7H9TBnpGxeCt42NDSwuLoYAWMm35AYgtLIVlNvySS7BYw0ZCgluCMlms5ibmwvjrF2hOy5ejWqzfb6rzroawthyvhSYM/xJc5XXarUwxrzZbIa59i1vRNVV+4MAnaslupm20WiE5bBvXCsM6XQ6BPUcBxobg8EgPLhhOBwim82i3W7v6i/W2TV/fYZQnDFwURRgjnOfD5j4rp8E6PV6X/3tXOdv6hnnZ/sblQZlHV+36kF3gXD7v4Jh3/zQMqIAia++du5zjkWFpcXhKWuM2fvs6pj9bMfKXuMC+9oXrv4a/w6cKbZwptjCT5/cDdafPbCBrx5Yx4PlOpJGN6rTZTgcYh59/PbZS/id2cuoJov4lUoBf791Apv3/Fk8m6ji680KnszVkE7s9uy6+lJlpm2n7csoQ3YSoHS92/60dbAGnY/02a42+OqjZOWYbaurj+KQCy+5KA7Y135y/a6fk4kAAfwHB/E3O0803HQ0Go2FfhB02pznk3Su1k2xhY8n1bmkc1fj1F1Yy7UKqREGwHj61tFohHq9jo2NDRw/fvyW9/sQv3EfHZ/f7XZRr9djG497TrMIjHtXgiBwAlIrvFyA2QX4fYJANwgyWws3hdojX/X5KuAYfkAmYUpFeoypFBkzzNAWHkqUSCRCMM/NpMBNrz7BHOvtamM4AJ+BaTs52I5CoYC5uTlsbm7usrxcMWm81wInAkt7UI+PUZrNJjY2NsIwF5cidhlXVsD6xlLDYghQ+B//57Iax5cvrpTY3PYuoTVJiLqAgvYZgRK95fRkcpOwesv1ICE95bPVaoW8ZDfxRoEbWydb7yC4mQ2EhzxxwwsNTM5JPQ+AudKz2eyYkcEVIx44QT7gARGuurgMGlVkk/p/kmGy3+RTrNMA8mmus/PaKgwF5zbMi3wG3JQtt0ouOcxn8rt6omzomrZN79OylOzqoAu86eZx7RtLUcDbNyYuWecDXNbQj2qbHVvfXI6q13A4HAPrHzZz+Ob6En7po7vRGSbx7OI6nltcxf3FKhIYB+g2a9lcr4E73vkX+G8feADBwdP4ZmcJf7NyDP/l6mk8V9jE1/MVfDFXRwr
+ueYCzVHtc/WBqzz7v28sJoG7ScapBYmu5+9l7k9q561SFC/HeZbP2JymDH22a86Q9zSVtW7upByzqXlJvj0nFnjbuhA/Ke/pdbZcOk9Vho1GoxAnKNbQ/hkMdg4sqlQq6PV6U5326SI+M5/Po9VqjRk7zM4Xh2KbCdrYKKFDQediCjtwWqa1mnWiqeW8vb0devMYB85wE1UuLmFJkE4ATq97Pp8P49b5P73n3LxHgM6lEobUMFOMes99isAVF0WlzXrbuMxcLoe5uTkUi8WdndMmN7JrYhOgq8GkSt8Vn2/Lohd1cXExTD1klbVrIlrhqArXCmMF6grKdUz1xSUqBZm27T6loNdpHbUPtK/YX9ZTTlDOQ6t4mBC95fSYM0UiN3zasfDVL4qs0cV+Y0rQZrMZelk7nQ5u3LiBdruNI0eOoFAoIJ/PhwYPjcrRaBQaHTwciStF/X4frVYrMmWWJR3b/VJkcQDxfpcV97o4BgZ5mzzNDbzkd02FyXeef6ChVPtxUJGVs/zNygKVv3au2Lb5nkPlyjnOtqjXypbh+uybLy5DcBJo94FL5VcXGNdrXAaKtlXbb/va510OAuBssY2zxUufedZz+MbaAfxXH9+DziCJp+dX8cz8ddyXrwDDm8ZOKpVCt9vFyy+/jLvuugvHjx9HEPTxU7PX8AfmV3B5O4tfbSzgr20cR+XGDL5a2MQPFyt4NNdE0jF0LsPN1Veue3x9rv9NAp/TznUL3OxYuZ7t4337mzUkXO2cVn5HtcHW2ecU8d1v66cGXVQZdh65+JukYS8aZkmHogJh3/wbjW7uG7QA3YaoUC/7yrNGq85BYgRiBl8fETd2Oh1sbGyg2WyiWCxGrgS5yPJfIpFAsVgMQT+NA+rpOLSnTaKTBKuPkTU2zLWMagfHlqsp6xh2ks/nsbi4iLm5uTAmnErQxjURiOhhL4uLi2FaRY0hZ7YNhiTw3iAIxg5A0lSM6mn1MZWrz9gvCt7JuLOzsygWiyiXy6hWq7vivVyKgoLbFUZBC1M9eVof/a3ZbGJ1dTUMKXG1R40O1/KitpF1YB2BmxOaE0lBuAXlmsNegb3LKJhkSNi+U4NGjTjrKdcUifV6HZVKZcxz3mg00Ol0Qu+1ln8r4MqnSNgOBemtVgv9fj9cyen3+ygWi1hcXAyvYz9q/DwNDh7CpSEvPpAWpUR8AGga2k9wPg35wJr9n5/VqOa7Xb7V9K8az+nK+sRyqYD2C6DbPT46rq7/fGQBqJ1rWo5vzwmvt599fe4CMhZMuOSiC2D7HE6TeDbqWVHyz9cXrjBBYIR7Ci3clWvg/37sPD5u5fGN9SX8txfvQ2eQxFPl63h2/ga+sNjCYDAIDyFinnPtmxPpLfzhxRX84cUVfNzN4Fdbi/il1ZNoDRP4WnETP1LcxIOZ5thG1Un9P1ZTj653jdEkA2g/KcqgmPTsKANE/7ft5H92HsWlKENUHUjA7o2UvrJcc9RFtr+idCbnjwJ09abrfpKo5+lqPnDTGaHkWzVRvUNcqKkV1YhQZ55iT5V7icTOfqBr166hUqmEm6unJStPKO8ZeZFMJsPV6jg0FUC3QtkHQNnRLoHFDlIG8wlR+3xu5CRID4IgPNXz0qVLSKVS4UmejN1WoKeHDdFayufz4aYAFdoEY5rDOgiCEERqnnR6WnUZWoW2er2tANVJwH7RkJdEYmdjX7lcRqFQCD2yk4wjgnQqfP5OA0JP4HQpPmDHIKpUKtjY2Bjz+tl7+DyXt0h5xBok6kHU/QQ0fDRDi+4rsPzkAhwu4u8UoAp+NDSJgJWeZK6mEITTU05vOVdYNLZcAfmtgipfG+ySPEN/KpUKqtVquHlUMxExlIurLABC40MNYJt/3cVv1giaVGdLcT07cRVLHIrymOn3Sc/gXLHCnv9Z2Uhep/MAQLgkDCA0qFQhqRDX0L79IDtu+lyXXLJyOqos1zVxwOukcdY566uDGksW8Nhx0Xsoj6YFkXFlj4vs/LV12vk8wulcAyePVvFTBz/AuWYO360fwV+/8iDaF5M4O3wPX158FHfds+DsV9XZp2c7uCtzFT+9cBXnejn8q8YC/tzKndhGgK8XK/iR4ibuz7TH7p3UJv3f9qfWxXXdfgDzuLIhzthMArOTQK6rnEnkAuW++3SF3a7Eu+pndbN9XlTdbRmcH3y+4jj1gvO6KN7mM+zZNDZrU9QKAH9jWKbd/ElMYWWBlcnAeOrXRqOBSqUSRmdMI6NcfZlOp1EoFNBsNkMMy4x8cWiqg4pcv1vFpJ3Ma5RBbCYRyxAqFKwxwAYWCoXw1E4qrm63i36/j1qthvX1dQA3M35ks1kUCoWwXoPBILyels1dd90VWl8aJ6zWq80Uo7HnvEYP0FGFa5WcteDGBsVkTWGYS7lcRqPRcG48tX3KcugJVYBO5tNDlTix7Hg3m02srKxgYWEhTL6vpIpXAbkPrKhla0NaaOlqbLkeOMQ6WrDoEyg+4KWTW0OaCFAZwkJDTnOWM7acJ34SmKvXMwrsxaFp79H+HQ6HYUgQ49JHo1Fo2HFPhRo8uneC/WLLd3321SVuG33t5NhNC3j2SnE8JS7wo8opylgJgpuGPZ+lskbnJ8dDjXWOx61uEGX5VlZYYGKNC5ezxdXeScCCseb2uRYU+YwoqxuiAJNLj7j6wlXvOPy+H3zpGk8X2L25sjfAqWwdZ8td/MzMFfyLN67hjcE9+Adbz+N/+fUknlvaxNcPVvDIfAPv1QrY6M/gmQObSMIa2MCZTAdnMlfx80vX8G53B6z/mWunMROM8PXiJn64sIG7021c6GfRHKbwSK49VkeX8WPbEKfvbHt95FppiAvA48pTa1i4yosCbNM836UjXM+MA6iBaNngA7mTyKVb7aqgvnyx5L46+Nqrmzj1u16jZW5vb6PT6YQOPmJOLYOfbRga60GnXbvdxsrKClqt1i3HoQM7fFsul7G+vh5m/yPuikN72qqqk0o9SNZz6BOyNie3Ff72Pu1UAgsCbx0kvhN4c/Pe+vo6bty4MRYPSVC9sbGB9fV1nD9/HqdOncL8/HwYBsNrUqkUcrkcAISZYgiiqUD5HiVIohQY+8d6soIgCMNc5ufnUa1WdwFuq1z5zrARTc/GcWIWHF8suk6oSqWCtbW1cEXCesvtJLUT2QJtflbPuXrSLSi3Zfv606UobD/ZmF4Cc83EQlDO9IjMwKL7EegtVwNOn7lXBT6thW7Hzva/8gR51Bo62keuZ7g+R9U/LqiJAldxrtkv8D5NWRagB8HNbARqqOn15GnlFXuolVUoVBgqq9RxsB/t5fzWJXkfSPeBDJ+st9dZxe5S1KyPXjtJobtCVTS8cBL/xGmj735fmIyPdIxVT+hnnYs2netwOAxXhM+fP4+Doyr+8yeOIZN5G+caWfza6gJ+6YOTqPZm0NxOIhkM8ftO3MDP3X3V26YgAB7ItvFAto3/cOkK3urk8a8aC/hTV+9BKhhhbXsGSYzwc8tX8VOL67vKiMsD9h7ffz5SDGANxt1t2l23SXXSz/YeH5i21/rA+jT18P
GS1WW+/6OeZXVT1HV0Dtj2aR1cOlnvd62a2XL0LAi+KynAdo2D4j+GqWhsu+pmXQHgvVb2MB96vV7HwsLCLodkHLL15aop9xVNU+aec8m4wAGweznF53nhf/qbi4Fc99ic16PReAogVWh6aAw9oowlp+BrNBp49913ceHCBRw4cCDMdqExzzQGGFOkp3ZqWIMCZt6r7bN9QbICX39nxo1yuYy5uTk0Go0xcOib0DRkaLAoEaAzT6iSHc/t7W1cvXo19KITdOhzWIaCcJ2oBOAK1jVdoMavuQC+bZedWLZPlWfsCgfjyuk5pyHHlJqMKa9UKmFcNj3oUaEfllwCcJIyiyPMfUpBDRBbhv1uAYGdq5NI+9f2w36CZx/dSvkuBRynzqr4yZeZTAaLi4soFAqoVqvhDn1raOvSLZWQpt9y7Vcgv3IPzK3GoJM37PyxfeMDXS5AGgeksi166JHrmiiAFAfgRAF6H3jaC6/a6133uxw1PnKNvQIPNeLS6TTW1tawsrKCJ5988rP9JMA9pQ7uKV3Fz5y+ir9z8Qj+1vkj6I8S+LSV2aVPrZxkGxIB8IV8G4/kWvhPDl7BX109in+4uYw+ArzfzQNY9/anlmP1nAuYu/R8FLlWal1jasG5lY/Tyuso0j6clodcdbdjYvWaNews2XGwn225vnuV1PHkMmZILocaZaQrfE6xAeUjn6/4zeopfnbpPUYvELvpf5Q/tn9d7a5UKtjc3MTx48d3RYPshRj1Ua/Xw+9xde2e0iyyor6l3SgAoUJCfwNuWsYEC3bSq+AisaHKCDzRkx4HhqWUy+VwM4EuLROsDwYDbGxsAECYrYUnavKkUIJ23RBKhrL94APO+tluKFHvNMtkbvFCoYBisYhsNotms7mrPNegc2ldQQPLpffa1s8+fzgcotVq4erVq5ifn8fc3NzYigafzXFSj6ECdAvOFZBrvJgrVMZSFPhVYK5jTXDO+HKGsRCQq7ecYSw8DVQNnCgF75v0PiA0Lekc0faqwRC1SckHYKxis+3Ue6wxGpfsnPeBpv2mKAPU9d1XhvYNV9WY5ahUKuHgwYO4ceMGrly5EmaacpE9UMPuKQF2b5iye0n2St1ud2zecY665tuk8VFeixo/6/H0AQUfoLbPj3pWlOy1z+d1Lv1ljZhJz7cecX23gMqucrmW8KmPVE9ls1m0Wi28//77ePjhh1EoFBztBn7q5HXc6KZxvZvGf3jm8tg1rrHiXNRVtSSAP7F0HVf7GWxup/AzS9d3tdllFEXJDksu4177iH2jzhoX+Qw+/jdNnXyy29dnceWWb17FuYf8EOc6+3tcWecCvORR34ZGF+i38oSkeou/ExdYpwHnkk8e+PiF19hNqhq6afEViR780WgUhrRub2+HWNLXn5OIzyoUCqGzdJK8VNpTDLoLkLgqz99dXhvXBFJBoQPp+uwC7FaYqqDkZ2awYFlaV41HZggD49s5UKPRzXAatfJ8TBwFlFyeAGvdqdFRKBRQLpdRLBbDTDSWEe0zgyAILUrNLDIc7mwg1LRItq+Vkfr9PtbW1rC+vo5yuRz2hyob+1nBuYJw++KzrCCOq/RVoKj17cpbzgxAzO9dq9XCuHI98VM3fOrYuvjXNbZR3/dCFtxa0iW9uOQD6/a5dv7tlXyGO7/fLpDuaqfPwJtk2CQSiXAjM88ooNFKWVCv10OAHgRBuM9C+9jlVdIQOd08yg2kt9pHNLR1xcqGn6mCVblglWZc4KNl2PZq3/o8SlHAWL+7eN/lMPHxvAIuH39EKWgdTxuOYa/TuvpWTvQ1Gu2c4jgajfD+++/j1KlTYepbF80kRvhP77s0kZcnAdt8YoC/dvwTafu4/nX1j688V0iK1XUkl7Hjutf3vCgwHGfuWxnvIxcO8l3nAoVR5WndVTcC8VZnLKZSHKb/xynD1s0FoCfxhGt+cvVc79V22jMjNAmHjo96yCmf+bv+r2Bb55uN+tje3sbm5ia63S6y2eyedZ9er9lcpilrTx50rbBtnAs8k8FcQFsnsHYof+PzXAzu+t8+h+96gJEFxlones2VGTjoZBIVsq44QqU4lp8KadufamRwFaBYLKJaraLX63kVgRJDWZQ5WPetra2xzRU+4TYa7cRnrays4PDhw+EBRqrMXYre5SWP473zKR8Xj6mRRHBjQ1joLWcawVqthmazGcaWMz0hUyDpuNoxihrj/aAogeBTHBRCavRElR2nDlHX7qXNVmm4/r9dIN0+I67QdQE63dCsgFv7n/xpl3F5rQv46jIvgLFwLA2j2ytR8VCWMezMGtD6mcpO/9NVTu1TF6lcsnLXOlZcZMGYy0C2IMEl113l6H++76T9muN2mZ19qE4AdTDQCZNMJvHOO+9gfn4eJ0+e3AVeosjqbtv/rvGbZEDbfvSNU5y5Zu/1GTW2bj6Dygf4fXLTVfYksnVWrGOvi1O2aw7EMQZd5bAsvc41B6Lut3PAzjfXNZNkN40ELcue2Okz5Gx7WCfKTpW9jDhQDOobe2DcEGCZGxsbaLVamJub2xedxDCXZrMZO/QNmCIGXQfexQRWMdkJzM9WuALjwkqBNrA7b6+tk5ar5BK0+puvXlaZcumD1zD22wraqLiwqHq76m+FN+s1MzODUqmEubm5MD5ajQirDNWjrke7q4BjWkEbYuLqd6ZdXF1dxfz8fJiuz+U9188K3oGbSsqlXC25eEqX+3UpmF5vTZNIUM5lK3rKNT2iKxOLCrRJimWSEbZfNEmAT1LUvCdKiehvPoE+SYlNAvb7bdB8XqTzBdiJdWw0GgiCAN1uFwDCPS7ATc8XQTABOGWKhrHwM4E4+ZjhVfuxQXR7extra2tjm7L1XcPQdH+IZh6wZxGoLHTJVkv2QDYF+3H0AskF/KJkre+7/d3nsXVR1LVWj7muU/2pMpm8wM396XQaH3zwAYIgwJkzZ8Kx8PV1lMHjqmNUG13/+QC3S6+RLIB1AXgX+Zx2WoYPdFqQbg0Trdu05CprGtnv6pu91smFHaJ0lstYmWRE2efpdVGGuktHKd/T0UF5oKE1LgPefuaLsogyjCHKrjqrc8GGvfC3arWKSqWCQ4cO7XIE74USiZ1Di9bX1yMxraWpN4lai1GBDH+3k89WyCeQ2RDbMFuOgodp6mwBobZDn8HnjkY3T8hygX1bvtZfGc5eY7/b9rDN9l5uFl1cXESlUkGz2QyBAZlUlb4+P5lMIpPJOHdm27SLVLouhck0RAcPHgwBuvWIuwC6enatkHX1r5K1njUtoHoZeUAQc9jTW66e8nq9jlarFcaWa7iSa5yieOzzBpqu/nHVwdeXdq5aHiT5xsc+z/dsH9l+/o0G6nEAHknrysw/PB56Y2MDuVwOiUQC/X4/TIWq3mlg3PC1HlPgZq5zGpp6Cu1+GDY8iIMrZvZUXj13QNOfcu+NAnY1ulUG6DxXgGr7wHrTfQDM9r2Vu6pYXf3jAz5W7vjAahx+dfGRT6ZoGdYhQHlGI3AwGGB2dhaXLl1Cu93Gk08+Obbk7prnti22jva3KF06CUS4njVJz
8UtX/kgijcmlWmfP6m+cShOvwDRINlXnzjOD9+qeVz9wGt9fRXVPhcots+xOED/t8Cecobn2qiMpAzlHh0fBtN5xlAwlqP7fRSTuYwKft7e3h7b8O86M2ZS/7qIZ8loVr1JNFWIi0+IEcwqALBA3VUesNuKtwOh5bHjo8CHS9hYSz3K0rPWme96kl0i4/PjeDOB3QYJmVONGNaJnqx8Po+5uTlsbm6Gglzrqadp8b7RaBTGzqo3jgCdZatHy9X+4XCIGzdu4Nq1a2O50XXCWkCuAN41xuw3X3+oAqPy0kN1uGeg0+mEceT0mFer1TCcpdFojIWw7Adg3Ms9PqU6zb3aJ5YHbb18QMEHRFzzj78pr2k5vrInAZsfBCMnjpK2c4HhYTQIa7XaruvVq6OHEnE+apwxveWascUuUe8HQL948WJ4hoM9rVcBOzeX87tmtWKaWQvk7YqZbhZXuQDsXplU+TIpdCYKANnYWNd9+tnyepRu0fusovdd7xs3BecKzPmZoS2rq6tYWVnBl770pTCs0AUqJs2zSaArLtm+snspJpUfxcMKpLRvfY4uX9tVf7nklA/HxKFbkd2TytH6u66JayBOWx9X/7kMWVfddG5reRagu+oXBIEzJzj/57grSHcR5xDrwM2d7XY73Eg9GAx2hb7ovjzFmcPhEO12G2tra2i325idnR1r87TE+4jd2u127LJiAfSjR486J4IFkD6lbIE7f7MN0Otdn5WiBKn1zrsEswUWKgB89yipYPKFt1gvug1DibqXpAKZzJrNZsODi7jbmPVUkA6MZ1lhNhebdpEAwYa6+NrPWPSjR4+GGV106VsnrAuUW6+aqz90Iumyv4awaN5y9ZK3Wq3wICECdhsmEFeBT0PTAD2lKIHqAiOTAAX/8xmq9rsLUEQZpVHtcT3zB4VuRcBaZa+kHh7gZrifjfEmKSgj0NcVHf43CVhNS4PBAJcuXRo7qVdP7+XvPCWZ3y0gd92jgFy98QrSNYSGMkpXSF39rjyq/WfHxcWHdtzsGPgMAR+A0/Fl/VWnKUiwbdL62/hYm22Kn1dXV7G5uYknnngC8/PzznnvkwuW7Bz3tS2qr215+rv2gav9tg5KLm85P/tWXaPaMy2w9f0XJbtsW3zX+8qYJCct71jZ44rHtsaILUfL1lASBaiTiPdYRw0wHuas81pxj20H5eTW1lb4G1NYuwC94jXdu8OyWC8mg2D2PZbDc2H0esoklptI7ORT39jYQLPZHJt7ts/jkOLSQqGA9fX12PfGAugLCwtjg2gHMw7YsQNJsvFQLo+crxwLtvk5SuC6ymGdorwvPrKg29VODT2xy1NRy1WuSZtMJscOLtrc3ES1Wt3VJz7lkE6nMTs7uyumlUvqmuTfpdxYh2q1iitXroShLuo507pof9rPllRhEaiot5wvbvjki6Er9XodGxsbaDQaYSywPULdpdjs972Coijwv5dr49bDhg/5lC2f61Js9neX8r5VsH0r5UQBEh9NUlRxKcpYsTyuipL/E6SSt+m14ZzjHoher+ctY7+Iey5UiRJgJxKJEIzTs85lWV5DbzvBOU+t1Rh2XkNZkk6ndx1K5jopmHxs5bB62tkfFly7eF77zXqefeDRJXPtb3ZzmYINS/Z+XTVhOQrQe70eNjc3cf36dSwvL+Ppp59GPp8fK9u3PK/tiitXokC6zyDg/y5dPa08c+m/KJ3owwU+2RWnXlH19QFny2euz3HK9hlE9hq9Li6413HSa328HlVXX3mTxgMYN0hdbdQ9LGq46jUWZ1ketTiSGIHOS66+8/lqCFMOEZ+xHO5T2wsudFEQBMjn87scNlE01SZR+11BprVuXN51HQANf7Ad4BJ6LivK1s0KK5cneDQahc+0FBWWYuOYou7zgWO9zmZs8NWVz+RviUQiXCopFosoFotoNptjG9NcdSZRuTJumzQc7qRdpDLl/VY4kPr9Pq5evYrjx49jcXFxFzC0L187yQ+cQATnjPNlCAvBOFMkalpEfm+328648ijPhmusfILE9fk3klR4281UUYBS79XPUeAkqp/i9IerrL0C9WkVi4/2w+Cw88PyPfewaPpPGpma/pP5caP6dD+UhM4L9TaxXPX+53K5XRtKrRedAJ1AXENm+Fm97Oq5pyyzqw2uMxTYdrtsHgeQ2hU7C3xc/ez7rkBBwwjt/LCgyK4MEqATLPR6PayuruLSpUsIggAPPvggjh07FmbR8clhq1f5PFe7bJt9ctH2rwugucCavT4OufQpf4sKI/LJAZ/uVfKVx3sm8YfSXg3pSfrpVslVL59B4SLbvy5s5rvf4gBbLy2D8iTKwIkygHR/o/anzi9+p7OPRE85gLHUjInEzuGMzWYTGxsb6PV6yGazzn6alpjoY2lpCR9++OHE6/eUxcUlgKKAtLVu4iholulSfC5PiAW7URZ+VMiJJRv/ph5iX715rTKj9YDYNtj6uiYRl9Kp/JjRhbHofL6dFLadmUwmBMA6nsz5TuXrmxi8dnNzExcvXsThw4exvLw8MbzF9pM94ZNAhfXgi/HkzL6yubkZxplzQyg31Nlx8I1RFGj33eMCt7dC+wX0FQC4yrd195URdZ+9Lg44jwLTk5RrVHnT9Nut9nEUiPMBRc4BAk7WWbMLaXYWV7m+/mU85q20xbU8znedw6yr5R+2jWEuqVQqPF05m83uCo/JZrOhY0DBvW5Q5T0E7eph52c9s0Fj2+mdB8bBlwUDNkzCUlze8jlkXONmwQLfGW/e7/exsbGBc+fOod/v45577sGdd96JfD7vlJ32GVHGiuqTOPPNpVf5u+seq7eiytZ+c60ku65TXeKr4ySjhNf5dJCP4vCBziffnI2SvVHPmFQ3i0tccnmSEWBlQRzgrf0Ypd9dZVh+oSxxxaH7gDqxkwJzJf6fSOykpeaeu2Qyia2trVB+qlzmYUnATawaBAFarVaYLc8F0KP0myWWmUgkcODAATz++ON44YUXJt4XG6C7Bn4SaNaGkHTy2TAPy/D6PL1Oy7T1sHWJ8iZMAue27i6B5xOYPgVrBZz2g6//yHSarzOTyaBYLKJcLqNcLofL5JpX1C4b6tJsNpsNPXhqZW5tbY0tWdt26xhsb2/j8uXLOH78OMrlcriJydcOVUx2gxw3ehIU2DhyzVdOcE4DQ5eLXc/1/abj9HnTfoFzYPI+hiiK8lZE3eO7Jo6i1ufoexxQ8INCPiWs/6uDodvtot1uhytCNE6tvIsCVVH9Pm3dWY7GhhIQZzKZXeEbmpfbOixcgJ1OBA2RoYedwF03qWroC73umuJRwbx63G0d2O+6+qeARsfE9ocFH5NAkh13FyCygJzXbG1todls4s0338TW1hbuu+8+nDx5Evl8ftemeqVpDFSXLvKBfS1f7/U929X+uBS1Um3rPs1zfGA4rkyLS1b3+/SN/S+qHlYe2ue46hnHwWifqWVacB5XvtjyXBjMx7uu+zQ5ha27hrzZ+gMI9+URBzC9ou6dATB2grz2r8oUOj5Ydq/Xw40bN9BoNDA/Pz/2n69/J1EQ
BMhkMshkMrGunzrNom4u8E1i6/lWRtVYTAX7wO5QGZZHIexiIhcjxP3PRXZZjcDYTh5fWb6J6TIY9F3/d01MvZb1mZ2dRT6fD4+R5UYLWoe6SdVuvkmn08jlcmNhLnw+gX4Q7D5IwFK32w296MzoomVpmzRLARW+gnKCGPWUM76cJ38yhMVOJpdg+7edLABxUZRy41z1keVDBY57Vc62/q654rv2B4VcBqhtB2Xg9vZ2uB+Cp/8q71rZFqXUbpWs3ORmLMriTCaDbDa7KzzRttvGT2ubFTBT+SrQpleLSko3nCqQt5ll6IVXoK6x7a4wGd2bod91vnAOKG9HAS+7+qp60Oo8Befaz81mE2+//TauX7+Oe++9F2fPnkWpVHLWw5JP10TNj6j5r3Izaj7GqcskcvWXK1zUBUhdssjHn65r49K0csal1/W/ODrJVU/XvVqGD5hbTBVXfqgucc19n8NE+1rnmyXr8bZhu/zN4jx1ICifaz1dr0QigUwmE9aPoSz0nnNu6nk32mfEJxqHPgkLTSK2gSuNcWjqGHQ7OBZwcoJZS4//awfa8q2gmAQCXMLMxZA+5vKRjQ/XNrC8KEEaBXwUUPnAvksI2WdQWTEOvVAohOBVNyG4NqOSKbPZLNrt9thEAG5uGE2n02Ptc/UBl2ivX7+OxcXFMJZLJxiBCsEJ424JyOlN4gmf1mPOdtF75xOKPgNK66zvt0LTWs2+6yfVedKztZ+1b1zGrq8sF5+5FKMCt2nn1L8NZGWgb24GQRCuCLGvNI2evccF9G+Xkcly6akOgiDMtz07O7srblPr6DIEtT0E6xrnaY1IC7IJ3Olt54veL92ISgCvHnhXeQTuvsOXqCR1jqjXWsnH25M8lyr3dKXy4sWLeOedd3D8+HH82I/9WLh/x4KbKCPQ8o4LMPn+c/GWdagp7YchrqT6x9ZB6zwJ0FoZ6Bo331hGGSS+50U9y1X2tOQam7gAX6/Vcnxlanlx+V3nMeeydZK5ylAD2adjOFcJom3bo9pMvadedH0ugbcdN90c6usfHlh048YNnDp1KkzZeCtzgs8iTppEezYJ7EYOn5BwbfJwTYyoQbagwGYK4btvQ4lvU2hUu3yT19dOFZ4WyNrvWpadhC7B4ipvNNrxoOdyORQKBZRKpRDkMubK3muJG8F02ZpKZWtrK4wrtaR1HI12PO7r6+uo1WqhB85u+tT0iJqFpdFohPHlzFnOjaCado5j47PwtW52XH4QyDeh9wLOXd4NnRO6x8H2l6sek+qgvGEpjqCapGxvJ9ln38rzbFo9VYBWeVHATzpoyGUI6ZjtJ0BinbQ9BOl6IJtex3rZ+GEb/2rr6ZqDugFcZQj1CBU1vegK1rPZ7C6PezabHQPtNm+7PWBJ0z66XloP9o8CR323/eqaa2wr8+S/8sorSCQSePrpp3Hy5Mmx02WjAKbLMIyS6766ah1dZbv0zyQdGJemCWuZ9Jvylqtclw615VmedQFCny5x8bkPUN6q3NdxiQNYfXWP+2yXQ2bSMyfxA8G8y9jgfNMVrSgnBuulseIsT+WsyjGVWSq7NC+6bROwI6+63S42NjbQ7XbDfSGfJ+0JoFvArFaxBaA6yD7g5PIK6DWa6YXPA3bnYY8SAnE2pdjnR3kTrNfBCnJXWVaR6zUuoWutVsvYXPotFosolUqo1WrodDpj/WH70tYzk8mg2+2OWZqj0SiMA6cSZ99xstHjNBwO0el0sLq6io2NDRSLxTDfugXmnU5nDJjXarUxUK6x5Xriqcvytu2y47QfZEFT1DPtfbdzImu/8KUbiLXOk+oTp66u+TuJpukvXh/1Peqe222E8TnkeXsar15HkDczMxOCM59zwKV4VV5Egba9kK6y6GeVO6lUCtvb2860Yza22xoRUYBAx8r2h20z68R3gnYL1vnuyhjD/MeaFtJmpNETVG0udwUONjRG+0DbosvlDG2q1+u4dOkSzp07h7Nnz+KJJ57A7Oyss80uUG4NO3uN7XOfM8j2s77b/3mvazXcRy79alcE4soR3/+q73160dUO/c3nqJjUV9PIMN+11jiyz74VOeaTR1FlKli110X16SRZbeWF/mbLJXbUTC6uOiowdxHlijpEVNZp2dbL7nMuBMHOSuj6+jqq1Srm5+cnJgnZb5oaoKtAZ8N0c456aFSI+wCDCmPtTC0XiF6GA9yebzUglHxgncxiLTRtj32+BdXKnGy7D8xr37iEoW0ny1HFyTh0hrrUajV0u92xvosCS+l0Gvl8PlyGZR24YXRraytUktpHOuG2trbCFGH5fB6lUmksvrzT6YThLPV6HbVaLYztqtfrIXBnzvJJQtjXlr0AwWmuvd1AMC5ZcER+1ZelaYwGC6h0rOMCZ5cB6rt2GooLGPabNM45kUiEIQusk841AkTKQx7tPKlfrAJX3t9vg0/HVWWs1lGXjVXJWWUWJbPijLvLELd6JAgC1Ov1sXEgKFfQbvO3a752F2C3XneCBY4jn6UbUi2IZ9/RUO52u9jc3MQnn3yCjz76CIPBAF//+tdx+vTpMX3hcqBEkYsvbF8qv/j4S51BUTo1zv8kV0ICu5fLhiLYeHPbTpd8d/0WNUcsn+1lPvlA/jT3uN598yauQWRD0CYZypPK0wPXtBxXlISSGnN8jwKyqrtYvjoO9Xl2JS9q3Li/jUBd+2R7ezsMK+GpotZ5bPHpaDQKQ3g3NjZw/PjxWCtB+0l7zuKigJoNs8DAgiy9xjKqTljLZFaoKOn1LMf+bgfVBWDUIFDQbAGKz+rScmz7lRGtMrf95OobWzYZmMqEMeiFQiHc+Gk3NbiWpYEdJUzlZXOpDwaDscwwmtVFUySy7AsXLqBYLOLw4cOhZ53hKtVqNQxjYSgON8+NRiOvV9LygYs+D8A2SfjtN4Byle+aE1o3YLd3YFqy7bTP0XdfPW15v9mJskA9qjYjAK/TcIrhcBimELMKzsUvtu9d8mC/+IwAivHiuiLG75znrv6Ylnzy0mUo+EImtX5c4dOwFM3LriEyDI0haGdYDDeeWg+89bbbVI/6sptPW60WLl26hJdeeikMZ7nrrruQzWbHAIzVi76+0f/iAHMrE1w6ZBIfanlR1/jIlZxAjRl+9xkWvvrwPld7bT2t3LHt8Rkvvvv5m6vOer0t297jMjBc7YySm3HDdaPKsiCZ17uAuCsMxBoHfOc4W57VuWuJ88vW1TW2CuBtewCMbcLXE9ZtvSmf+e4KC2W57XY7TGXNaAKlaQy3aWkqD7qdVGo5+YCkNlQH3yV0+HKBa94bZwL7DpBwtUfr6Zss+t1Vf0t2stoBVMDuMmJcQoDCzYL30WjHi14oFEIvOr3RLoHnegZj2bl5k+M6Go3C00U1jp9Wqi6PDQYDVCoVnDt3Dp1OB6lUKgTlzWYTlUoF9Xo9zNhiy3AJMR8g3KviuFWaZEAAn3+dWAdrwPqEm+/+qLLtc1xGaNRzJpV/K312O4SjVQbqQeVvjF8EbnqA+CLgAxDm3o3Tzkn/72cbOWetrOE812epnHJ5PX384HpmFBC
zvObiIxeYHw6H4abzVquFIAjG8q4zTzvBt8a2q7ed4TIE8xrLrmXZ8BjmWN7c3MSbb76Ja9eu4eGHH8aTTz6JQqGAIAjCsCH2p7bJ6qq4faTX2vtcfeUDs1G0l7nJ9rgyetk6u/7X66Lmt+Vdn1Hh0u0uWRZVvu+/SXjE97sdMx+5xtuGprgMBde9k+rmq496tFUGTjKoVFa4rtP/NPOcPlNXXKL4md/pPVedyJBcyjwa27yGRrjd3A4gjCRgPvRcLve56vipQ1zYKbphj423oRxWefMaXYJwpRjzeQgsmNf/7DNYjpZhY+KUXILO1sMVU6fXqqD1MaarX2w7ffdpHZWBmb6sVCqhUCigVquFlqRlfCX+XigUkEwmwzRw+hwu0euJdozz4jt/Y8xto9HAzMwM2u32WCYWeuh10yfJxsLdDmt0P8hXLysoSfulCC0v63zQsdDlwmmeb581ze++Otr7bodgmwTqJlGUgqGSoaLRJWBNUQiMb3TkCZwA0Gw2b9kI2WvbJpXHWGkCKQ1JsECAsl37I64nz1V/n7yP0gM+8KXlsU5c/bOx4/YkVKZ9pJPC5m23GWYI6Onxa7fbqFQquHjxIur1OpaXl/GjP/qjOHbsGBKJBLrdbhjXz9AZDQNQj6NPF0zqTx/4iSOL7HWTgOe05BpDq8v4f1TbfXWP0q/TyOJp9I3FG75rfHzte2ac/60ud/WjTyba+xXXuLzkUX3iGysf1lD9bo3/IBjPha7z2IX37HNVBnBVkI4RPpuOB65qahnqbHHJGMahNxoNLCwsTFyddsmnvdJUAF2tiklLVNppCty1HL0nakLq/z7PgGsSuBjHei9cz3FNJp9A0frHvQdwhyD44ux9gIu/MxsLPej5fB7NZhPb29veOD8+h5uZ8vk8Dhw4gE6nM2Z80TulKeP4spsxRqMROp0OarUaUqlUeJy5ppfTZ9uxjwvOo0BVnAnhU0px73Vd6xKuewXn00xqzi8KtXQ6PZYlw86BadrIz3HGJaq9rvtuB2jfr7K071zyhuCKvM3fuaGaHlg6L26lzvsNmJQI0tXBwJeGugDj519omI+vXFvnOIBLf48DpnwAXonjo6ExwLhBRSWtm0s1tSOBOz3pQRCEXrVqtYogCHD06FE8+uijWFpaQjqdRrVa3XXQkj0RVcOmNK5dQbz2h0+OWz1oP/v6OO7v0xB5x+r8qDpNU59JMijOdVq+znEXj7pWeKd9hsqQKIeCD5DHeZbrOhdYt0aDq5xJDhq9xsoBy7vax642kVcI0Okg9PUJy/AZIpqYwvYDPeka+msNFsW4HH+mW9zc3MSxY8d2yfRJhsykcY6iWABd48csyJ1ksWmF1EKyTKvAVSe33fxpAbb9zUc+AOUSaFGTUQWoa5nJ1sVlhEQ9x1UnnwBjP3EJN5fLoVwuh5lRmGDfldddN1/0+300m83w8KJGozFWBzK9L6m/CuThcBhuoHNtWPQZLK7/49DtAC5xgf5en78fdVZDid9tbJ4FSHE9nj5haikKgNnrfEpwGkPGVU+dS9Pyjo/Il3ZPjAp2awARtBcKBWSzWa9XdNp67Ce5+p4KiEu8Vs4roNXf49Rtr2M77T2TnuMCinYuUMdxIyrHzW40HQwG4YmwmUwGx44dw8GDB1Eul9Hv97G+vo5msxkCeg21YX52C9Y1jp3/qTHk4kfW2aV/2Yeu0E07Z29l/tn+s5+t/neBtThk6+gCfFZnTtLnWraS796oOkTVNQp0u/5Xne8C2xZ78L8oQ8CWb4G2y3s+DWlf+eSd1QGu/ozCccrzGopi2wTsHJ6o93AFK5FIIJ1OhyHQnGt25VDbAuzgnUajgfX19TDk17Zxkv6z3+PyfiyAbndok3xWp+t+XyUVOJBZXJPDN+EsM1sDwioWBawWDI9GozEw6wKTfJZrs6tPAEXV3ZKG58RhWCpU5gVmqEs+n0ej0dh1UihJmZzt2draCoGGXZJSb7x6zEmc5Lrs7wN1VqBOommu/TxpPwGUFbBRpEJV54Y1iCxP7ieI1WfY58W5dlr6vMbezmcLkvR/dVzQM8MQCc7fSUaEC2xoPW436WbvIAjGDvAg6RK4yh0LFHw0CQhGKXVgtxx2fSbF6T/XfKDsAsZPR6VMVGDElZLhcIj19XV0u11ks1nk8/kwm4yGztiMMtyjQICgIF0/u8C6et5tYgVXv03qj9shE5SsEy7K0Ndx8QFjLdP+b/W2j+d8fOXCHfaeSeQC8b76TJIHPgPARS4ZEnVvFMax9/uSatg+s/3nMuRdzyXuUp6eRHHkADEOy9N5o/tItO/0+Yopu90uVldX0W63kc/nJ9Zvv2jqk0RJOpEU/PrAgN7vshIphBQA6n3qVY+j0Gw6J98GHQXCVlDYZTrXZLJx5y5vgYuRFVi5lgO1PVEMy/alUikUCgXMzc2FaQtbrRZqtdqYh9vVHjKyxpRrPdiPVN4+AWD7IApwxvE+xKFpwf5+0V4BlA+sTVN3ggnX2QM2FMHFd0q+jbr7bXxo3fnbpPGeZFTYe/cKNlxGhh0nlR/pdDqcc2rYa9o/AGi1WrvCu+I8//MiNb4ZU893tpdeY5UhChQBvwJ3veuzXUrdRXEAPsuMul+/u3hF5ZbKS5ZPY4ZOiG63i2q1imQyOZYhhqDcgvZsNht+pkMlm82OZYyhd05DY6i/7KmovE7fgfFVXtt3LvBr+28aHozSTT6d5gOTUTpwErlkjOu5UTzkwyzTAGX938fbFidYOecDtXqNXcGfVJcoXrdlaz0nkese5T0Xv7kMlahQXNdql4agaDm8nqEy9JDzfwXnmjmGLxrg2h+MCtjY2ECj0cCBAwec2E7rvF9yPHaICx9qAZxP+SgI9f2n5WvZUQI5rtK28ZFRCsEaDC7G4j1RSsXW2QofC7ptH9nyFPzbOvK7bkBKp9NhFpdms4m5uTk0m81wk6d6XS0w4/eofte6usbBAsVpyJZ7uyiqfZ8H+Z4/LbgcjXYy7Gg8sJ7IpmcTuAQJv3OPAcHH7er7KAXgeq5VIHHIJ2v2QlbBqlclCHZCHxKJBFqtFvr9PlKpVAjGZmdn0ev1wk2Y1uHgUpY+0LifpAeCaP9y7hOUE6Tb+sYxJOLy9iQg5FLi9hrX8+LM7Singa8u9jpN4cY9OhxDze7CTaW5XG7slclkUCgUxv6jcUcArqehBkEQ7m/Q8q1nnbyq89/qLV8avLj851s5cYW/uiiKj7T/fXznA0au51lvqIt/XDjF9UxfW3wRAj6K4nM7L3x4J47RPwkr+Xh8Ei+4+t51jV47qUzdm+GqE8tSPUX+pvxSRzH3nmgaWZ0fLr1ooxcULzEOvVKp4Pjx4850i0p7MXZdFAugu0CzS6nY71GeYav0XMykk8d6uW15JBUe+tlVV5+V6wLzPiHh6y+WA8Dp6YxS2DYsR9uv9yoYG41GYQaJ+fn58BQ7MpXGiOu7BexR8WhqQNn22PbbayfR7QYnP+hk+TLOtfQUJJPJXTm5abDxOhVMtgzlgdtNPn6I0/5JfHerZOWBygwaQJprGwDy+T
za7TZmZmaQz+dRKBSQTqextbUVjo/W2SUrPy++54ZHVV6j0WjsdE160RXIs094n43Nt7QXkB71m6sc3zOmKct1bZThMAl08TpmvgLGnSwEITxYiQZdLpcLz7AgaNeNqhoaQ8+fHq6km06th53f7TK/L7bdpQ+VtBylaQBjXFk/je4Adutt/jbpPhuKZvk+iqKcC3xGnDq4rvUZFVF9betis6HEqae+u+6JK7NcEQhRRgE92uQtlTeK6axMtXXWPuD+uVwutwsX2vutE0bl83A4RKvVCuPQuVl8Evn4I65sigXQfRamVWIKJq1Qs8pPQbx6u12hLPxsQapLcCrwd4FalxDylWX/s7+7/reCiC/1Lljh5ppU6hFxgRddEiLDMW6xWCziwIEDaDQauHHjBiqVSpjfXME5+5Xl0vJ0CYDf7OB5vyza/SY7geOCc77rJj678YzL5+oN5RhznOnh/TzGd7+MsP0awyjF7jLQ9WAwbQe9ocViMfSsj0Y7Kxx2KdYCgc+TZmdnUSwWw/ZQedGYIx/QY6sbSFl/ticqd7ClSc4Ml9HiKsNVpgtQ7zepop4E0u1nredoNApPVq7VaqGM11h1etdzuVwI3nO5HPL5/Fg8u+53sIBdM8OoN10zxSiYtyEx1tOo/eDzkltnWFQ/Rn3Xzy697MIhrt+n/d/Fe67xjiPDJhmKceR9FF4CbjpW9mIMa5+69ospr5NvNFuVD0O5Pke1W3EGeZPkSnYQNYYsz2ZY2d7exuzs7K5n8/qZmRnvmCvuZRz6xsYGOp0OCoWCsy4uuhU5P9UmUcsQjOdxbSoEbnacK05NO1V/0+UivTfOoNs6Wibnby5SYG3rboWznQDWAncxqo2xsvGbUbF8rvu0LQRbGraSTqcxNzeHhYUFXLt2DZ1OZ6zedpxcz7LPmYbUOIkq6/MCztOA4Kgypr3XJ0QnKYKoOuj9o9FOFh7+Ti9EEAQhYCdfMDd+v98PP09jzf+gkEupx2lDlPxQBWhDAPgbT+tVhUUFUCwWkcvlMDs7GwJf7WPX83x84ZIl+0FMx6rPIhFoMwZaDXbuPYmSqy6aBqjZ+sRpdxQI2StFgZ5pytB7XDJuNBqFcbLb29vhAUvkNXrG6UVn7HqxWEQmkwlXawqFwliudobFELizLLsJ1YJ462lXL7yCdY3/teDcrs5F9aOPf6KMNF8ZccYizn8+/e6rZxx+U6DLuW3xURz+1/sBf5iRS7/76sxxtbHs1sMcJbN8uAwYX7l1AWRbjuYq99WXmEwdHz7dbo0MLYOOCe1DBeMuY63X64UHLi4uLkaG4+wXTRWDbi0rWmDKFJzo+t1H1ioExpnBepwtg3Dwgd0WF+uqoR3KTNoOvUc/+4SJ7QP9z3Wdqz9ZZ1/oji3LZdXp5k6+eFJnv98Pl7Sz2Sza7bYzVs41pmR8V13iCFDXf1GGkW3fftBewPStPAvwe/ks8fc4Ssh3jV1p4ums/X4fhUIh9BpQGPE/xpv/ZgLn+2Fc8V6rLPSzHkSkgEYPqlHlRYCVz+dRKpXC+GGGt3DVis+ZNAdcCnw/ySo/VdA06GjUWWDGe1XhUq7aFTkt37bTB0j0/89D8fnI8gfrsNex8BkzFkix7dy3wJUxOw56wJKu3FDOk08ZQsNDsxi/rhtSyd82np38r7zgA+yqg20/KR+RfMaZ/c/VV1b3TKNnosaHZfvCYX11i5rT04B313N87dTPk0C6iywGiqobr7f9ws/2uijS0BRXnchPrvkQ1S6tj2vclIf1RHS74uPrY6V+v49KpYJKpYKjR48ik8lEtnk/KBZAP3DgwFhWFCtobawRiR2j97jCO1xLYyoAgOkmnrWufB58l+Dks1zKxd7rAuIu5W/L0klFZrFeKpcw4otKkZ5zekT7/X54mme73Uaj0cBwOAzjpWw/+8CjT/BZgeESWL7+jCIfYIpzb9Q1cUH6rYCAaYSdXh+3fq5rXMqCfNHtdsMYOXpD7cmvtxME/kbTpPEYjUZh1g27w5/5+xmmwtAP9iVwc0c/51s2mw3jh7PZLICbh391u13nUq2tr2t8tQ37NUazs7NjToF+vz/mtaIS1eVmhruQ9IRiylbduKWreWyDi998YNXXR/ba22GAu4wKfW5cY3sv4+UDZfrO05oBoF6vhzpSQ1wYHkNwzrh2fTH0jeE1ms6RGYrsRlTyhGuznRptLuMOGI9dV+eZ7QNXP0aNCf+34zMtf6iTytX/luxYuXg0qq78za46+IwC/Y3X+ZxoccqZ1KYoXlTcZ+ugfejCepOMBxuiwtVK2z5XueqEZbgLf6fzxc4nHXdrGFjHNLCTmWttbQ3dbjfch3Q7KRZA58YiG3eooR0+xo5rdVrQGwWQLfniu+1SNevje44ug0QBbTsxowCqq94uxuWmLN/EIJMyVIGKkh7zXq8Xxje2Wi00Gg00Go3wsKJpBFYUQNeNqy6aRjlFgan9Vr77RXGtbUs6ti6Fsh/14nOYAo7K/N92MD4NsS/y+Tzy+fzYxjv1BhPwDIdDNBqNsU2fVErlchmLi4solUqhB73dbmNrayvs/x8kPtYQBsoPu/+HbaO8tyEuBHGM3+z3+2Nherq/gfJCZYf2hwJ5F6gHdq8ucO7cDnDO51iwNe1c9d3vAvNWh/jIOmp0NZTzXJ9P8M1wFwXqxWIx5NlisRhmHqKHPpfL7UrpqKsrrrAYffH59rAu5SH2g109jwK6dgXY5czx6XbXGFoQ6zOMfXoqjk6NAtm+tkzib9XDcTb2s+2ujZG+6/Xlw2SuvUt2DDVawNbVVX9+130wrut8K6F8V7lD/g2CYMy5wNUqOl/s/Wo8kfe63S4qlQra7TZKpdJt16l7yoNuJ4FrEDV0wwpldnyUpWWfaSeT3qMTn9e54p6mIbXy7XKIWmkucG4Fjw3DcQkD7R9lSo0fJvhiqEK/3w+95Z1OB81mM3wxB3qr1UK73fYey22J7bVp1qYB+FHtU4rrNfhBottVt70A9jh9tReA8ZuFXF6zKCJfMuMKN3Xa2F3gZgxuq9VCOp3GxsYG1tfXwznPGOByuYxyuRwudxKc87TJaT1XvnbuBykYZniOep34HM0MRGNc0wqqsrSn6qn8ohK0/5HU2FF5Q1nH7xbga3/YJXDr8WIZPjCnz9Dr9d312QJLa1SobLNxrq7xdgEnV51d4N71bIKPdrsdtl/B9uzsbDgHisUi5ubmdoF23Xhq0zta77orvt160O2mVbtK7nKo+Qwa33dfH0dhAdbF50x0zb+4csdVBztWUcahqw9YXxuiYctyPdN+9gH8KKPR9bvvN/KdhsXZZ/M3GoGc/64ybT9Yo01lj/YPN4NubW2FoF0dzC4c5ppvW1tb2NjYQL1ex/Ly8i6P/37T1AA9CILQArGWPP/XBkdZpT4GIMD1WbIkLmPofXzXe6wQt6EzvnI1ft0l0F2CwVdPBfrW0icjaFwwveRcUrdhLMyz3Gg0UK1Ww9znjUYjDG9pt9uhpwwYF4BaX6WoyRc1Zlqmy4CJut5+9wnWaehWQE2UZ2GvZNsxTbt8A
[... several kilobytes of base64-encoded PNG data (the preceding example's output figure) omitted ...]"
+ },
+ "metadata": {}
+ }
+ ],
+ "metadata": {}
+ },
+ {
+ "cell_type": "markdown",
+ "source": [
+ "## Outdoor Example"
+ ],
+ "metadata": {}
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 16,
+ "source": [
+ "from src.loftr import LoFTR, default_cfg\n",
+ "\n",
+ "# The default config uses dual-softmax.\n",
+ "# The outdoor and indoor models share the same config.\n",
+ "# You can change the default values like thr and coarse_match_type.\n",
+ "matcher = LoFTR(config=default_cfg)\n",
+ "matcher.load_state_dict(torch.load(\"weights/outdoor_ds.ckpt\")['state_dict'])\n",
+ "matcher = matcher.eval().cuda()"
+ ],
+ "outputs": [],
+ "metadata": {}
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 19,
+ "source": [
+ "default_cfg['coarse']"
+ ],
+ "outputs": [
+ {
+ "output_type": "execute_result",
+ "data": {
+ "text/plain": [
+ "{'d_model': 256,\n",
+ " 'd_ffn': 256,\n",
+ " 'nhead': 8,\n",
+ " 'layer_names': ['self',\n",
+ " 'cross',\n",
+ " 'self',\n",
+ " 'cross',\n",
+ " 'self',\n",
+ " 'cross',\n",
+ " 'self',\n",
+ " 'cross'],\n",
+ " 'attention': 'linear',\n",
+ " 'temp_bug_fix': True}"
+ ]
+ },
+ "metadata": {},
+ "execution_count": 19
+ }
+ ],
+ "metadata": {}
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 17,
+ "source": [
+ "# Load example images\n",
+ "img0_pth = \"assets/phototourism_sample_images/united_states_capitol_26757027_6717084061.jpg\"\n",
+ "img1_pth = \"assets/phototourism_sample_images/united_states_capitol_98169888_3347710852.jpg\"\n",
+ "img0_raw = cv2.imread(img0_pth, cv2.IMREAD_GRAYSCALE)\n",
+ "img1_raw = cv2.imread(img1_pth, cv2.IMREAD_GRAYSCALE)\n",
+ "img0_raw = cv2.resize(img0_raw, (img0_raw.shape[1]//8*8, img0_raw.shape[0]//8*8)) # input size should be divisible by 8 (coarse features are at 1/8 resolution)\n",
+ "img1_raw = cv2.resize(img1_raw, (img1_raw.shape[1]//8*8, img1_raw.shape[0]//8*8))\n",
+ "\n",
+ "img0 = torch.from_numpy(img0_raw)[None][None].cuda() / 255.\n",
+ "img1 = torch.from_numpy(img1_raw)[None][None].cuda() / 255.\n",
+ "batch = {'image0': img0, 'image1': img1}\n",
+ "\n",
+ "# Inference with LoFTR and get prediction\n",
+ "with torch.no_grad():\n",
+ "    matcher(batch)\n",
+ "    mkpts0 = batch['mkpts0_f'].cpu().numpy()\n",
+ "    mkpts1 = batch['mkpts1_f'].cpu().numpy()\n",
+ "    mconf = batch['mconf'].cpu().numpy()"
+ ],
+ "outputs": [],
+ "metadata": {}
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "source": [
+ "# Draw\n",
+ "color = cm.jet(mconf)\n",
+ "text = [\n",
+ "    'LoFTR',\n",
+ "    'Matches: {}'.format(len(mkpts0)),\n",
+ "]\n",
+ "fig = make_matching_figure(img0_raw, img1_raw, mkpts0, mkpts1, color, text=text)"
+ ],
+ "outputs": [
+ {
+ "output_type": "display_data",
+ "data": {
+ "text/plain": [
+ "[matplotlib Figure repr omitted]"
+ ],
+ "image/svg+xml": "[... inline SVG of the matching figure omitted; surviving metadata: created 2021-08-18T00:41:19.149192, image/svg+xml, Matplotlib v3.3.4, https://matplotlib.org/ ...]",
"iVBORw0KGgoAAAANSUhEUgAAAugAAAEbCAYAAACItHG6AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAuJAAALiQE3ycutAAEAAElEQVR4nOz9d7xsWV3mj7/X2rniCTffTnQ3HUAyCIKgREFARjGi4Jevjo7j96ejIwgYQEYwKyIMMyAzJAUVhBlEBEQQEFQQyR3pdPv2jSdX1Y5rrd8fa+86+9StOqG7L93Afs6rXrXPjqt2fNazn8/nI4wxNGjQoEGDBg0aNGjQ4N4BeU83oEGDBg0aNGjQoEGDBptoCHqDBg0aNGjQoEGDBvciNAS9QYMGDRo0aNCgQYN7ERqC3qBBgwYNGjRo0KDBvQgNQW/QoEGDBg0aNGjQ4F6EhqA3aNCgQYMGDRo0aHAvgntPN6BBgwYNGpxXNLl0GzRo0ODeCTFrQqOgN2jQoEGDBg0aNGhwL0JD0Bs0aNCgQYMGDRo0uBehIegNGjRo0KBBgwZfAxhj0Frf081o8HWAhqA3aNCgQYMGDRqcJxhjxsT85ptv5l/+5V/IsuyeblaDezkagt6gQYMGDRo0aHCeoZTin//5n/noRz9KHMf3dHMa3MvREPQGDRo0aNCgQYPzCKUUN954I5/85Cfp9/v4vn9PN6nBvRwNQW/QoEGDBg0aNDhPMMYwGo1485vfzA033MAHPvABrrvuOoxpMqA2mI2GoDdo0KBBgwYNGpxHnD17lhtvvJH19XWWlpb4xCc+0QSLNtgWDUFv0KBBgwYNGjQ4j7j11ltZW1sjSRLSNOVjH/sYZ8+evaeb1eBejIagN2jQoEGDBg0anCdorbn11ltJkgRjDFmWcebMGY4fP47WurG6NJiKhqA3aNCgQYMGDRqcRwghKIoCIWxl96uvvroJFG2wLdx7ugENGjRo0KBBgwbfqDDGsLq6CoCUm7ro0aNHEUKMSXuDBnU0CnqDBg0aNGjQoMF5gpSSJz7xiWNC3m636fV6eJ53Tzetwb0YDUFv0KBBgwYNGjQ4T5BScvHFF3PJJZcQRRFHjhzhMY95DI7jADQe9AZT0RD0Bg0aNGjQoEGD8wRjDL7vc/HFF+M4Du12m4c85CGEYdhYXBrMREPQGzRo0KBBgwYNzhOMMWitSZKEVqs1JuQNMW+wHRqC3qBBgwYNGjRocJ4ghODkyZOsr69z5MgRwjDki1/8YlOoqMG2aAh6gwYNGjRo0KDBeUJRFHziE5/guuuu4/Tp05w4cYKPfOQjnD17tiHpDWaiSbPYoEGDBg0aNGhwnqCU4tZbb+XUqVMkSYKUEt/3WV9fZ//+/fd08xrcS9EQ9AYNGjRo8E2NrABjIGiy3jU4D/A8j7m5ObIsGyvmURQxPz8/zuDS+NEbTKKxuDRo0KBBg29afOE4zP8y9F4IH//qPd2aBt+oMMYQBAGtVmtcQbRJr9hgOzQKeoMGDRo0+IaAMbAWw/IIlob2ezw8hKWR/a5Pv30V4twu/2efgcdedo/+hAbfgMiyjKWlJaIownEchBAMBgOWl5dZWFi4p5vX4F6KhqA3aNCgQYN7FYyBjWSTUJ9DtmcQ75XYLr/YhoWW/dSHD3TgqgN2HA4UwKIHz34r5Bqe94h78lc3+EZDlV7xuuuu48Ybb8RxHFqtFlmWoZTiH/7hHzhy5AitVgv4xrG5nBnAbWvwkMMg72afxldOQduHi+fv3vXeG9EQ9AYNGjRocF5gDAzSCWJdI9zTiHf1bUxJrNuwWH3XyPZ9908Q8HK+bgjb8ZxCw+fOwOPeCbmBZ94HsosBA8ebhBoN7iZU5Pymm27iTW96E6urqwyHQ5IkQSlFEAR88IMfZHFxkWc961lj28vXMwoF152FR/13SDX85MPhvz/r7ll3nMNr/wle8n6QAj76M/Coi++edd9b0RD0Bg0aNGiwLYyBUbYLYj1lutIw35qiaJeE+tJ90xXvXri9+qY0rKSwnMCpEVxzBm4fwIkRnBzB2cROW8tgkMOogESBqtt+JbznVqhGffQO+IHLz+eebPDNBKUUn/rUpzh58iSu69LpdMYque/7OI7Dpz71KR772Mdy6NChe7i1m8gKWC6tYsvxxPDkd214LQHPgVzZ9fzDREzHuMMeb94fxp/JcRP/JwV40r7pciX867GGoDdo0KBBg28QGGOVqLotZDsVu068cw1z0Wz7yMXztXHtzeH+Loj2agrLKZwawj8vwbFb4Y6hJd51or1REu1UQVGyaiEswZZA4EDkQseFng/zAdynDwciONyCo224uAsLIfzSJ+CmVfiDx8Fvfdau87888GtwEBp8Q6MK/NRaMxwOOX78OK1WizAMkVLiupZ2OY4DWBJ/2223cfDgQeDutbmMsp1J9bRxw8zaSBai8vqOtg5fNAdX7gNfWjXbAEpBpixJ/7PPwR3r0HfhMa/ZSraVgfn6emud94UILji89f9qeC6C687As99ixz/nIXfbbrrXQmwXRbyysmKklAghxh8pJZPj6uvYroTt5Lbuyok4ua7qf2PMlk+9CMBkO6t5prXnzrat2ma9PfU2TBYlmGyvUmrb37fTtidR/x07raN+PKvfUP/MWu9O65vWjvo66/trsp2z2ryX6Pft5q3aVT9O281fT4k1bb/sZh/vpY07HdNp0+rHcda8262jur6nrXPyU59eb/O067H+e2btu2ltrp8n0+bfzb6bte5Zba62V30msVNxkVlt2ut9pX4sJu9Vj377t7WAhfKzOGV4EVj4jsv53jrxTgv7sNvOPrLY5hy1ey4CZweivZbBUgKnR3DbBhwbwImhVbTPJFbxXs1gI4O4VLRnEe3QgY4HfR/mAtgXlkS7DRe04aKuJd37IkvEw0ZuanAvQHWdKqU4efIkH/rQh/A8j1arhed5CCHG9488zxmNRlxyySV867d+K57nISd6s8bAINudgj05LilsB3kW0e6H9lpzHXvdGW2vx6yA9WS2qr0SgyPKe0drOqGeNm4+2rnD/k2ImQ+FHQn6tIdyFYU8+WAer3QGeb87Cfrk+maRgmkP0jtLouqoX0T1+ZVS5xDbWaRk2nx1kj7Z3p3aPe237mUf14nAdr9ht+uddX7M2kZ92rThyeX3imn7pzqOszpys7b7tSLo06bvhqDP+n8aJh8I0zpV9Q765DrPfaDcOYI+OX0aMa13ovZCznczz2Snsfqe1UHYCbvtXGVKsJ65rGee/U7L78xlI/dYT13WymkbmctaaqelygFYB5aA5fKzNPG9/Dc/zZvrxHsusg/kWdCmVLRL68ixkmjfMYRTsSXaY0U727SOTCPavgNRSbQrRXtxgmhf2IWjLViMYKEh2g2+zmGMYTgccvvtt3PzzTezsrJCHMekWc4gd9jIPUaEJKZFIlqkok0q24hoARPOE5uIlURuIdy6Up0nledyXDewarYj7XWnsX7wNIfVZLaFZD2ByJutZm9Httv+9rEeDXaNmXtxx1vhNFJ9dxPtaeu5Mw
Ts7tr2TphUGLdbVzWstT6H1NX/r3cmHMc5h6TX17WdOrcTYdyu3dt1Hrabd7v1TW5vWht3i2lErcIkSdwt9kK6ZpG+vZ7/d0cHcVbHeHJaNSyl3LaDMtm+Wer4NGynTM+av1r3dveXva531nZ2Q9Yn33pNI+s7IR8TbUu211JnTLo3MkuyNzKvNo8l44lyaLkFPb+gFxT0/NwO+3b4wu6IbwkK+oGiH9jxHTfje/7vI33zavJZ7RGvRgL9q45aov3vq3D7cUu0x4p2sqlojwpItA2ihOlEu11TtC/owsEIDrXgaAcu7sCRtiXgC6G1mjRo8I2GQlkFeZp6vTQynB1orrttwInViI38gQxVQCJaxCZEYggZEhETMiLQQyKR0PdHRGKFfb2QR159BfvmbYoSbayfO85q2ywJ9u1Lm//HuSXps8j1ZYvwiClke75lCXqDeye2VdCXl5cNbFXPpJS88pWv5Hd/93cZDoebK5qh3E0+HKt5Xv7yl/Pf/tt/O2ebjuPw+te/np/4iZ/YsfF///d/D8CTnvSk8TjXdTl69CjPfvazeclLXkIQBOcstxtlq05cqgd0NW7aK/0KsywqOyl/1av0iqjXCfpuVdpZSvt2x3i3Cvdu1jWrPfXtTOtk7FZB346g3x3YruOzXVv2sq67AzuR1lkEfS/rmmZlqY+f9QYJdlbQd9qPk+dNfdm9KOjTtlk/b2a9MVJGshpL1jOXtdRhNXG2EG9LrGvD5ScuXCJXjQl21y/o+/mW715Q0K/Id1DO5xV4zvTfIaWkUJqhclhXHiu5y+k84HQW8No7Lv0D4BCC/VhLyxzQBVpAiMCzv9MSbUHp0S6Jds+D+RAWAzjQ2vRoX9SxhLsh2g2+kZEWe/dmL49gPbVveRZbnGMbmY8MPU9x+y3X89Xrr8ORAgF4fkRWGAoZkhIxKHxS2SbWEQMVkIoWIx1ihEM/NCy25a5U7Lp1xNvmrViDezXuvIJex7SH63YK6U4kxXEcPvKRj2xZVgjBpZdeysc//vHxuC9+8Yv8zM/8DH/yJ3/CQx6yGRlw9dVX8+///u8AvPa1r+VbvuVbGAwG/M3f/A1/+Id/yMbGBn/4h3+4l584lchMI+aziE2lVE4j/NOI8zSlsiJAFZm4K28WtiMx5xvbbXc7knZnOwS7bdO0Y7cTOa+Gt1N/J5fbS8doN79z2rm3l/2zFz/6LIK+03J7Pd9280Zuu325neJeaFhPnTHJHpPt0iKylrrldJe1bHN4VLiEjirV7Pono+sXHG0nXD1f0A3yMdnulqTcd2b/fm1gpF02lMtK7nFT3uLMms+ZPGC58FgrPNaUx0C5xNohMZJMSwqq4w4GgcDgCQOS/xcYYm0uq8BXgNPASeB24Dbg+G3P4/OLDdFu8A0IY8qg522I9dKM8aMcOv50b/ZCCy6egyv3gyc2GVShraK9kW61ityxAl86XvmzHaS4mkhcQiQSq5JnMS2ZwGiZQJ/hwq4DyQqe2mB/z6XnF0TEPPGxD+cx3/YoXLcxaTfYBUGfJAPTyNNuCcO0B/qjHvWoc9YlhODAgQPjB26lJl999dXj+Se3dfXVV/PIRz4SYwyPf/zjue6663jb297G7//+78/02O6lnbudPqkuAlsI++Q+myR9k+vYi1K8HdnZSXXfbp27nXc327mz7dgrZu2HvZDzato0FXpawOJu2jOtkzet0zCt/dPI8rT1TmKab7za5qQSvh0Rrk+vn9OTSvRuMe23TDvflIa1McGWrMSC1WSTdK8llcrtsJqURDtzGWQOgaOtLaRSroOCnm+tIoc7KVcuFGPbSM/P6Xo5XT/Hl7MDQ42BoXZYL1xWc49b8ojT633OFgHLucdK4bGhXAbKZaRdS7SNpDDnEm1XGAKhiaSiJQs6juJib8S8m7PgZez3Mg76KfvdjL6b03MLAqERAr7927+9KUHY4BsCxpSkd7dBkLVpaQFz4XRv9kILrlgE3wVXlNeeKf3ZhVXD677sm8/Av5XDawkE7kQgZE29PtyD+x+cVLMFc6EmlAV//df/hy9/+cssrS5x4sQJHvjAB/KlW75EkiQ89BGP4Ms3fxmtNfNXXMEdN93Bfe97Xy656MJxdpcGDbYl6NMelvWHcEWcK+tL9bA/fvw4L37xi/ngBz9IHMd8y7d8Cy996Ut56lOfuu02Jrc1ub294EEPehD/8A//wNmzZzlw4MCulplGZGa1axam7bO6qj5JmLez29T3qTE2eHS3nty704KxF1J9Z0n8+VD564RvL7Yf2L0PfK9q/ywF/q6q4nsh59X89fNsVmeh+n+7zkH9/+32s9KwnkpWYrlJuFOH9dRhNXZYSeSm2p04rCVyPN2TmrlQMRcqeoGi7296sg+1M65YsCS7H1jlez7UdP2cwJltiTEGhsphrXBYyRxuTUNOxT3O5B5Lmcdq4bFWEW3lzCDaVl2riHYoFW2p6DgFF3kxc27OYkm0D3gpB7yMXkm0w5JoN2jwjQLbmd6ZVE8bZ8zsIMj5CA51bCyEUyralT87KWC1HgAZw1dWNv8f5dAJZgdB3mcBHnbBdOvInfFnaw3GOBw9epTPfe5zzM/Ps7KywuLiIt1ulziOWVhYoNVq4fs+7XYbx3E4ePAgnU7nbj8mDb5+sSNB3+4hXJHOegDkYDDgyU9+MqPRiN/7vd9j3759vP71r+dZz3oW733ve3nKU56yZX15no+3BZtkYbfEZ5YyeezYMXq9Hvv3758ZIFdtb9rys/bHXsdPkvQ6Wa/7aacRtMn1Oo6zxYc7iVnrmTXvbrEX4rldh6Oafm+w3OzUzp1QP46z1rtTR28a+d1ru7Yj5tX0aeS8Pq2upE92ICavCSEE2sBGKllNZKloW8/2SmK/11I5VrhXS6JtCbnElTAXKvqBJdv9UNEPLfk+2C64YjFlLtT0Q8V8qOj5lnCHjgamp03V2jAoBGuFw3LqcCwN+MyKz+nM42zmslq4rOYuG8phpCSJlqS7JtqKi0JLtBecjP1+yiE/54Cf0nML+k5BKDeJdj1e5XzESTRo8LVAXgVC7rFQzWpii8gszrCN7GvBRX1bcEaOrxmbY3+U1bZZ5ui/5czm/5kq04POINr3O3juuMqf7X+N7V1CCA4fPky32yUIgjEh7/V63HrrreO0i77v02q1mJ+f58iRI3S73a9tQxvcq3GXFPTJh7kxhre85S189atf5ZOf/CSPeMQjMMbwtKc9jQc/+MH8xm/8xhaCrpSi1Wpt2eav/uqv8uu//us7tmmSyCilKIpi7EF/z3vewyte8YpxUYDtguR28trOwiw/804djMqPPs3eMvkbJ4lStfx2ObtnkfQ7S4y3sz5U691uO7P20263PW29u7EA3dnjOct6MRksfGdQv3YmLTPTzoHJ5WZNn0Q9I9BkJ3bzeAoGmVP6r92ScFsbyWpcG54Yv5ZKpIB+qJmvEWyraCsWI8Vl83mpZNvp/TILScszwLnnim0TjJRkrXBZyRyOJR4nhhGnEpfTqVW5VwuHjUIyKBxiLcm0IJ9FtKUmkpq2o+g6i
ou8hHkvZ9HL2e/nHPIzDvgpfaeg6+RE0nYC7kon8q6cGw0a3N1Ickt6l/ZItDfSMv3eFMvIYguO9uCyhZJoY687pS2JHqRbifbpVbj2xGb+bNg+pd99FjaJdX3aTnn47y2o7s2u6xKGIb7v0+12x5VEoyjakhfd87xxEaPm/tGgjl0TdNje6lARjI9//ONcdtllPOxhD9uiHn//938/r3jFK0iShDAMAUsePvGJT2xZ1+HDh7dt8Kw21DO5ADz3uc/l537u56b+ljopqCuN9e/dWER2Uty3W8ckWZqmvNbJ+T2lOt9duCfbf3dvu3oTsh328rZlN6r7djAGhrkcp/VbqwdEZpse7bXUZbX0bK+lVtUGSsVaMx9p5kqyPRdpFlqayxYLOy7anD4fadol0a6u/XpBH2sdEaxmkpXM4Y7Y5TMrIadThzOpy3LmsppL1guHoZLESpJOIdoAnjAE0hA5mo6j6bqKC6OcBT9m0Ss4EOQcDnIO+hlznqLvFoRCUSfa00SFyX2/+b2741Shfi5M3mNmqeiz3pA1aDANxlibxl692ZW9oxtMJ9oLEVwybytCuhXRNjYQMitqgZDluo4tweePlUp5bO0mi+3pZPtgB64+MD3rSDfgm8LaZYzB9308zyOKIqSU+L5PFEUIIQjDEM/zcF0X3/fHyzT3hQYV9hQkut04sCfX6urquGRtfd6DBw+itWZtbW1M0AEe/vCHb1l+O0zaO+rz/4//8T944AMfyNLSEn/yJ3/CW9/6Vr7t275tnK5xmho9zdYyTbGepVLv1Ss8+cDei4Je/V/fB7v1Uc/yPe8WdwfBPV+k5HxYCe6q4r9Tx2zWNo2BRElWE5veb70i1OPsI2XGkTL7yGo5z1rqoA30AsVcYG0jY0W7JNwX9vPSw71JsucjQy8EKbcWINo8b2BUEu2lVHIidvm3JYeTicOZxGEpFazkDmu5YFBIRoUg1YLCCKtkbyHaWEXb0bQdTc/VXNTKWfAK9gWKQ6HiUJhzOFQs+Iq+p2g7m52Aah9t3V/nVundTv3e6W3SXs7zye1OZmnazobW4JsTxtSCEvdYETLXZSDkFKI9H8J9F0t/tgRhrD+70DYQcq0qVFN+rl/bXO+4pPuM9H0Xz8NDjk5XuiPvm4No7xWTb7ullOPAT8dxcF13TNarzn2jnDeYhl170Le+Gj93vgpzc3N84QtfOIcQnzp1Cikl/X5/5nZmKfb171nZM6644goe9rCHAfD4xz+eRz7ykbz0pS/lh3/4h2m32zuq3XcFu+lY7HWZ+nyzSMduCfckUd/JrrLb8Xd1PdtZOnbCXuwue/HAz+pI7K2DIYhzaw1ZKwn0Wkmu1zO3VLC3jq+GlRY2CLIk2n2/sN9lQOTRbsxcpOkHivnyey5UdHyNFNM958ZArASrucNa7nIyc/n8asTpUy5LuctSWina0hJtJUmVIDfMINqGlmNou5qep7kwKljwFfuCggN+weGo4HCQM+cVzHmallTjB/l2trBZx2M3lpM6OZ/s0N4d2M169kLKG5Xs6xdKT6nOuAvCvZLY5ad5sxciS7QPdTYrQmJqhWryMhByRqGatCrpPiNX9lX7p+fTnm/ZTCUNzg8mY4EmY+wmY4AaNKhjR4K+HQmcVlHz27/923n3u9/NZz/7WR72sIeNvbvvete7ePjDH75FPa+2Uf+usBsyWZ9eJya+7/M7v/M7POMZz+ANb3gDv/ALvzDz981a327m260dBvaeLnG79tQJ6W6U/u2mf6Nit8dmJ6KUKrmZzq/0aa+XebPX4lrWkVpBm7XUIVOSrl8wV2YVqQh25cc+1InHvmxLxgvmAk0v1OPgqUkYA7EWbBQeq5nDydzji+shp8/YYMjl3LHBkIXDoLSOJNso2r7Ulmg7lmhf0CpY8DUHQs2hSHGk/Mz7inlP03HP9Y9Ps5HUp9nhrW+Edjo2k0r0tKq61XzTVPNJgr4XgjztrdVusFfFfLfrbYj8+UNWzK4IuR3RXk1Ke8dktpHye38HLpqz15gsibYykBdWsd4SfLkBN57a/N+Y2SR7IbKK9vwUpXsuBLfJznevQkW8KwW9+t/zvPH4alwVK/fN9IxusDO2JejTlMk6aX/Pe94zHld9P+5xj+PSSy/l+7//+3n5y1/Ovn37eMMb3sC1117Le9/73qnbmexR7kVdnjYM8JSnPIVHPepR/PEf/zH/+T//53M6BncVe2nzNHvLrHVV/08bnsSsIMNZbdkNWd9Lp2Mn7LYDtNf1bddhhHPPV2MMaSHGJLqubK+XlpFNNbvKrW3HpUrS9W1qv7mwRqhLhfu+7bQk4GWKPz+nHyi6vkKKWW89ICmJ9krucCpxuGbQ5fSyx9nMY6XMOrJeSIbK2eLRNgiEMGOa7FbWEblpHTkS5cx7iv1BwYGg4FBYcCS04+Z86+WWcmtwdF3Jmex4z1Kkp9lMJo9V/RhMHrt6/EU9DWmdWE/auaa1ZSeCPqtdk22b9v9km/eCavsNyT6/SCYL1cwqUDMxfZBBy5vuzV4oAyEvLQMhqyNYqM1CNfVAyHqhmi2ZTKaQ7f0dWwBnmqLdDUpS3+DrGtU9w/O8sa3F9316vR6HDh0iCAKiKMIYQxAEuK57py2oDb5xsSuCPvmwrlTx5z3veecs81//63/lAx/4AC95yUv4pV/6JeI45gEPeADvec97xhlcZr3i3ovaNet1dn3el770pTztaU/jLW95Cz/1Uz+13U89L5hGtGd56GeR5+0IRZ2wTusA3BVFfdbNYrtO0azt7Ga9u0WmxCbJTraq1/XvtUSOAyPXEodESTpemUM7VGNCXXm1L5tPbLDkWOlWZU5txXZF3eKCsXXkVOpyzTDk9IrLUuaylHmsFQ4byhlnHUm1nEG0DYEwhKVHu+sqDoc5C17Mol+w3y84HOYcCjJLtD1F19tMVTbN2jLb1jX72Ex7K7bduTCLgN4ZYlon6tOI/521rOzU4d1NO3ez3e2I/d318N2rsv/1AmOsurxXb/ZybO0fveBcJXuc57oMhHSEDYTUbBaqmQyEvOk0fKb8f5DWCPwUon3hHDzoyHTvdqvxZ3/TorpPKaWYn59Ha00YhrTbbbTWzM3N0e12ueSSS0jTlF6vh+u6OI5DlmX4vt906hsAILa70WdZZqY9dGYpZ/WH+zRv1V584LOI4Hav0+sk5e5KWTTLerObtk5rc/21/LT5p9kGtlv/5HqrY1LfxrQ2TnZupmGWKrndb91ufB2ZsmXY6/aRerXItWnEO3EYFZKWp+gHems+7cngyLGiXdAPC3qBwqudDpOdnETZPNqrmcOp1ONU5nMq9TiTuSxntpjNeu4wUA4jJbYl2r4wRFLTKol23y2Y8woWvYKDQc5BP+dQkLMYKHpuYRXtGf7xSUy7nqrh7cjndqS7/qp18hquWz4mz8X6ObbddbKT2j15Ds/qbG637CTq46ftp1ntm9amaef8dtlZJnFXbGazljXG8LjHPW63T/GvGZvXugyEvBOFagptfdizCtW0PZvP2sGSX603C9VsCYSs59Ie2em9
cAqR3uH/+QjCO1GopsE3N4wxxHHMF77wBVZXVxkOhyilUEoxGo0QQmyxvFQZXjzP40EPehAXXHBBQ9C/uTDzYO8pzWI1bqcHaX3enfI7T2KnB9h23tHqe5oCvZOf/nxiss2T+21W23ezrsk3D5N2gnrnZS8K5G6V8rwk2ueSaVsFclwVcst0ySh3aLl6rFRXRWvmyswjF83lzIXJeNpY3Q4Vfs1rOdm2tJbe73TqcmMccGrV5WzmcTZzWMsd1kuP9khNKNoYSrs0rgBfaKLSo911FQfDgiu9hP1+Ye0jfsaRIGfBt0S762owW+01k+2crjbv6pCco25PntvTUB+/mw7rNFV8WtuNMedkLpncbv083M21Vz24ZnnOJ9e73bTJjkn9N03rbNan3dl7xNfyoXq+t1WoMhByj0R7JbZvdaZWhCy90oc74DkThWoUxJkNpByT68HWQjXaQOTCwa61iUyS6gv608n2XGS397VCVsA7/hWOzsMTr/7abffugtbwV1+yWVqeeVXzJmCvUEpx0003ccMNN7C2tjYm57D5lrIoirEgUt2bwjCkKAo6nQ79fn9Xgs29Ff96A3zhFvihb4dudE+3Zu/48u3wievh+x4O+3v3XDv2HCRanUzViVV9T1N2pwUxznqNXt/m5HpmkYX6tFlpziaV9MmH+N1B2KcRg3pbJ8dX+1UpdQ5p2O32pqHaB5Odp1nHsL7dYqIMu7WRVHaSTWJdEe7Vsiz7IHMIXT1O77dJtu24C3s537I/2awaOVa8NYE72x+cacFKJlktifZXhwGnl13OZA5LmctKXirahWSkraKd6ekebV9oQseUBWs0B8KCK72Cfb5if5BzOCw4FOTsCxR9V9F1Nc5EpOY028VUFZXpGYm2u9Hu5s3StDSI9eV3uplPu+bqZHwWed3pLVj9HJ78DfXzb6cUiJMdiUl1fnIbu7lepu3X7d4qVd/TOil3F3YSGO7Mstsu8xL8Ey/eQb2eMm1th0DIA+1aRUjYUqhmSyDklEI1rpytXi+24L77rHodOVYtx8BGDM9/PWQGfAX//OI974rzhqyAlbLy5fIQfuv98MEvW2L7wf8C33HlPd3C7VFVDa0CZv/00/DWz9kO1P/6PnjOg+/pFn79oLqOkyRBCMHc3Nw4x3k93WL9vlm/N06zFd/boTWslR3qlQF84Vb46f9hz593/wu871fv6RZuD61hPS6v4QHcehZ+5L/be9p//zB8/hX3XNt2nWZx2rRp/2+nvG5HAnZD2CeV4voDuDrJJ5WwevDZtPXelQfmrLbstHy9o3Jnc3jPardNAebYIKkhW0qwr8SClVhsjiutJCuxZCNz8J1aoZoxmbYq9+FOzv32VcGSVQpA+x26s39rpgWreUm0E5fPxxGnVl3OpDbF32ouWcttwZqqDHuuBXobRbtVBkNaol2m9wsKDvoFR6KcxTKPds/VCLZ/OzHrLdEk9tqBujtvsNPI+bQO32Qbpi1fxywbyG7WW11bk+fvtG1MS4F4p8jmDsvUlfpZb5qq+Sa/Z5H1yWWr4Z3eRtwdufnrvyFVkvXcpulcz102co/HvoSfABZ2+HQu/4Pp3uyFqAyEnLfXlxD2bU69UE2daN+2BJ87ZsetJ2WlyRlWkaM9eMAhm/ovcKAs0opWMEo3H4bLQ/u9tAI3DGvjhvbBOdeChQ50Q5sFxQCn1+/yrj0HxthS89W2l4dbSffUzwhWhtar3g5goW3TFt6xakmv78LJ89DWWe2PJ4Nla281pqV8nFU1dGVk36A4Ek4Ovjbt/0aCEIIrr7wSYwzD4RCAoii2CBYVQa8Llq7rcuTIEbrd7ni+ryXidJNkLw+2Dq8Mbcah+vBKeb2uDm0Gofm2vVZdx167hbHB01+z9mdlm4abbavuMyv163pintWRvfcttO1vaAf2+jXAqa/R9TsLu8qAOot0znoYVtiJpN4Z7KRE1glM3S86SXDqyuFObZn1G2cph9P+n0UaVlZWOH369LgEcBAEeJ6HMYK1RNa8lILVRNrvWLKaCFZiWX4EKyNLvtcSgedQqwppq0BWxWkOdxVX7svGBHw+1PQCWy0ydKd744UQZBrWcoflRHImc/li7HNyxeFM5rKUOVbRLpxxHu1ZRNvB5tHeDIbU7A8UV/hZmUdbcTDIORJaj/a8b+dxxNb9Nus4TRwhtJ5uY9juWG2HaUr6rHVNyyozmXlnN+Mn2zfpE5/VjslPHTt1vOvfk53eWcPbWVnqHeb6eieJ++Tvn2aRm3XdbXctb0fC7y71euf7CMTKmSDaLuu5d+7/uctGOW49d8m0Q8ct6HoFPS+n5yuAJwPLGJaBmzFch6Eso4rE4ALBf340v1n3ZdcL1cRVpckZPuzLF2HhQhuE6QlrljTKErhhYh9wJzfgQzfDyhIMffhCjXyvjkrFvGMfgAvtrcOLHbjvwa3jEgVv/jQ8+lJ40qNO8ik+xwO5kjd87D688wS89H6z97HWsBbvTK5XRoZrhprlIcihZHkoUAr6EWCsZ/2qI2WlzPJz9WFLwBfaWz/zLQhqXvUvnoHv/yRc4MMD7gv/+d/h8fvh6Aje8nH40cfAY6/apv0TgavnEOyYrakay+FMlcWMSv/8ZGfsin3TO2mTXvsTccHP3nIDvnF4xsWX8/O3SO4fwaMyeN0/wtMfAM940I6Xw70Cayn85ifhYAt+4RFlfvnzgHpwu1KKlZUVBoMBGxsbZFlGkiRb7jVFUYzvZUEQIKUkCAIAut0ui4uLY596hd3cg5Sy19zKBMHeQrbL79uX4OYzgLHkNsmgV3aI59uw0N06fMkBeOildrgi4/Md+90O4cNr8M4l+IkD8OfXHeM6Vnjhocv5vS+0OJPCrzwI+v4O7deW8J9DpMvvkwPDJ0cZxUASrLncdEagtW17kkMvKtvWPvf7okV48EW1ttd+azeEa8Qaf8sdPJ6DvO9LOf9UnOY/zV8CdO/kWXHXsW2QqFLKwNaH2Mtf/nJ+8zd/k/n5eW677TZ8398y/Rd/8Rd53etex0UXXcS11167uaEZhKFOXN/ylrcgpeS5z33uzDZNs2oYY0vqvvSlL+XFL37xlvkmVe5J4jHrAT9Lba3G/9PNAgE85tJzp1UoioI//MNX8Td/+7dce801pGnKBfe5iqc855e49Fu/l+Wh4fpjy5zdUFz/0Tdy5vPvIl2+FaMS6FyMvOo5LDz6F1noB8xHhvnI0JUb3PiB3+GWf/1rNpbvYG7xEI954jP5Tz/3yxxZbDMXKaIyX3WlcNaJUaoMywmspJJTseR47HAydTmTOJxNHZZzyXrusFHIcTBkdg7RFjgY/LIEe6sk2n3XVoFc9BUHgpxDQcGhMGd/sKlo74Zo74S9dAinEbNZ1qPdbmM7gl7HLJV1N29Z6vNOnrOzFPHJZaaR7MnfUe+obkf6p5Hxaer4Tp3y+nomOzuTKvU0El1v5zR73bR2TI7bqa07vWlR2jDIN4n2WlYj3RWpzpypRNsYQdcribaf06uGvYKuWxC6CkdYdj2uCGkkmZIMC2+8nY3M5QvLvWv
YVMpdYAVYnvz82pP4/yoy1vZKtbxUs/PCvt6tK9d1ZbsaN0ggCqHfg24HWi0II/ACuCWG4wkIBf/xKvjBq7aS8faU0u7GWBV+eQRnh5rTScrZPOF0GvPn12SskeO1FZc/9EaGuc/p04e49o4HYJSgfRqev7CpYteJ9+rIpilcKIn0XM/QW9BEcxqva9BtTR4YTmrNF1OFHkqikwEbKx7ksP8YnDllVcC3/v/gBx8961yGDQXLBZzN4fYEjiVw+xDecRxuWwM5An8VkgTEEMQt9njKNvzUk8vA1hrBXomtsu2I6bai+Wib8aW/f7cE1GCIUQzIWDM5J1XOHYXijlzzkY1Vrt8oyAYhq7dfwsrAw03B/6x9++FJuPV34PDc7rZ1vmEMbFTxC+U+rYbf+AX4zAl7PP/X0+BH739+2lAUBXEcc/PNN3Prrbdy5swZhsMhRVHgui4bGxt4nofv+yilSNOUTqdDmqaAvb/keU673abT6XLkosvx2weR4T600y+Vajlb0S6J9+rQvrmpyPN8G9otew0GPngeyDIe412ftXY0V8Knfx2+5cKdc+lnGlYKe96fKhQnVc4plXFrlvPW0wUCTcdJ6LWWSIY+8S0XcPvtC5gcHteGpx+eTr4rtXttBL5nmF80zO3TtOc0QU8jQw2+4ZhTsISGdRf57202RhJPwvt+Cr7zcvB2kJwLNBsUrJOzTMppYs6SskTCeziGwiABF4UBDhDy5zzxLp8fO2AmEdpTkGj14JJSkqYpf/d3f8f3fM/3jOcrioJ3vetdu35FM0kc3vzmN+O67rYEfVr7tiMiQmzaX+q/4c6oi9Vyb/83wU+83eazfs7DDZcuWoV7aSjG1pKVESytZiy99rcRVz+P3uN/iYV2wMq1f8mfvPgH+dbnvYZHPO0naYsBBxYysuA4D3rC47nk4ovot1xuveHz/MWf/ybfEn6E3/md38HzPDzP40UvehHXf/Sj/Pwv/CIXX/kAvnzdDbzhj36bW47fyne/7O2cHEnOJILlVLKSwUYuGBaSWEFmBNpMEO0q60gtGHIxUFzezUqibf3ZR0Jbkr3vKfreVqK9FzvQ1xo7nQ/3RDu2I8Cz5p3Wwd2OoNdJ/LTMLJOBmI7jzNwndQI9jVDvhaBPftezDk3bF9OWr37HtOmziP20dRQaq1iPSXWdXNtiVGNCXP2fOWzkLlIY+r4l1j2voOfXiLZXsOgnJdHGWjsQ5FqQFk6pkpfEPXa5fS1io/xfCEPP31zneL1+Qd8vuKCT0PEKAqn5wkd7LyIHciSKAJhnq71lHrjg3f+4+RBMcuh1odeBThtabQhCcHyQHuCBWoRs0RbXcRVEBaQZJBrCwD7s/RDmApjzYf0s3HEGPFEQzSd8VSV8ainh5KmMs6pgWSkGQpMIyKQhlxIlHQrhkhuPTPskeUSSt8n1PM6RAk9kuKbg9I37cchJ1lpgFMI3iEOaOy6AzDMkhcEkAjeG9kgghoJ0KFgdCs6OBHpZwB0SEgmpgNxAWp4AAdAyqA7oEGjB6CjoQ5BH8Acr8Pt/C0sbsDaEOIYsBpWAybDrSYGs9imwpnwPtAuJC7iWRFKSHynhwj489Oj0rDVt/9wOzSwoNANyNsi5XWecyBUnCsXtI8XtG4ITG5KzA8nqUDIYOoyGDsnIIR+5qESi4xCTRLXfIsA5hAwtKRK+Y39L2cmSpRXqfBRFytUmsZ5Gtsfj463zrCS24zMflvuz/MyHNoOPKN/8BOcpUNgYw80338xnPvMZhsPhWC0X0kNGC2hvgaTQLBUBadJiI3EZZD7ZRpvVkWSQegzzgI3UJy5ChplPYRwiJ6UXFRzeZ+h3JJ3QEIUCzwXXhbBjK8/uN/btSZLb3P7rsb3ebx/CNafs25G6kjzXgk4XnAUQxuDO53zYy3jvcs6JtOB0pljJNeu5ZpDZGiJZLiiUQOcSVUhU5lJkHlnqU2QdjBY4XoHjKDa05pTZjzGGYhCAk+G4mps8w/8xBt0zFH3IjCFVgiSDNBXIVNAaCbJYcHbkcGZVok+6CMcQtDRRy6Ajj6JV4Pdz2gdjMAWt+ZhrDiXc5GaskLJGxjoZQwpGFCQUZGgKNLp8tVhBInCR+EgEGqdMF1Fdfh73bFGCXRf5nSQKz3zmM3n729/O93zP94zH//3f/z3r6+s8/elP59Of/vRUcrSTFWS37Zg1bTsryiyFda/t+spJ+3A3Bj57TNAPDYttw+XVK8S2YaEF/cDH+y/Xc+Gh+fEN15jH8/Sn38YNH/t9XvDyZ/Bvn/sKor3AQ3/6+ayaiBOpx/HUZ/WiZ3HobJuP/u2f8EP/ciFJMM8wKdh4/wfgu17MKw+9HNYMHAa+s+Dzf/sbXPfvilYroOMZep5hMTTct6dY9AsOhtoS7ZbiQJner+dqXDmd2Ezuo8nhWdaH+v+zSOm07ew0707HZNb0ejsm2z75O/dyLs6ypEybXm/HXhX7yWmTBL3+m+rTq+DoWYWH6stW0ydJ8iQZnzZ9lnVkGrZ781Bvz2ShqWm/r45MCdZSyVpWpuzMJGuZy2qZXWg9c2pk2xkT7WHh4kttiXadFHs2PedCkHMwzHClsdYObSgMFFoS55KN3Buv88zA56tZi43cZZDb9fb8/Byy3fFyFsOM+/RGdNwcF4O0L7zQSpAph0HmsZ66rCeWwK+vetyeRKynLhvleC0Ah/+FxwYdhnjE+GT4FHgYPAQuLi6B3geegii3RHskLdEmgDCEhQD6HkQCAgyezMFJMCImdlKG5GyIggGKEZpEwlAK1qSkEA7FPpeDV/pkyucNeYvk7BzKuJZkixzXFHhG4WmNLw2hJ2i54LsO0nERSHIkmdLEccpwCGvrLnnikYxaqJHAxAISAan1fP91BuQCpMGJwOtA0IOgb1joQ3AQ3AiEFBRakOQQJ5CkgiyGIgWVGkil/d4AlqwSSwZGw79+DvABz3ZeXA+iwBLouTbs3w+HupZsXzwPFy3AxQtWoPmpN8IF++BNPw1/fgIevQAHUvjrT8N/eDg88KLadYEhRbFBzmmTc0qXanZWcPsQjq8LTg0Ey0OH9YHDYOgQDx2yWJLHLjp20bEPaQfSsgOiBNI3uJEhCA2tlqHbMhxswb624NA+wQVdyQVdwYUdONrdtPO4vuH94lZ8HL61uIg3nIarQniwhrf+MzzhKtg/462/MZYkrqQTRDo+l3hv+T+xy0VuSbCjc8n2oTbcb7E2Ptqc1guYWoE5zuF1/w77WvDsOxGsa8ymlWuWer20AbefOsytJ76TRLUYpJaAj3IPz1F0g5yOn9H2C6JAEbgK19W4jsHvGBa6MEdBYRSFSUmVyyh32MgchkXAl9Ylas1aMiqSvdCGbhvaXQg7hrmuwnQyijAn9goGomBoNAOlGeaaUQ7rhWCpEOhCogoHecChk7tkqc8vfzhEFR2ko3BcheMoBBopFK5r8H1DIK3NzRMS1wXHE+hWgdIFeQGDVDBKQKeSfOSgYgkjiY4lWsPJ0LDasudiJ9L0u5rD/YLevoxuN6PVS/FbKU6UY/yUzM1JnZxEFM
RCkWE/yqZiQBhKzx38rbAkO8AlwqGNxwEC+nSZw2eBgH2E7CdgkZAOHl083BoBv5Uh/8BJHs1+Mgo+zxJP4ujeT5q7EdsS9Dq5mXwo/uiP/ig/+IM/yPr6Or2ezUPzjne8g6c//en0er0tD9I4jvmVX/kVPvzhD3PbbbcxPz/Pox/9aH73d3+Xiy6yd6onPOEJfOxjH7ONKsvePu95z+N//+//DcCnP/1pfuM3foNPfvKT5HnOfe97X17wghfwIz/yI1va9Qd/8Ae89rWvZX19ncc85jG85jWv4ejRo+PfkmUZv/d7v8ef//mfc+zYMQ4fPsxP/MRP8KIXvWjc3vX1dV784hfzvve9jzNnzrCwsMBDH/pQnvaSN3FNusiPPsTwl7dbJfq9P2y4oG8J+3ICyymcGMKXNuD2Uy4nRguc/qLgbAyrGaxlgtPew4lv/0cu/ZsL0VwMmPIlh0Bi8IXGFwodHQYEXU9xcbROz1/nvUbxpEMZ/+G+X2LRy5nzNO+/YZU/wfC+B3yaVqs1jhavih9MlhbeSu5mZ9TZLabZlaadM9Pm2e26J7dzZ5avj5sk7nt5i3I+sV2ndrvO5yRxdxxnS5npOozZmiJxsrMxrYM2Sy2f9pmFOzYc/tc1B7jffMJ/uGx1vD67XUOiBOuZy2pqMwSt52WF18zZtJLUCPZa+YkLB1doOp7mYCuj56uxut31CvYHGUeiBCkMUghMGcCUa5vuc71c33rmcnwt5Jry/0Q5tFw1Jtc9r6Drb5Lto52Uq/0hLbfApfSjaFBKEOfOJslOXNYHLqfTgBuTtiXaiVXMXUfTaRe0I0UYarxQ4/kGxzfQAtMVFFKQC3uMXKMJtGakHYAWkGBr7wwxLJc8LxaFztruwOm2NoInXSQfNnJSBhSsUzBAEwtDKgwrUnK6UrTxyLVHUoQkeURa7AcFnsjwKHAp8LTCQxM44DuCjifxXYkQDkY4FEaSFjnxSBGPIB56pMOA0RDUSFqSHbNVzVYC4Vmi7Zcke65bEu794ARgsCpeklm/bJpAlkCRClQCKrVWEtYF3Mimog1WuXYBAUKWedQNJcGH/n7Bf3ikJdkV0b5wHg50oT9R2TMvpvt6lwfw2ZOGvxto3vtZw+l1w4o35I+PpRw6qnjnhuDkAM4clfzlDQ4bn3MYjRySoUMWO6iRg04CTBJuVeYFOIHBjwxhBO2WGccHHFiAIx3JBW3JBV24sAeHOoL5dpla0hVs8/Z8G0ieri9lJYGzMXyHgZUV+EQC/UPw9yfhL2+ZTb5Vlc9+CsleCOHqCZI9H2zOG+5aMtwdIg9+8VvtcTu7PtuPPXPc0MZbzJV2rbmWffvUCmxnzS8tI1EXjgiBMgm5skHdiXIZ5g4bmc/prEWSuQRK0QtyOjqnGxR03JwgKvBbilZL48wViK6iiAxpIEg9iQlCYg3D3JAUguOF4LZCoHKJLhyKzKVY8sjv8Mkzm9PQdQscVyGEQqJxHEuyPVcTCoGPxJXghgIChUaTFylZBnEqSEeCNHZIRx5JLDEjic4kjm8IIqtmtyNDr61ZnCvo9wp6R1JavZSwk+JGGSLIyb2M1MlJZU4sFAmKHEWBLgNlYFB+nFLNDnCIcGnh0sfjQiLmCFggYJGAA4TsI6ArfLr4BELWNO87j4tp83wuG///ABbu8jrvKna8HGY9cJ/ylKfQ7XZ597vfzY//+I8zGo1473vfy5vf/Gbe9773AZukIU1T0jTl137t1zh48CCnTp3i1a9+Nd/xHd/Bl7/8ZaIo4jWveQ3Pe97zcF2XV73qVQDs378fgE9+8pM86UlP4oEPfCCvfe1r2b9/P1/60pc4duzYlja+8Y1v5EEPehCvec1rWF5e5oUvfCHPf/7z+eAHPzgmMv/P//P/8Pd///e88IUv5KEPfSif+cxneOUrX8nGxga/9Vu/BdhqqO9///t5xStewaWXXsqZM2d4+998mF/8h4S8L3jTtYLRHz0elm/hvtyMBpSx6f3APkwcYXuckQsdD/q+YT6A+84Z/unYx+hefCXPCj/JZXMeByJNz8mZC8AUGWmacs011/DbH/l9vuO7n8Z/ufzasSqaPvnJfO59/5vvffhFzF92GTddexN/+Y6384xnPAPf98nzfAtZqqderIhoRcgcxyEMwy2e3orcua47Jnc7ke7J8btRgydV3J3mnxzeLYGeRcKrfbCT+nt3EfW72imZZWupT693wqpjt13Bru0sJpO2llmf7bz4k9Ne+MmL+dyZFgJ4z01zGLBkuyThmZZ0vcJmBwpUSbRLsu0rLuun9HzFXGBz4vfKeX/qQ5dww2rIWuZweUeRZ4KbNsIxkS+0oOtXRLssXuWrMdm+tJ/Q9QoiR5XkzaC1QBWCYWaJ9losWUtc1tdcbk9CS7pLAh4XDqGv6LQLokgThgo/BMfTSM9AF/ScoBD2IzFWTdaazEhyKdGlotZ1FV2pCFEEWuPKAs/JEF6GdjMSTzNyDLErOCvFwHhizjjygHakq4UjCuGS45LrgKQIOZvv49VFgCwUvsxwTY5nFI5RBMIQOILAhY4n8RwX6UgKJcmMJMkykpFgNJSkw4B4GLIxEuhRqWYnnGMbkQG4LfD7hrBv7TRBH9zDVnErDKS5IElLNTuBvCTaRWqV7dGqgNNsktTKNuKU39KqpRIrcEtjP2goCshyS0urgLd9Xfup+3IHCbz6b+18j12AZ15qidnZk3DNjZpTA83ZgeHMRmlxGQgGQ0mWSKRrcEKN8I21rzgGLQVGgDECIyX0JKtrfV79WqCwHRA3NASRIYoMnRZc2DLs6wgOzsMFHckFXcmFXatm7yuDZjvBnY/VMQaG+fYqdl3Brv+/UarZW4h0jWwfaMFVi1uJdzVPf4aafVdhjD1u9Qwi4+EZGUaq4Y0YIn8zqHGuDe3IEm2/fDviBOXbpEU4Wlh7zCiHjWQz6PiWVWC1ChY2tjPUMgih8ecFbsvg9gpET2E6miyA2IVUCkYamxK4EGRKcFpJTuQSXbgUWYDKPfLEJz/loZWD41rLiCzVbEdqXE/je+BLQwuJL8H1JMID3cpQJrMd2RTSRJAmDtnIJY0lw1LNFgL8liGMNO0WVs3uF/T7Bb2FnE4/JeqkeO0UJ8hQfm7VbJkTy4KkVLLz0jIiMKTAGUAg8JB4SCJcIlw6uBygxRw+fQL2jdXskDkCuni0cZF3A8n+RsOePOj1/13X5Qd+4Ad4+9vfzvOf/3ze85734Ps+T3va084h6IuLi7zuda8bL1sUBY997GO56KKL+Lu/+zu+93u/l/vd737jkrePetSjtmz3RS96EUePHuUf//Efx5HOT37yk4GtBKrb7fKud71r3M6zZ8/ywhe+kNOnT3PgwAE+/vGP8+53v5s///M/5/u+7/swxvD4xz8eYwy/9Vu/xQte8ALm5+f513/9V374h3+YH//xHx+v67LHPZu/fadEGku4R9IB6fLECw0/cKnhwi4caZU3LP/cwhhVO9/2trfxri/+E7/3e7/HQf9m9rf3j0nU6tIa3//93z9e5ru+67v4+Z//+
S2E6Jd+6Zf44z/+Y372Z392PN+TnvQkfuEXfmEqmawCRYui2FIwASxJ3djYQCl1jkLqOA6+74+979XHdd0xeZ8khZO/dZKA75XET2IvdpdpMQeztjVNSd7O8jOJ7VLuzbLT7AWT+23yTUi9g1X/VJ2rWdaaSUvJNFvLTqr4XhA4GiGsXeTbDm1w9XxCLygJua/oegpH7i1w1xhD4GjrKxTw7YfXOBJl9v8yEDLLBYPMYS2x1Wo3Epf1DZfTScRa6rKeOKwnLpmSdFoF7ZYmihRBqPEDjeOB8AymL1AO5EKgqkBprcmUJEOSO5LI1fgu9JyCrizwjcY3ClcUuF6CcDNytyDxNIkLI0eQuILMERSOZE1KlqRLIVwK45GpgDjvk+Qhyni4Ih/7s6VWi2KkciFNijSrRoihkk6ipEyLwlUmlY7JdHCpTK9KRhAPXZKhSzIUW20jCZuKcy7AMcgQ/FLF9vuGhR74h8ANQUpBrmpqdizIUihKJTtLIcsEg5PAbbV1ayzJLom2EJtEO8SSbKFtlpi8sJ/Ag34bFrvWVrFYyywx14awvNc6whI4pe1yo8xweqA5PTCc2TDctgGfPyFY27AdDq2tMi18wwdvMXzgrQYjBVqAEQKMxGgBWlgzvgZaIBesmh20LMHrlWr1gQ4c6Qgu6EgOtwWnTsGVBwWPudKSOf9Oq9n27ezKLkn25DyFLjO7TKjVFbG+cmG60j0fWvX5fCDLLWneUb0enEu6tSlTb5aZRLoRtKLyPPCsN7vVg7BrvdlpSbQHaZndZARfHUK8bjs/42we5fnU6kHUgU5bIbs5RSsj9XMGQjFEMdSaQW4Y5dY/fVsON5eWEZ05FLlLkXrkZ32KO2xMSeXNlkIjUTiuxvMMnqMJAQ+BY0C6GjoJyiQUhSDNBVkmSUeSPHHJRhJiBz2SmELghpqwVXX4DL2OYn8/pzuX0etntLopQTvDiTLwMzKvsozkY8tIXlpGrNEK1sqPW5LsAIdWqWbvJ6BHh/lSzd5HwH5CFgnp4tHBu8c9299o2HOaxfpD8kd+5Ef4ju/4Dk6cOMHb3/52nv3sZ+P7/hbCUOEd73gHr3rVq7juuuvY2NgYj7/++uu33fZoNOJTn/oUv/ZrvzYm57Me4E9+8pO3kKH73c/m47r99ts5ePAgH/rQh2i323z3d383eZ6Pf9MTn/hEXvayl/HpT3+aJz/5yTzsYQ/jLW95CwcPHuTJT34yD37wg3nwfsFfPU3zhbOC519teM8jPwTAT15tcKeck9MI67/8y7/wsz/7s/zQD/0Qj3jEI1hfXx/beaSUzM3N8brXvY4kSbj22mt529vehjGGF73oReN1vP71r+fjH/84P//zP89ll13GTTfdxBvf+Eb+6I/+iBe84AXjbVf7YVI1r7enGq6Tdq01SinyPCdNU6SUuK47JueVXUYIMY5Kd113qkWkUuMrJX9a9pG7aq/ZLbaz1nyt2rCb7U7bP5M2l/pbjXpHaZqKPsuvPunH3+0+uDM2owq/++jbeedXF7iiH/PYI9MTzFZtKTRsVFVoE4e12H6vl5/V6v/YQY0EvQ1NoQR/8sGjGAHddkGrpYlChR8a/MAgPYMIwLQESkIubLiQYxgT7VRIQlcjXYhcTVcWtGVBoA2e0bgyxfFSjJeSuZrE1cQl0U5dQe5ICkdwVjqckpWa7ZOqFnG2SFoEgMATuSXbpsDNC9xM4QmDJwy+YwilfW2rC4nKcrJUkaWSJHZJRw5p7JGPHEwsAmIRkNAhFfvJjCXDSoBnDB7FyQVrGwl70O+Dv88GhWoEeWGJdlJaRCo1W6fluBGwKjbV7Lw8UC6lV8RaQET5r08t53mpZmtt07DN9zbV7MVaqrZuaDNP1AMPCwVpDmuxJdpnBpozG/DFU7D6VcFgIElGEulMqNluqWZDWTRMWpKtAUVZ5rT0Zpfkpt0y9Fuw2DYcaosx0b6wtI3sawsW2jb1or2N7v1+YYxVY7cLdpylaK9nNsBxMZpuDdnfgisWpijd0flVs9dHu1OvJ4n3MLHKdfVGo9+GTmjjIQLPngcysL+hvwgXKOslj3Ob+WelTMTw1SWbtaae7nK+bQMgW11otQ2yk2O6GYWfM3CtvWtoFMPCMMw1SQFnC8GpQlDkDrpwUJlDMbAkO886GC2tmu0WSFmSbMfgehrX0fjC0DICV4DjAGGGiXIKJckLSDNJmkiy2F63prKMlG9j/PKtWxRpui1Nr5vS72Z0FhNa3YSwkyGDGCfM0ZHGhJB5irS0jGQTlpG4/MhSzfYnLCMXEDKHz3ypZFdEu4dPF48Q526xjDS469gVQZ9lA/i2b/s27nOf+/DHf/zHfPjDH+ZDH/rQOfMA/J//83/4sR/7MX7sx36MX/mVX2H/fqsaP+YxjyFJEmA2SVpZWUFrzZEjR2a2qVp2fn5+yzwVoU/TFCEEZ8+eZTgcMjc3N3Vby8vLALzqVa/iwIED/M//+T95yUtewsGDB/mZn/kZXvziF/P0S+xT5D/df28+6K985Ss861nP4pGPfCSvfOUr+djHPsYFF1xwjgJ91VVXIYTgIQ95CPv27eMVr3gF3/u938sVV1zBLbfcwl/8xV/wq7/6qzzpSU9CCMGDH/xgWq0Wr3zlK3n2s5/NZZddtqUAUp2g1y0vdXW4TuCmZfyo5tNak+f5mOzneU6SJGMrhVJqi4e5Uu+VUmMvfDVv9V23Y9TH1ffdNBIphBhXTN3LcZhFLLcjnrOsNTuR1WnTt4vrqMZNTp9UyCeJ+Kz/q2Vm/Y7J6p6ziPrkcrPuB5PzSSlJMjMm1muJw+V+wsqKw1vu2GfHxZvKtp3HKtobqYPnabptRbulCSONHxi8wNhsI6FBt6CQAi0EfVMw1BKlIdYOuZQYV+O60HE1PacgQhMYY73UXorjZig3I3YLEscwcm2yj8SR5I4gdiWDMttIUWUbKXokeUSmQiQKT2Z4Jsc1Cict8NG4sowjcaAlrCejyKFIE7I0J00s0c5GHsNRgI4FOq75szPBmGhjyYrTsmkCva6m3TV4HcXxjnstrrDPZS1CCiJyEZITkOGRC4cUb5TCaEXAKTaJtmKLbURUijaWZDum9GkraxtJc0uG+mXu8opo1y0DLd8SbSmgsuQXCuLMcHZoODXQLA0Mpzfg2rOwviEZDgVFLnF8jSwVbVzQjsEIm94VBEa7m2q2AnyB6BnclvVmt1rQbRnmW9YecrglSsuI4IIuHOnYds+3IPLvmpq9uhPJngyQLMcX2hLmuoJdD3acSrLPs5qd5nvwY9eGV4dg2HybMd+GTgtaobWSuJ4lq+1SzT6obX77JLeFkVZLcn/9ENI1m7+6TrT7HWh1IOoYul0FnRwdZYy8gqFQDCgYKcMgN8SFYZgL1pTgq1WWkdxFZS75SY/81jIAUiocT+FIhRBlAKRn8D2D70AosN5sF4QrMNFmAGSSQppK0oEgi33SUUmyYwkGvFATtjRhqGmHil63oNfL6M7ndPsJQTvGb+c4YYbxc3KvIPc0iSzIpLZZRoRBC4Mw9i3jSEBswDEl
0daCUEtCJTmgAxa9HosyZFEE7Dch+0XIPAEdPNp4OA3J/rrHnkMyJsnbc57zHF7xildwwQUX8LjHPW7LPBX+6q/+iquuuoo3velN4+nHjh2bqu5OYn5+Hikld9xxx3jcdurdLHVWCMH8/Dy9Xo/3v//9U5e7z33uA0Cn0+GVr3wlr3zlK7nhhht461vfyste9jIOHz7M85///F0pjvU23nLLLTz1qU/lPve5D+985zu57rrrcF13ZqGaqj1XXmnDzk+cOMHVV1/NbbfdBsCVV165ZfvVfMePH+eyyy7bso56MOCsLBnblReujlFV+axOAisCXq2vKAqGwyFxHJPn+Zb5PM+j0+mMA1frpLDufa/Oq6IopqrB1b4SQjAYDMa2qGp91fpnKciTbxfq+6dqz2RxnPq+m3XcJzsL250f09Tr7RTt+u+u+8onf2M1z7RjOysDS52k18l9fR8YA3FeFsMaCVaGgtWRrU67OpKlwl3+H9vhSvGOc0kUKTotRSvShJFVs11fIzygA6pnvdm5EAhj8LTBU5rCCApXgAuBZ+g5io6jiNB4WlsF2k3BTcndnNRVDB1D4kBc2kZyR7IuJSvSpRAOufHJdECSzZMUEYXeahtxtcLNCzyh8Z0yBaksryMlKFJNniXkaUaSOGQjSRYHxCMHHe9gGwkMbsfg9zReR9PrarwFZa0WwuY7T3NJlkuyTFBkEpWCTgU6c8hTA2cdOF4ReHE/+wqASs02ZelcXf7laHTbo60V5LklzJEPc92t3uzq02vZtGyO3CTaRWkb2Ugq24hVs29YhpXbBBsDQTy0MrobGoSvwbPe7LFtZJqaDdCznQ8/qtRs6LcNCy3BgTZc0LFE+2gHLurCga5Vs/sROPLOEe1Kzd4pfd80RbtSs88h2eXwvlLNnubd7gfnp1BOVap8t37sOtkepVa9rrz5/dKbXQVAui44IeyLYK5SswtbdbUqj356CDecLfNv19XsDrQ7NoDS72jmujmmnZMGmbWMCMVQK4bKMMwhzQ2nCsHxwpJsnZcBkCse+UmfPOuCEbjeRACka/B8VZJsgS8ErhDIQCACjTYJeSHIckgSQRILspFDEnvjLCMms53EoGVolW9Veh3NwlxOr5vRu8BmGfHCBO0OMH6G8gtyT5E6BZk0ZEKTC+vLtme8vfyXKAMgjcQ3glA7REayYDxaqUtfe/SVy7zymMsd9jkRUS7wU003bI3fZAshxjFmSilc1+XAgQO0222ARvX+BsRdjpl+7nOfyxe+8AW+67u+ayYpGY1GeN6mBCCE4K1vfeuWeYSwFbWq0rgVWq0Wj370o/mzP/szXvSiF+H7m6WoZhH1aWQXrKf7D/7gD1BK8chHPnLLPJMktvq+7LLLeNnLXsbrX/96vvSlL42J2HYqaH35U6dO8dSnPpVer8d73/tefN/n9ttvp9PpnEOa6oRWSsnnP/95gPHbg0OHDgHWFlRlvwG47rrrADh8+DBwLgGfRvbqSmg9y8ekClq3v1SEriLsGxsbrK2tsby8zMbGBnmejws0aK3pdrsIIRiNRmitWVhY4NChQxw5coR+v08QBGNyXX1X26yIY1EU56i29Y7H+rq1SlTLTJLVOoGtq/T1LDfVPphU9SeJa139njz3plX8nIb6ebNdwaHJY1b3ldc7VJPLT1p5tlblhNWRtqnBBoLlgcPy0GFlaIm3JdyClZFkZSRYG4+TZBq6bU23rWm1DEGo8UKrlAkPTNdQ9AU5UCCQBnxlSAtDLiTaNUhXE3manqNoS0VYBkt6MsPxU4ybkruKuCTasYTE2fRnn5GSk1UQpPFJVUCS90jyEIPEr6f1yxVOpvCFxpMGz4FACISUUDjkiSLLRmSJJI0dktghHXoM41IViynT+tXU7DLbiIysku11NX5H0+5qvMMFwgUQ5EraQLBMkmeSPBPoRKAzQZYKskTCOjXbiLBytSPKiMdKzTY4gGe0tY2UGWLyojrWrAHLFJzF8gBbnMhWFh0AOQb1Wz/Kq42xeabTDJZHmtNDzZkNGwR583FYK9XsLJVIT4/92bgG7VASbQEGjJlQs12BOGBtI2HLELWsN3iuBfs6hkNtydGOVbMv7MIF3U0i1/LvvMVMaVidoVbvRL4zZfO4V9lGJhXty+dg4XBtfLRJxM+Xmp1ku/dj14dXR7abstCx3ux+C7otayMJA1u8RTrQ6UOrDwe09WaPchimlmSvDOGaARTrtvNTHZ+5tlWzow60OwanW0AnJ49yYje3arZRNTUb1gtYrmUZUblDkXjkax55GqFVB+kUuO5kAKTBc63Fq4Ww6fx8gfAEpp1RqJw8hyQTJInNMpLHbqlmO+hEICS2s9ey96lu29Dv5cz1Czr7M7r9lKCT4rcyRJChvZzUy0nFZgBklWVEl5aR1MAZAdIIHCPwjcDX0nqzleBgHtIzHsGo4ICMWNQB7VgzL206v0AL8izH9/0tb5g9zyPLsi339ziOicoTLFU2uYbruiRJMn4DXYllaZqO7bv9fv9OX0cN7r3YdZrFWbjssst497vfva2q/ZSnPIWf/dmf5cUvfjFPfvKT+cQnPsGf/dmfneOLvuqqq/jTP/1T3v3ud3PhhReyb98+LrnkEn77t3+bJzzhCXznd34nP/dzP8eBAwf4yle+QhzHY981zCaj1QXwhCc8gR/4gR/g+77v+/jFX/xFHvrQh5JlGTfddBP/9//+X9773vfiOA7f+Z3fyTOe8Qzuf//7E4Yhf/M3f8PKygpPfOITxyT1KU95CseOHeO6667btmPy3d/93Rw/fpw3vvGN3HDDDZw8eZLPfe5z7Nu3j8suu4wgCNjY2OBXfuVXeOITn8gFF1yAMYYvfelLvOtd7+KRj3zkWCG/4ooruN/97serX/1q1tfXueSSS7jpppt405vexIMf/GCuuOKKqW2pdyqqG0EVGFod47qSWs1bkfO6XWY4HHL8+HGOHTvGxsYGw+GQLMsoCssaqsppAFmWYYxhY2ODJElYXV3l1KlTXHvttURRxP79+7niiivYt2/flqwqVZvqAYv1/+sK/KSyrZTaohpPs4xUy1WkPE3T8XLVvgqCgCiyKascxyEIgi3rmVSb6xai+jamnYs7+b+nqep1Mq+0YC0pVerYKVVtWClV7bWkJNpDWT58SwI+sg+wfs8GFbVahjCyKf0c39jXul0o+pAiUEYgFfiFIVPW9qE9gesZur6g72raUhOgcY3GlQnCjSmclMQtiKVmKA2xNCSOIHcEieMwENLaRvDIjVcWqVkgVQESbf3Z5Hi6wCmsmu1Jq2b7ElqOtY2oVJJnGXmiStuIQzbyGI0Cm3s3FhBPqNlbbCMGr6dL24jG6yic/QXSsdwzK6yinWeSLBWoTKBTgUolKhMkaw6cFjaLSeX7lmarbQRLtD2jN4MgNRSFoFASV2q6YUE/UsxFBXOtgl6o6EeKjl/Y1+9yM9Wf0lBoQZxL/vSf971duvog0uw3iPsYJR6iC9k1mYwwCOEbJXxjfuk9BuNCJWCP1WwjLMEuyT9tkPusihhEm0GQCy3
0nu5oo315LXl4K7S8FhLljkDmRXNTSVcxWwm+x1B7hNy9JtCXqcBJbRyElu9hPL1ZHh2kg7FntsuD4y7EcGT26/fkvtQPYjo3lkGx7Zmge24gVhOJINc21ZWTeDUKlTqYiWAiMUugXZtfWpbQiEKKtRxklFRGOv2Qxt5rK2c7k2J17RHHtrc+Ktm2Mv03O/sGvZfOez4jfpyjmtDKUiy1wQeI7FTltAOm312FEAfmhRsZMBIFyGz7IRDmQXsGjb2iIX3D0SFPdBCtFLReRAKlJLaITA8y1JBKPYMp5arvow8mDkWUIJXptzwrQsdlVBXgiy1lXk5JGr+8NGMIntY3KQWeqOZ+eCZWfUyklGQyDuZDGB985mrLY/uwTd4tv1+3f67Nvc/sy2Amrqxxjr3G4C7gGbbU/Z7MLWNKsU1jOqxYT8aJfseMryeMT8+AqLRcxqHZAVPkWlKBtJrd2sRUdk9yC7k410x2+dZ7YnXBBq6Bt2Z9mpVCRaE8QOYAdhhh/mRGFGnOStq8iaOM1JkjVxXKCUpqxCZ9unA8diG59Cp5TNmKKakq2vsFjvkN+bUK5bJjt3SXTyQlGUiqKEqpWL2WHAIxbpGTzZZvX1GqKgZpLm7E+PmU5qRqOSNCmJkpIkrZFRjhxpdNjQhC2T7WmMMhjlgLRvNL7Vrn/iGGyEdrlXjEBoSdBIVC1RucSvJH4hCCuJX0q8ysOvA1Qtnfa6lvjtu9feG621+L7fP2+656EQbuY9iiInD7UGGUq0pxnJhDiOqaqqf1Z28tOiKLh//z63bt0iCAKuXLnCjRs32N3d5eLFi70xQ0d0bLbTYSzUJrA+zyL3Se1+G6jf/O9NsL7Nn3wb033e4OJJ4P7xJISPS13eyQBk+H133p6UrHBY/pkZ9M0D2mb9tlm2sY5DVnpbY9jcbpNhfLv/3GZ1p7Xm5Zdf5tVXX+01257n9QA5z3Nu377Ner1mZ2eHnZ0dpJTUdU2WZRRFgTGG+XzOwcEBq9WKJEkIw/BM3Z/Ennadbqjh11r3wH6YfniTNR6mId78n2Hn0Vrz9a9/ndlsRpqmj41yu99tnvf79+/3vy+KguVyCcC1a9fY2dl5rFMNG6FSiul0ykc/+lE+9KEPkWUZb775Jq+88goPHjzg9ddf5969e72UpxswdMc1rFMfPNc26uG5GfqWD4/pnZYnde7NG0OXfnm4brMdnv0NrCtYFKrVZbeykR5UD8B13mbObZdPMvdMmcVtnoCUPknXJLLEMaRTy/SCcVP6vkAL5xObNZo37j/izcMly0ZR+SnH6gKVSihkhMAS6ZxAltgT2Qc67nsBF33DLLLs+pZdv+Zi0LDvl1zyBZc8xdSzSLHtxuuCrrfdmM8OAC0r7QD2YSW4W0juVpI3Voo3l4oHa8VxLlmULsCvalqQ3UtFBiC7W9dJRTzHYksPVOCY7HFkGafuvF1KLVdTB66fnRqujyw3xoYLIWf80bs2oC2cNHBUG+43NQem5qFpeEXUfKZlsueVYS0NpXKuIo0c6LGlROP1ILuxIYLgVI+tLbKyyAWoherlIkmmCNeKIFd4uUI1AlsKmkqQl4KsdIOQ+3NZAhEuk/wSOG5fSwQZggJB/cyew1cWS60hq51MpNNjzzO4n8OrvVSkzTTqC4QSmBZkV8JlDQ1DSxpZRlMYX7RcUJbUt6TKOYvIlnF0UhFBUbSDgxwWx4J5JrifCR6IdlCQWHZS0YJtB7pn4w5UW6YpzBLL7kgwSy2zBCaxxVPf2EzrWbD8zn43fJ5tu1e802edwZxhqDN7KhMpKMhF0S6dbtN/bwsqDaymmPWUajElO9xheTxhcXiR+cmI5TJklQVkra901ZxKReyZQMeNZWtPreUleNIyTmtGbRBjHOeE8ZogyvDDNUGQE0YZYZiTpGvnj92z2M7ST2tJWUWUTavFtj6VDsibMWU9JitmzPNrPFjNKO6PqfKUIovJs5C88FqQ7RJ6Va2mv2exzcC2L3AzIWlomYwbrkxrplcdg62bRyhvgfKWqCjDGzWoMdjIYCKLVgbjG4xyUbqe1XhG42OQViM7Blu4HGA0Er+RhJVArkHkFpVb1Nog1xaRG0RhEblFr2q3XIBe1W5WpyWRejcyYzBKkQ+ee5tgcRhv1sU9dfioe252zHhnICGE6GfeOxzSbd/Fi43HYzzPo67rHstYa3n06BF3797F8zyeffZZ3v3ud/ez3ENntq5NDzHGkEDcbP/nJQnc7EdDDNi9dxhnqHgYPneH2w4Bd4dbhjPg55Fpm/iyO8fd+TyPwBvWe5txwya436ZFP09i05V/JoB+Hu3/dhcDzh9ZnQfOf7WlO5HDiwyObb5z5w6vvvpq/30XpNmdyKZpSNMUpRT7+/sopajrmuPjY5bL5ZngxMViged5vVxlGzt9Xhmm9R2ywJ0bS9cRh/vc1HsNwetwhN010MViwSuvvMIHPvCBfgTXnZ9t571pGo6Ojs50yq7Bfuu3fitxHJ/ZfrOhDRn6IAhIkoT9/X2+5Vu+hdVqxcsvv8zR0RFXrlx5bMCw6fk97LybnWDIImyekyeVoaxkW6fq1htjKbQka2JePoxY33eMdgesj9fOC7oD3B2j3a3TFqaRAxguy58DGNMYosgSjQ1P7Qlu+gahBFpZGgQVkGk4qYWzXKsEr1SCk0qyqAX+2jKrLbPAxVnMAvea+gbVZDR3vsqV9RHv9Q2xKVDlAq9aMvUNqWfxlCKKIl7ZCymf2ucPjj/ChybX4G2i5q11rhWLRnJSix5k384lt5aKN1eSh5lzWFkVgrziCUGPg2WxAbJbPXYYWHZCy2Tq9NiXU8u1keXpqeXZGdwYGW5OBJPASXOGg1JwNnUnjeBRrTmwhke65L6t+Lxt+CmpOa41C23Izlj3gVbSvTyJjjzHYuOjiVwSGqNR2tn3iRLUiUQtJN5S4a8UycojzBVh4eGVyln3lS7BUVaKXo99lLvkRZMYZolxUpHYAeAotAQe+CHIscVBbKiNYF0LfvzL8tPABJhhmWJ5Gsv7AYWkQtKg0H/rBdsHPFYChAdJJBhFMNqD3Stw07MkCmLPEhhAW0zlEtPULfueFbDKBScnMM8EbxaC0LMONKfCSaLawdA0scxmtIDafd4dCaaxYZa6z+MIlJJnHlTd/fSdMNKb94O36/PD+8w72f7Mf2BpaHrgXIjyDFM9BNm5KFsuu+ylI6UosaWPXe9gVhPKkxnroxmLowknRxeZnyQsV06PnZeKolbUjaCx7h5yBliLdhmgXaUAJcAX4HuGCzut/CNxTHYQZS3Idkx2GAykIr03tlvneZqyCgbe2E4uUpnYgexqTJZf4kE2pXgwpcxGlC2LnecBRaHIC0VZ0iagcTroYcCj8l3AaxS4GZtxYphMaq7ul0wnNeOxeyWjiigpUHHustWqgkKW5FRUlNSiRlMjhCawFs9qpKmRaKQwINsku0YitUI1yrHYa4EqQa0tcqVRucWsGvS6gcygVzX1oqQ8KTCFRjcNRVH0z9eqqqjrun+WlmXZA+CunXVg/IzzWjv7PgTr
QyC6GU/W3cu6fQ//s2vDnuedIbfCMOyJPd/3CcOQIAiIooggCPp1nucRxzHj8bgH+FVVobXmjTfeoCxLvvM7v7Of4R8OGIYAGB43x+i2646vw1fb2Opt7Ht3TrrlIUDvXtvIwWH9unPane/uP4cYq1vX4aoh4O++28a6bwPW5w3Yh8fzJLJ2W/nnnkl0c1RwHljfrNzmwQ2dSzZB6eZIs1u3bXnbNIYxhsPDQ1544QXKsuwbUVEUZxpDWZb9aPPSpUtYa5nP5702PcsywAUPzmYzLl++3GeZfBIzu9lIL1y4QBRFrNfrvqN0pZPSRJGzoem+d1oy3TfUbv3mw61rFEop7t+/z+7uLjdu3OjP7yZL3LOH7SxCl7GzC+J8//vfzwc+8IHH3GS2Tf0MH7bDTn18fMzt27e5ePEily9f3nr9u+vdvQ8HHsP3bdrt4flz/wulliwrj2Xpsax8lpXHolQsS9V/XlUei8pvt3HbrmqPxkhGQcPeSLZptB0IGYWWKLYEKVzdMVz3BXQATwgKa8mNcCC7EswrwUuVW17VglBbdpo202UAO6FlFghmgWUngGcCy6zVes8CmPrOs3/ia5Iuec4Gi2+t5fh4yf/4K59nXsw5uH3Qt+833nyTD37wg3zljTfYuXCR8aUbhC9GjA8i/um3Xean74e8vpTcWSketY4rq0pQli3IflLQoxZtlkfbBz0qH7wAksCSxJbJjuFCYrmSOqnIU2PDs2PNtZHhamKJPc60BfeS5FZw0sBBbXqpyEPb8ClqDoXmcNWwlIZcWippzvhjG0+ifUUTn4JsQ4y0GmU6ZxGLzC2yte7zltLZ92WKMPPwC4VXKSidVKZsk6etCsF87bTUvnLgNE4cKJ3EkISWOBSEIXgpCBzQ08ZSaFhVgmUJi8Ix4stc8HAhyR+5axt0riIKbKfHFtBIJxXhGh9DsgSWWN7CsEazoqGgQlMBNfI7b9rfU1WQ55AVTvd9cuB05A9qwSg6ZbBnie2T5EwTy2ynBdixcZ9bAD5LYRIb4uDMbe5M39y8920rmzNgw3Y8/P2TyI4nAfQzA24sBSVFC6RLWVGIgrxdl/fAO++369YVFDRoRJHCekqzmlAc77A6nHByOGN+NGK+iFmtI9a5R146FrvS0sUIWs4C637ZsdgdyPaFY7LjoOLi3toFPJ7RY7dgO3QstgPZrbNI0slKSowRlK1Pd6X9NugxIK9HFPWYvJqyyC7zIJtRHowoshFlm3wmz30HsktJVbn4g20ZHv0WZMfhKYu9P62ZXioZTyom44bRuCJMCsIkhyin9gtqWZCLioqKSpQ01FjR4FlD0BoSyZYyz2jIhEVaDw+fwAZMCNi3AYkNSe2IsYkZE5HYkHuv3eX47iG70QxRgFcLinlOkRf9MzvLMvI8Z7Fe93bEHTBtmqZ3LOvK0PGsWx6NRv0zt/uuA9HdM3cI9s7DJ10ZAtzhrM3wGT8kHrpn4rD/dIRhZ3SxWCz6Yxr2Ec/ziKII3/eZTCaMx2PSNGU8HhNFUW/BnKbpmWMYHsfw+J40k9T15SGZ2a3fxiAPSbpt+x1aTW/r55vnd8jgD+u+iX2GeHNYj23Af7j9tvtdV4aGFkMi9RspvyqA/nas8LDCT2LUz2MyuoMZCuq7k9WNwjZB2XkNZBOYg3Nqeeutt1itVj0ABdf5Og20tRbP8/j617/O888/TxAErAcduq5rqqrqp4cmkwn7+/uPBXC+3XkC+qDLl19+uf/frr5dRyyKoq/fsCNva6jduu5cdQ26rmtefvllAK5cuXLmN9s6Q/e5G8TMZjN+02/6Tf302vC32x6oXYPsbiRZlvHqq6/y2muvsVgsGI1GZwJCuzpvc8ex1lJpxaIKOMkFhU3ITcSq8lk3AavaJ2sCMh0hDnbJdORAdwu2ayNJ/YZx4JJHjYOaUdCQBA1haPATy86kZM8vHchuNbONVFRCUaiY2ks5rgRfb5nsXAuSwjIzlpm2zLQD21Nh2Qlh3z9ltTvQvRNadkLBbiSIvcf1bcPj3XajGX4uGjip4agUPKgktzPBrQW8trjC167+Ye7EFcWliMaGGAL0eyQ/qyXsiVPLvgVwJOCr9tS6z7dn9NiebxmHkI4M09iwH1uutHrsZyeWpyeWGyO33t+ixzatHvu4hgOteWRaPTY1XxeaQ9FwUhnW2lLI1h/bc4Mc06ZT10Gb4a5PQhM6VxHjWGxZW+RaIDsWu01CE64di+0XrXVfBU6PLZzkIneSi6wSJEELsmPH8E5iSxxZQs/psb3EARNrLY2FvHEZiJ1UxAHfg1zwxiNBVQuUsk6P3aamt5tSkeBUKjK6YLno295VJASky9mBbrX0ReePXQjmc3j1gWiAK8A14IRTicuZ5W+7bnt9tpOTdEDcMo0fz/I4bH9P0n+67x//7bBNb3tWbLvPbDoobYLu7jnS0PSAuRAtiBbdckkpHINdtgx3Kcr++5ISYSUin2DXE/Ry4ljsgynHh5c5OR6xXEas1iFZ4ZFXDmA3Wjo76SHIFtAHPraHqOyQxdZMJzlpkvV2fFG8xm/BdRA5FjuMOj12JxVxshLfbyirwCWfaQIq49MYn1JHDmA3I/L8IstsSn40oXhzTJWllHlIkQfk7SChKEWvxdZDqYgVKK/L8OgGk2lsmE5rLu0WjKc1k3HNaNSQjEqiUY6X5Bg/p/bd+S0pqah6Fluie4Dtks5oajS1MKyt6u36IgJmNiQhJDEzxkRMbUxqIyIbENmQ2IbEhM5VBB/J4yB3s20ZY0hzwcsHGXm54OjoiJOTExaLBavVqjcsGBJaYRi6PCCzGWEYIqWzMu6e4x2g7dpg16aH+KZrv5sy2vPA2JB1Hm47BOXD74b72nReGwLL80Bo9/ztkvw1TUOWZVRVxZ07d3rw3OXI2NnZYX9/nzzPeemll3jXu97Va9+HfbLDSduwwyYm29x2M37tGy3v9DfbBhabryGGGR7LNmyzrQ0O28Tw+2Es3SYz/07LrypI9O0A+jsp20D55ndDoLfJ7m67QJsN6LyLeHJywoMHD3qQXVUuAV/XoDsWfDKZ8L73vY9r167125Vl2QP1TocehiFXrlzpO/iT6rZZr+5m8fzzz/Paa69R1/WZC9qNmrsOJ8Spx3lXhpk1h9NrQoi+83XTZFmW8eKLL3J4eMiNGzf66a3N0o2yO4lLEAT8ul/363j66afPHNPwXJ/XLowxHB8fc+fOHW69dZ/jPGBpLlPML7K8t09hInITs24Cx9rWPqs6YN2ErGufVROwrgMaq4hk5bK5ehWpVxLKgsir8PwGEdVMo5KdixnKLzFK0EhBJSSlVayNYmk8ltrjlk5ZGY/KSmLZMJYNY9Uwkg0jWTOSNWPVMFMFY6V56oLl6Ys+u5Fg4un2/XEXjq5sypWG17s7Z8YoSis5qiwnleBeKXhzBa+eCF5fwr2l4CAXLAtBVkBZgz4v6LED3OB6tR86uYiyCE/jeQZpVqSBwWfJ1M8Iioc8c0ny3R99lg9cTXlqDLuhsy3bbKuNhUUjXDp13fDI1BzQcIuGzwr
NodEscs1aWeePPQh6NFJiAkmTeNR4NNaxebR6bKX1GT22XCrUXBGsFPHaJ8w9glziFx6yltjKnYesOLXuO1m7rKAdAB23Mx2j+NRVxE9bqYhwXHalYVVbVqXzb5/nsMgEt0vJS3MXJOf7tg94FMrZ9TXyVCoSh865ZHQBdny44TmQnSpLANBmeWwq6/TYvVQE5gcuCPjNloHfGQQ5TpPB8swyS8wZn+xZYvn4jwbfSqs5t3+bc3WFVaX/1JMeMlpvZ3822263/ZOeAW/7bBBQ2aqVKTjgXLVMdiEryh5ot0x3+7kD343Q+NrHZhPMakK1mLA+nHH8aMbJ4TXm84TFMmKdB2SlR9mBbOOkInZQjzMZH80pi+2YbEsY1cyma9K0lX+ka8KoZbKj7Kweu5WJ9GA8KgBBUTrbvqppAx51QN6kFM2YvJywyPZ5lE/J3xhTZCOqLKbMQ/Ii6FnssnIg+wyL3Sab6qwl48CShJbxqGFnVnNzb814UjEaV4xGNfEow09yZJSjg4pKFVSiouwBdoUVGt8anLFmmzqdhkoYaitR+Hg4P+wRIbENSBgxMhFjGzElITYhEc4rOxYRsQmICFCoxwZ88M5yUlhrMfbxgeIQHDVNw5e+9CVeeeUV3nrrLe7cudPbJCdJwmg06hniTvrRMd7dPjdB2uazrXsud8DYWufA1oHg7jjiOO6zXnc2w92zOQgCxuMxQggODg6oqqp//gohettbIQSLxeIMIdfVvdODd/XYnHXv/q+r4zZpSXe+O8De5Tc5OTnhtdde4+tf/zq7u7vcu3eP3/pbfyvvfe97H9vH8P2xbr5lED5c940C1W+kPGkm7V/kYOBJ975fzf/C2wD0fx5AfFt5J5XdBuA7gDpc3gSJb1fquubNN9/sJRtdxxp2MCFEP82zt7dH0zQsFguapumnw/I8Z7FYoJTi+vXrTCaTXgu2yQK9Xd2EENy4cYNLly5x//79ntEfBod2r86ucFg6hrxjn7uO2Y3+h6x01yFv377NwcEBN27c4OrVq72FYLdvpRSz2awH6B/84Af52Mc+BkBRW1aVx7JSLEq/l4MsStUuu/d5oZx/dgar0iPTIQ0+PgWBXBMvc/bf9EmCmjQ0hKFBpg0jVTPxKqxaoaWlFoJaSAorWVufXERkhJyQkNkALRSxLYlsyUQ17ASCsawZK81YNeypyn32NCNZM/UNoxaQj5VuE1XIM+d5kwF5z7vew2jU6eksSkmslWhtWDYOYB9Wgnul5M214LW54I2Fx4NMcrQWLEtB3kpFmtqB7Md8sWva1Mr0LPamHjuOXODjTmy4klqujDRPjSzPTgw3J4ZrI5gFgjxb82M/9mPcvXsXZRWJn/DLX/invPdbv53D0mBmM+YKxs8+xatP5/yKyjjWhnluyJWhVFC3mR5dwKPAhF0SGp+aoNVjnyahUY3TY8vjs3rsaKWI8lYqUihEBaaUrT92p8d2AFmKU5nFTisVGcVOqxr44CfOKQTpbPkKbdsENIJFYZhnjsl+uID8kUNgbycViUPr9NhTy1XfkHjWJaCRTrFDDaYNVq3aFPDrwtX75NCB7IeVIAntGTu+0+OA6dQdz86os/Az/Xe7I9FKRc635txW7N/m1hM3aMu2QKnN77r70PC/zwPiGk1Gfgqk5VlGuxBlKx9pmVZ5CrpL4cgQv4mwqwl6NaacT5g/3OH44CLHx2MW84TVOmSd++SlR9koai0cyO4qMQTXHewe6LH9NgFNmhbsp05nHScukDGIV61MJB9IRXLi0Zp0lBG3LHYQ1FSVT1mHVHXrjW18Sh2S12MHsvM95vmU4nhCeXtEkaWUWdSCbK91FBFUlaTaZLGNQEp72rcDSGPNZFpxdVowuVEzGtWMxiXJOCca5fhxjg0Kaq+iliVVm9lRtyy2wgHsjsUW7Z8VCGo8lPUJCBjbgMiErUxkwthGjG3MeMhi02V2DPAHUOHtmOFtRaO3tyX9+PrNgLttMzfDdUII5vM5v/ALv8BXv/pVqqpib2+P559/niiK+gDJIVjt+sLQla3TlVdVRdM0JEmC1pqDgwMePnzYm0KEYci1a9fQWvPWW29x9+5drLW9DPWZZ55BKcV8PueVV17B87yeTU2ShGeffZamaXj99dfJ85zOeEJrzbvf/W4mkwmLxYI7d+5gjHOUA7hx4wbXrl2jaRpu3brVx6dFUcQzzzzDdDoF4MGDB/1xT6dT9vb2er17l8ujUyMEQYC1zqHswoULCCGoqooHDx5w69YtPvvZz/Kd3/md517Xf11Oy68WhD+p/P8FoG8rm3qgJ9VlE6Q/qWwew9HRUT9y7YIyhvsFB+Ktdf7cw6mi4RTXyckJdV3z7LPPcuHChT4JzJA53Tbts209OGvCj3zkIxwcHDwWBNH5lwK93KUL3OjO3TYtlDGnnuIdSBfCBZogAw7WitcOHyBf0swuPUMwukRhYpa1A9yvNr+fWze/DxHvcSe5yt/5/zrZSKkVsaeZhAPJSKiJQ00QWrxAE8YFu+OK1FRMy4LSGPJGU0mPQkbkIqIORrwZTsgIsAhSUbevilSUpKImpiSxJbu2INQZI+m+H6uGhBKvWuJVa6x25+epp54641W/Oerfxo4YC8tGkJHwqIKHdci9OuZOHvKgjDiqA8pXYtalpKigagMezw167LM8Pq7HjgJLkhomEVxILVdHlhsTwc2x4V0zJxW5kkCiLHbAGrlrDGttOWkEB7Xhoak5NDUPqfkFafhxGo4Lw7JyQY/5v/UdaE9gfIlREiO/n88J6dKpt5keXRKaUz22qC0qB7noQLaHv/RIcucs4hUKr5IulXopKEtBVp5KRdZlm+Uxce4pjsV2GtUwsASRQCUgpMHiEgAVjXhMj31UCN46aKUi0klFVAeyh1IR1UlFXIr6dBeebTM9JhIiYZEGbG3R1WkCmryAdSFYLOBkLTjI4J6WTmednLLZPZOdWKYXbe8msjsWvWVfB8p9db5P8GYbPNVHDLc5fzp1233knZahTtUY0yefKURJIc/qrItOEtJqsE+Z7KoH341o8K2HqmLsakKzHJMdTzh+uMvxwWVOTkbMFxHrLHT+2JXnrPuMQBvRUv1DqUg7KDWtbKDtOkpYoqBmZ7IgHeUtO70mStZ90KMD2S7BSxznfdBjB8YB5yhShU6L3QY95k1C0YwpqjGL7Cke5hOKO2PK9YhiHVMVzravKD2KUlKUkqp2LLbpWGx3y0G1LHbkuwRT47FmPK64dmXFeFw7FnucE44ygiRHRTk6KKlVRS0rKlqALRpotdh+D7ANov2jCg9tnUwktj4zGxCbkMRMGNmQSQuyExP2INu9B3hWIcXjs7tv9/zsZ/s4JS02JYjbCKnNNvqktnyeLOo8gL75fNVaE0URx8fHWGv52Mc+xmw2611KhvvxPI+iKHoC6ud//ueZtEn8Xn/9dQ4ODno74Bs3bnD58mUePnzIrVu3+kDQKIqoqorLly9zdHTEcrnsSbKqqjg8POQ973kP9+/f7+ua5zm+71MUBWmaUpYlRVH0sWS+7/es+IULF7
h9+zZZlvVOZR3LHYYhb775JsfHx709cRiGjEYjjDG8/PLLHBwcIKUkTVN2d3e5cuUK8/mc4+PjXtLz7ne/myRJKMuy16EPmfxnn32WJEmQUnLz5s0ntpF/Xf7FlW8YoG8CniHo/OcF6Df1XMP/25S/bCvDG8jw5lHXdT/11clCgDNWRd30T9e5h2xq14C73z/99NNcvHiRNE3PuL9sC8Ac1uW88txzz/HGG2/w4osv9jKWLmobTgcuHevgBTGNHFOIhFImFDamIKYyCZUdUeuUkpSSxL1s0i83hHiURGSEeUZ0XJD6NZOxYTKFNIXJJcmsmfP0u3YZ76yo5ZoaSYEisx5L47PSigc6YWU8BJaRbEhFRWwLIgGqKfFMiVetmJQLQp0hizl+vWYWWL79I+9l5lumocD3VB/UMmRWusGStvpMJk+tNY0LMaKRChOOOYku8mYd81CH3C1i7pURD+uIkzpgVXkUtUdZS3QjME2bhKZjtYaMtqXNoNGBbIv0XKr5NLSMJoJZbLmQWK6NDdfHlqfHhmfb5QuBJhrMOvbHgmCpBXMtONCGA9NwYBseUPMTouFR03CydKnUc2Wole2DHo0SaL9Lp95Z93kYETiQrfWpHnsF9kgg5hZx7KQjzaOSuAyQC0NcaUzREHopYbRLVsk2CY2gaj2mp4ll3DpwpJHTqYY++LFFpjg9NpZKW7K6A9mOCV/mzt3l5QVofWrdJ/0tUhHVSkViJxWZ+XDDNySeS6UeCtFLRXTrClMUgqywLDNYHDk3nTe3ZHmcpcIx2QnMppwGOg4Z7mGWR/nkvmqdQgYptwUQPf7bt5tF27yPbU65P/7/bXwJmv8rf/YisAPMznnfAWbPhDdPGe72HSAkQJUpZjmhWY1ZHuxw+HDG8eFTzE9iFsuYZeaTFT5lLSmbU6nImaBHAXTm2NatkoAnLJ4wJEnJbJYxSjPi0Xqgx3YSET/MCcOMKCyI0zXpuE2hnmYEQUVd+xSVY7G7gMdKh2T1mKIZkRcz5tlTlIsxxb0xxWpElUcUeUBZtCC7clKRuhI0Tdvf28G0kNZJRXzrpCKxZjKpuDgtGV9dMpnUboAwzojSHC8pICzQXkWjHIPdiIpGuPToEt2z2LJjsYXFWkVjPbA+vg0YWZ9IByQmITUhIxMxMRGJcaA66t5tQGD9PuvttrZxXhvqirEGK+xjz8/h83vbM3xb++0A81B//U4Gk+cNXofHs6mx3iS1zttfEAT8wA/8AD/3cz/H3t5e/+xumobj4+M+7ml3d5fJZMLHP/5xPve5z/GVr3wFpVSf2bpj0DsjiQ984ANUVXVGDtMx4s888wy3b99mtVr1sWsdjrh58yYPHjzg4OCgd03pfru/v38mW3h3Hrv8ItevX+fLX/4yURT12ARwSYVu3uTOnTv4vk+SJL3k9fLly1y6dIkXX3yxr4vWmtFoRBRFfOlLX+pnDOq65vbt2xwfH/Po0SN83+fd7343Ozs7WGu5evUqu7u7PPfcc3z3d3/3GcLrX5d/ueWJAH0bq/2kB0fXWYYA+0nTXu/kv9+JVmlzSmxbYKq1tk861HXAjpXuftPdpDr9WDftNJQ+SCk5ODjg0qVL7O3tMRqN+oCSIcO+7Ya37ZgaI5gXgmUrFfGe+R0sj97Fw5OaWo6oREphYho1ppZjKpFSixEVCQ0RSpcuCQ0ZARm+XOPJEuU1+L7B82s8dUyijrFSoJVCez6N8inbwMfSS1h4u9QqRpqG0OQklIwmu4x2JuhYoVXNRDaMRMVIOva6B+ImJzI5oa0Rgl6b9+joEQ8fPjwz0OmmEbXWLrBW3sATHoKA2gpOtM9xEzAn4pEZcb9JuF+lHFQhKxORZwFl451meqtFC65bYP01TkG2wIHsTi7igfItnm+IfU0cacZBw15YczEouBRVXItyrgUFF4KCsajxheULX/gCdV1z/fp1RqMR0gu4+vz78WYXuVeVHFinyX7LlvwKmqNac6I160Eq9U09tktC45jsxnpYQpT1WibbIkuLmAvUXOGtHJMdrhRBJgkKD69QyFJgKkFTOn/sVZs85jhz/TP2CiJZEHslsSowdcF4ZJDUKGkQoWU81ly+MqYBlhWsK8GiFCwywbKAV1eS/NC1Vb+V2ggPUKdZHh+TikwslwLLc8qQegOpiAbbsth13eqxu0DNY8HJWvCwFMSBZZY4dno2ar2xW9A9m57KR3ZGZyUlOyNIQ/o2OOz7w3foHvjnaxWfVIb6183fnkdUbAJxx2LXlJSnftdimMlxe5bHbptK1ABvWstxlYfzbJGuFgfj7MFblzm4e9E7OdgJ1/NRWGZRdDH2yUqXSr3SgkY74UMLfc4y2fYUZHs4FjtUDaPxmkuj9WkCmnRNGLcBj51UJMqJorzXYrtskDlCWsoyPJNGvdI+ZZOQ1SOKZsQ8u8aDYkp5f0zx6ogqSyjzyAHs3ElFykpQ1g5kGy1Obfvs0BvbkgSGdFwxHldcurhqtdil8+weO5mIigusX9GokkbVToctGoxoQLS+2AOZiBAWrMRaDzNgsSMTEJuAxLRabBMx0s5hJCIk0j6B8QmtjxKPz9YO29Nw/VYgy+Ma7s19nbe82SbPA/rbLC7fjv3efG0ex3m/fzvziPP2tQ3YCyF47rnnePTokYt1unWLL3/5ywRBQBzH3Llzh+PjYzzP43u/93sxxnB0dIQQLk7rwoULANy+fZvpdIq1Lmhyf3+fqqrY2dlBKdVLUpIkYXd3t3ch6/TuXcKgjr2eTCZYa1mtVr2tYRzHSOkyhXeEVOcKM5vNeicVKSVhGGKMYTabcfHiRabTab//IAiYTqdorRmPx/1vO319WZZcvnyZ/f19oigiz3OMMezt7XH58mVeffVVlssl+/v7GGP4/Oc/T1EU/czB+9///jNOLv+6/Msv7zhIdNiZtgUVPmn7beWd6rO3bT8smzeabnkY3dzJO27fvs18Pu+nvYbylWE9Oq1a9+qSFhljyPOcOI57P1EpJdoK1qXPqgpaeYhi0WqyF4Vq7fw8FqXTbXc2fovSI28UgdJMeneRBjW+yom+izFrpNJIWROoQ6LgBHwPEQRoz0f7IZWKqVRC6SUsvT1qFaFMTdBkRCYj1BmhzgmaNX69Im3mBFlOoN3noF4T6oygce9Kl5gWPH/oQx8iTdL2gravjWvSnSMDVJ6HVgEr4/OggFfqCzyc3GQu95j7e6zFhEKkVDaisQHG+PyV1xVWiy16bOH+b5DlEQ+kZ5GexfM0kVcRxhUjVTL1cm7OFNeTmqthxtWo4ILnBhOS0zaxXC5ZrVaoKIXRjLUMOJIwl4K5B2+Egi/5MPdiMj+l8CF/7tdjQ9VKRZyriBE5WtyhTk6ZbEE8SEJjkBnIY4FcqFN/7LUiyDyCwkMVElFLTCmoW3/sZe48ppcDj+lRcpqcJQ4toW+drjo6ZbG1hbyBZeUcSY7XhofHmqyOmOsxTe0hhEEVBulZhLLgCUwu0W/IXiqSRJbRCMa7lj3fkvqQSkEoLUqDbU6lIlUJeQnrXLBYOInLYSa418iege+Tz
3QJaUaws2fPOIp0adV3x4LdkSD0H78vbAO+mzKmtwMW54GX87Y5r/Rsn7CPZXAcguhCFGR2wyN7kJzGYgiJiGyEyCeY9ZjsZMLh/QscPdrj5GjE8TximQWsM5+sdCx2pQWNAW1EiOBK+xocOC2aQ2PQttSkUcneLGfUMtjxaO2s++K1A9iRS6UehgVxnBGPnKNIkuSEUUlde04qUodUTcdiu+QzeT0mL8cc59cp5hOqh2PKdUqZOalImfutFnsQ8NicF/BoiXxIE814UrE7qZhcWjEaF4wmOfEkI0wcyCYoMX5Joyoa2dC0INs6+/cWYLcstjAOyFsPYX2U9fCtT2gCotonMRNSEzDSIRPtWOzQBIQtkx0aH4U8Q8Kc1142geOw3Wyb+e2eQ+e1wW0DzPP+b/j74fZDAmpb3TbrN/zPbbKTbnnz2btNnrJtP+eVbedrSA5uO/Zu286M4B/9o3+E7/us12vu3bvHt3/7t1OWJUdHRxhjqKqKr3zlK7z++usopbhw4QK7u7s94zyZTPoZ3M4/fDKZ9MmCOptlKWW/fdM0PHz4kPV6zXQ6RQgXxzYajXj48CHHx8cIIXo2upOoPnz4sMcply9f7ln1Lot5mqbs7+8zHo9JkgSgN6kYj8dMp1Pquu5BeZe3xfd9lstlD+avXLnC8fExxhh2dnbY3d3F8zyCIODixYvcuHGDr371q+zs7JCmKS+++GI/+5Cm6TfMoFdY7tFwAw+5hQj51+WdlXcscflGL9CQxX5Sx3on++5uYN3A4O0e1MM6dOXw8JD79++fcUTpoq+FEDTakpuAZels+3ITkZuIrHFuIlkTsKoDchuhqyn1aynlawnrJiRvPAJpmEROiz0JGpJQE0WGIDB4vkGlll1Vs6MqGtm6i9AGPg7cRe7qiNJOCbmEV69Q5RK/WuLXK/xm7QB1/ohQO5Y7pSLQa4ImwyuXePUSz9RnptyGWr1N27TufHTLhdbsXL7Btfd9hIfJlEMd89CMeaRHPGpGnOiYtQ4ptE/dKBqtWqmIOJvpcZjlcWDdRwsOhdL4viYOGhK/ZuyV7Pole96afbVmX83ZZcHIrFF1RtgOhqDV1mvNWsPSehRehLp0Ebs75cS3fM2X/LLvsfB91r6g8qD2hbPuU7NWjy3RQqLFMAmN54IejdNki9oiVsCxQS481ELgLSXeUjAqfEY2RZUKKoEunbZ53flMrwVlIxiFDoxOEsskgUnipCKB5wIelbIIYTHWzaasG1iVrR67TaV+v5C89gCaoVSkHbAY1Watl51UxKVST2aaKMpJmxzfVIS2wbMWqUEYiW0EWnsIExGFO5RNwPzI+XrfyVxf6r2wB0GP0wSmoy75jGEadynVjZORpI79DvxTP9nz+uh5Hrjv9L4wbMdPKudJSyyWmqbP2+hY7GKQYGZLGnVx+l1JhUIRExGaBJuNaVYTVoc7HNzf4ehgzPFJwnwRsVr7rIpWz9wIau2kIr2BSkfoD1ls2lAGIPA0ozRnb8+lUU/SjEqdHCajzIvjzIvj3Iuj3AvDQsZRLuMkF8lo7cVp5iVJhlKGooh6qUgHsEsdkVdj8iblpLhKkU0oFhOqWyPKlsWu8oCi8CjbgMeyFtS1eJuAR8NoXDOe1FzYWzEal0xmOelkTTTKWy22A9haVTTSgWxNjRENQmg86xhshQG0k3jgoayHtB6+aQF24xObhMSEjEzAWEeMtdNfhy24jkyAx+nzY9gmDleS/+hv7NBowV/8fYdcnp4f23SeC8ljMyQb/9FJFYbSjU15STfLeJ7rU7ff89jwbeuGoHvbvobbbyuVFvzlXw6QEn7kW0sCdf7M1LaBxSaBtild2TzG8+p6Hos73I/Wmlu3bvWM8tWrVzk6OuoB97179zg8POTTn/5074gCsLe311+bqqr6YM+yLPsZhTAMmU6nFEXBgwcPuHjxIlJK4jhmMplQliVlWfbyTCFE7x7TWRkK8XjmzyEO6bKQ95airSlFB7KjKOoHWlmWsb+/z2g0YrFY9FJY3/d79v3Ro0c9BpjNZr1FdHfso9GIMAzZ3d3twX/Hrnfa9G+EPf8zn4G/9CX44Q8afuETd7hFw28l4b/j0jvex9uVByX8oRecFe3vH8HffgH+Tx+CX78hk3/SM+G8tv6nfxb+35+DP/ItsP9B+GsH8B9fgt+3t32f/zJkP28L0Hu2yJxNKf+k0fA2cL6tYw7XP+mG2Om/N387lIxIKdHGpa4+WjtnhS6r48ESvn7L4837H2BRei3oDsh0SKZDch1RmBAlGmKZE5ETypzIq/D9Bs+rEKFGyIZJUBMkS1RcI8Mc4wdUQpHjszI+C+NzR4dUVpJI3TqFNL2jyEg1jEQ9WH9q8TdWDVPfMFaaUMFyueSXfumX+OpXv0pZlmeuS6cB7KKyzyQEUh6lF7PwEspgwjrZZ+ldZOHtsvIvkMsxlRjREKBtgNEK67LrnAY8fo1Blkce02MLz6KUIfBqgqAmjkrGXsGEBaPmmDi/R7K+w7g5ImxWkK0R0N/IfN/n45/4BNHOPrmKWHkBC1+y9AUngeSF0GcZXSILPMpQUgcSHUgX9OgNmWzVBj36W4MeZWUQC4l8IGAusIcNYm5dMppcEVYBXq2gkjSVoK4lWalYVYpV6WMtxF7JOGyIw4ZR2BAFmtlIsLsj8UfgeS7DozZQNra17uv02I4Vf30tyY+cdjdoA0Yfk4qITioC49g6qYhnea5No55I57VMa7PY1I7FLivBOm+zPB4LTtZwUIZ4YofET0j8GoIKL6iJ/YrEd0lE0iAjVg+5OLvHt33kXVzeC3uQPY4cVtzsq1372+a+cLrd9jiSbQ/hx3/7jd30uhTqZ0C0LVqXkVPAXfTMdsd2u3VaGEIbEhMRNA5kl4spxw93Obh/maOjlJN5wmIZssw8sqLVMzeC2oC2otVjQ5945rRyCGybRt0SBxWT8ZIro4x0lBGlGXGyalOpO5nIqR7bJahJRhlJsiaKS5pGUZSRS0DjAh4vlNq3WZ3qdZPqRTnKl/mVanEyrVe3x1W2GjXZKjWXZfiuqhgEPFan3th2W8Cj3wYzJw3jSc10UnPj4pLxtGA0c+y7Y7ELCEuMV6FlRSNrGuEANjSolsHuZSIALcDuGOzABoTaJ9YxqQlITcjYOBY7NSFh+72vPXyrEG/Dxg2fJVUD81xyP5MscslJJljkknkuB++Cz70e8soDDwT81Z8d83//nSdv2xa7eKVt22zzwx7GOA0B+qaM9Emyk67fDR3MzjsH563bBoyNhWUBiwpOSpgXgkWpmJeCv/c1j0++7iGFewT8sV/jnkPn+dYPy7ZB+ZO221bnJ+1/+FtrXQDoe97zHo6Pj/sYs26mO45jLl++zGq14ubNm6zX6/44RqMRnuf1QZxKKeI4PjMb73keSZL0gZMdUE6ShLquWS6XBEHQA/GOJU+SpAfJHSPfOap0QBro69jNyneGE57nMRqNSJKkj0vrvk+SpA847bYPw5AkSc7gri6xUjd70P1HV78OQ3THGMUxsyuXufDN38JXozE/Ny+4
rw0HxnJoDMfWMMewFK0ZgTSUnmH9PoP/UcPfCEx/K/xp8q3X7fFrDSsNJ21uj5MGDkq4m8H9DB7mcJDDLx/B7TWIBv7WHTdZ+De/Ass/BcFA2GGMZV0LTgrcq4SDDB6UgvslPKzgUQ1HDZxomGt4bQU8C//PNXDbHcAfeMPygzuPyyb/eZYn3Wd+VS4u5wVxfqP7+vM/HfATX/X5099b8p3v6W5UknkBx5kD10dry7yQfWr1k1xwtOZMOvWTXPBoLchK5/iwl7qU6tPYMk7AU5pVpmiCABEb0qAhljVGZtRCUElJhUdmPTIRkcmEI7mLER6BLohMRtQy1mPVkPqGqV8zC2tmvu3B98TTLimNb9mJBIE6O/W37abcWPiLx8/yepbyw5M3+MvHN4iF5rdPDvhr2Qd5z0c/wPNXP8cvv3bI3TJmHe5TBHuUakqtUjQRxvhY42G7fN5DZ5FGwBwnFRmAbJRFKINUDb4siIKGcaCZejm7KuOCl3FBnLBnj9gRGSMqpK76ANzuOBoryGXIl+t38+nF+9iZHTG98AarUchi9BxvjD9ElYTo1EeHHiZUWF9iPMmnlcBI1QY9tiDb+lghUaZBmTaVem5gLhBzgZyDnAvkAtTcEqwF4ybCbzxMKdC1oKgUWSVZlR7rysOThnHYkPgltjwilAVJBKNYkcQVYejhR+4B3RgojCBvPLJGMs8s69on0z5HdUydS4QA79jgP5AIH2gDHmvRJqAJnSfxaGQZ71l2PRh7LugxlgJlLLa1WqwrKAt7KhVZC04ewlEmuFtLRtFpqvTTTI+ubc9mdsBsn2Z5LFcP+NKvfBrbZH166mGW2W56NY7jni36yNU99vf3N/rsk92V3smU9bDcFff5B95Pc91e4bfY73Z2iUBNfZpCnVMQvam7zsWpRGRtCk7qAhWWCCMps5jAhFyOUppl5429w8GjKxwfp8znEYtVyDL3yAvZ67EdyAZrxSnIFqKVidhOLoInLUI2zNKMCzsF6Thz/tijU1eRMMrwA6fJDsOcKM6dTKR1FfE8TVGEFFVEVQdUOqDWAWUTtVKREfPyMkUxoVhOqe+0WuwspioCysKjKCRFKSicD3yDFh4ar8XDIQKLpEZSomzmJXV+/XrBzm7GZFowma1Jxuvesk+FJdZ3ILsH2C2LLYVG2aFMxIJVyBZge3T66oBIB0RlQtx47IgRYxM6FnsgE/G0REn1WJs6Xw4pWeSW4wGofgxkZ6Jfdt/LdubJXeM4cMm1JrFt30078+Nmfy5NNKFvef3RCCEsH7xWnwu6h5KL7jXsC+eRTdu23SYHGe5/cyapu992TDsMveot8xIeVYq6hEUlWVaSeelep8su3mleCualZFEKTloiQUmYhta9Ivc+CS0HuUDtVUgtUOLxTN7bjqOr82Z5J4z9pmxmeF7ebkAvhNOhv/DCC85UoLUc7hIPdSxymqZ9/JkQos8W3hFeSZIQRRF1XfeDqiiKSJKkl7d2cpdOAtOx28PSbdeBczi9h3YDgq504HqYhVQI5+AWhmG/LyHEmUFHGIZ9MOnwnq6tRe/s8Gj/KvP9G7w13ufOuz9CFkX87M2b2NmYo4/9G9hxAqMQUp/6D/5e3ooFInTkkakElHNULfG0JNCSWCtSIxlbyVU8ZkKyJyUXpeKv/IrkrSPJzVTwge+a85mq5tuyEX+mBdiPcjhqAfOihFUFWQlFDVU7Gyc1CO1sba0GISHw3MtXULcg3Pq4UHgPGh8++JOwspABBZZSCvBBBSB8MB4YTyCsJZQQRZBGMBIwVfAuBcdvCOYruCItS2nJlOGKJ3j9kfvPZy5uj206b+ZoW9veVp4kGX9HAH3bn227qT4JtG9W7pWHgv/yH4Y0RvC7/9uY6zPLSS6YFy4b207iQPYssczi0/Tq08jy3kvdevfaSeDX/3TsMkAKSFLLQSN4pXLswEj6hOGEyIZ9kGMqKhJKIpOhigWyXOCVS2QxRxZzzOoIv1njK9fBoyji0qVLTCYT0iQ9E+wxZAqUUggrsI1PY4cp5yGrFYuyTTnfvj6/mvD3Ty5jGsEfbz7kXEU0/GxzARp4sUmh+T6nx1bWAe6qi+AyDmTLBk+sUCLHl0tCcUxsH5E0DxmVD5g1BwT5EX69ds4C7c0gjmMuXLjAxas3MOmUTIWsgoB16LEOFUdJwBvxJYpIUUSKKlQ0gaRpAbZWzr7PSUUUhoqX7bO8YN8D4JLQGI2oDWJtT3Mcnli8pbPt89eCaC2RucQWFl22oLWOKRqPVR1SGY9Q1SR+ReJVJH5JqCoC1TAdByRxgwwaRGqxOH1ubhR5o1jXinXlGPGTMqLwrqONQpYapS2ysAhPYD3nklJLJ8kJA00Ua/ykIJYlO6wJRU1oG0RVQ2O4duk6voqpKktZCpedMrOcHDuJy93MtT/HSrfuIql1ziItyN65YJm2doTjSJ8B3NPYEvjiXBCzeSOo65pXXnmFN15+mfXiAKUUaZpS13V/U++mWLvssAcHByRJwosvvoi1lt3d3XNvGG8X1PV25e95P8F9+ZA37W2+ytfRaJdCXWgH4oic7zIRfpOgswk6G1HnF6nWTm6RryOKIuRnvhTwpddDtFan0pAOXPfpaERv3acATwjioGI8WnFhL2M8cs4iozYRjWOyM1SQ4YUZIjh1E0nSNXFS9Cx2VYWUTQewA0qdtK4iY47yq5SrKdXhmCofUeUxVe602GWp3AChFJStI80Z2z57moE0DiBJDJNZzWRaM95fM52tGc0cs/6yf/czcZp7YVgGntckSulECpNITNy+xgonFZGDQEfR+mF7Qx22CUj0hMQEjLVzFJmaiJGNXDZHGxBaH2G3a52ttb1/dBzHfXspG1gVHo9yycnaMdou66rqAfey6IC1ZN4C7JPcuQoJAZMBwO5eDmhbLk0Nz19uBsD7FIhPYkNwzhOukJLfy4SXCPiv1QOu/6G/TqMqrt/6HfzhT93kW/crfuTDOVKeTTs+TAqzqUc/75k4PE/DeKehtKVoYFFKTgrBojoF044BbMmownJSnILso1JwXAqyQjp/+BBmnuV6rJmGpgfZOzHMIsNTUwe8Z9EAiEeWWWhJfFxuByyv55bf9k9SjjF8x/e/xc2R8+H+rvk+uPRb3xB7P/xuCOj/3BcS/vKXE37H0wV/6TtWZ2bszpMTPEkW17HoHfhVSmEufjN/v/x+nhafZz/+pT4JYQd8y7Ls13UDqSAIelDcsdXdumGCo6FkJQzDfpZk6LrWAfduXcegG2P6uhpj8H3/zGy48jzkZIx5+l0cvvuDzC9fpZ7OsLt7vP57rzEXcHzpIoxjykBiYx+bKPQP/hZEBCJ0APe1UkIhEKXFrmvsusJqSaIt6nhOfH/FuNE8PRlzdOt17DLjynhCsVjwrve+n2vv/xYelZJHueCgcPa38xJulcIB7ApnPTyIKXntAF77b3cwjeDHhMXzLMIKogCSyM0eSx9CzxIkYJWgcUpRKnn6wgMROABeeWA9AIsyBs82oAMqLdhX8FTiiLD9QHApEFyO4HIIM2mYKkuewR/
9uxJl4e/8XrgyPh1Uamt5aOBPPZPzam35ES/ir6wlv5LB7Zct7/3zCiHgf/2jht/6we0qkCeVbaB+WH7VAH24021TTZsVOy/r3LaK7aYQeiC15V0XNf/976/YbS3R0gCGHtBvVzeAZ3YNr60kgbT8tU8UXIotY9VwdOcWn/+Vz7Eu1yjPJ/C9M4kDiqLgMDvk+PiYoihYr9d9GtyqTQUshGA0GhEnI0yww4Ipj/KEIo9d1svKY1W3Kecrj1Xt9ennl+3nVeVhgZHfMApa//CgQXhga4HwLDtxzYnysJ5gLy45kj7Sh//0wqu8P1kxERWhKVnNT3jrrbe4ffs2h/cPWa3XrBqo/IQqTNGzKXo2cq+LNznc+QDNJMaOY2wSYGIfHahTkC0dk90wsO/DR1iDbxs80+DVDd5K4z0yeAuNOjEEJw1ybrAnDa8ev4diHUMluFjfQWtFbUIyk1LZBIMgIMcXGaHImM0ko8jgK0PgW5Q0EDQ0SmMSgUZSGJ/SBuTaI28C8sbnoBlRlVOslXjKoCpnf2hbf+ymlb4ozxAFhnisib2GXVlyhQadLdHrOYGQRCrEVxG2EVgboI3v2Pe1dMG9uWKupyihSX03MIhUSaRyIpmT7FTcuBy1Nn2WndQBiWFGyHEEUm4P3NrsH5uyke7hf17m3GFpmoaXXnqpZ492dna2Pky6h0Wapn2fVUqxXq/5xV/8Ra5du8ZHPvIRpPR4J5Nk50vT6H3Nj3PBohDcqj7MK8UhRRbxruybaPIxq8xjmSnmeTdDBscZ5LUgCdzgexrBJITYh9gzJFFBGqy5evGAJFmzM8sQfkaSZDx1KSOMM1SY4QUZfpQRJU5SEicZnt9QlxG6TLB1gmgSlElQOsHoGabZo66fYZ1P+Lsvj3hrGVNXAaN4QV362CIkLkcUhXCuIi3IZiPg0fedVCSOLOOJk4rs76yZ7maMZxnpOHNa7LhAhgV4JY0q0aKicQnSsZzVYrtAxxZg4/OcqN5lkEtgaRF3LOIYeCSwDwX2nkLf9Wju/8jhv/nLkQmICFGclV9sY3vOzoxIVqUL/p23s5qLTDDPlcvgmjnAPc8Exys3o7kqFfN2fVFLksC0A07XP2aJYdKSL9PYcn3PMo1rZim9D/0obJglltjXPXAbMovDYlvmbG7h2FoeCc3LsmYuaxayJBMVmajIRUlWa7LKsq4scQUfqQT/S91gbr2Xooj5y5+/ySoP+eTtkF9/U/JtV87OFm9qzrtX1ViOS8lJTg+ujwu3fJIzANu0IBuOcwd25oWg1II0dramaWqJEwgi8EOLCoCRhR2ofEupLLmER6J2JFDjg3X1uuw3/NTNI9YS5sJy38B9Yzk0cIDldSwnWJbCsJbOyrVQhtozaN9gfY2dQvR9kkul4vWkAekmXQ+8hue011+LYTuqNJy0zPy8Esyr7l2wqIQbZHTL7TZfPPAAwf/ySsR/9s0rLiXbSYjuvA91+l1pjHAzBO3A5aU7KS80H2BlPTIT8MXnfyMmD/k87+M/8L+O7z/sQXfnPT6UlXRgu2PduzbXAehu2w4/dIC+A/Nd8h8hBAQ+xc4F1u/+IOtnP4DdvcjXP/gBHj37HFkY8OiD38rcahjHLCcxL41CiCU2BH7oNyKUu38+KAWUoEqLX0E9TTEnK2RhGa1XyJM5E625FEU8fPUWIyERVcXrr93iqQ98M3J6jQcF3D0pWRNTTi+R+SOWxT5aRmgCPnvoYfW3g28Q6/bcf1XCC8a1w8TixRY/0YjYInYsNrJt+7T4nqU0FptJmhdjIlsQmYJVNaIufWgs1Sdg4YFvILIQW4gxTL2K1CuI/ApPVQipsVLTCENtDZmBuRYYK/GVpkFS1DH35tfAwGFl+L9ds9wp4H4J91fwSuMkLAcFHJdwJ7PkYw0W3vdToIygyKAuQOcSCqBIoYA/UAhs0d7TlcUYxwF9+uvwPe9/3FL07fL2nCfrfiezz/9CEhVt6uy6fQ1vrDuJ5Rf/1Iov3vb47vdpRuHpdKe17/y/u//6h9+d8Y/ve3z8guZK4v53sVjy0te/xmq14ovL5/kbd38zO/6S/+i5n0Iqyar2OcmnPJzv8nBecZILMh1SqYQ6SdHeBONP0WpCdZBQPAzxhGbk14wCZ9E38uv21TCJNDcmBalXMYkMs9gyDhpSr2YSahJfIzfwzMJ4/Pvzb+LABvy+Sy/w2vtu0ZQh3qPn+MXxiN2q5JV1zhcCydqPyf2E0tul+sSzvXWfli2TjaIRqgfYGg9pNZ5pENoickOa1QQPDf7CohYGb24QcwMLjVlJbG6oS01VavI6IKsTFjqi1CEC01v3hTLHF2ukLVBCcI2vkYspsVxye/o8hTdiN7/NtL5LTURFRGVjahtTMuMo9xA5KM+gWk27lW4aqlESqyRe4B4OjVZEsuJaMicUNRGaSegRSR+08zPXtdPVlpWkqCSrlWJZeCwKj4MmJPI0o7AmkhJqx1aPUsvOqGY6tqTBkt2RYBJpQpmzPHyTUVgziTS+bDBGn7HdrOuaj3zkI3zTN30T4/G4b+PbOuawKW8D6Ocx099I/7t9+zYvv/wyk8mkD+7p+pwQok+G0U2TK6VomqYH7ePxuK/L3/mi5I/8ryk3Zpa/8x9UgOAkt5zkDoCctMGrx73MzIGM43b9SeYAiBAwCmCZAwbe/8wH0PFbpIFhHimi6IjJNGN2IeOmnyP9DOFl4DvALYIML1zjh61UJHWyEWsldRlT5QmmSsirhDvLFF2lVMsdXrhzgSujKe+7OCKbjzguQuZZyEnmc5JJVoWzkMxKKEqXeMoFPNrTgEdlkb7FBg1RqmEaEE+WXLm54MbsgNHUAewwzvHjEuEXGOUcRTQudbphqMXWSGGwtg10bEF2QEAoQmIbkjAiJWKMS5s+I2HHJoyIiYmIbYjPYLpcyCvvpG3cyu6xyCW3c8myUL1M5GRNOxCVA6DdMtgtIF+0LPa0DRDu5VSD92u7hv0pzOKKq5OSC1Ovl2LNUkEUyK3M53C5toIFgoWQHNmGV63hyJbMRcGSkrUoyShZNw1ZpSkqS1FZmjZhlq4kpvRoSo+yjCiKmCJPKMsdqiJ0No+ljxSWOGqIA806MFjPcjMpaZpDfL9i7AlywGj44n2Plw5PGeuOyXYAvGW2W7C9rgWhb5mMYJxa4sQSReBHAj+0iAjsGBrPUHqWQro4emHBN5JGSzJAK0uuNIFfo/wa6VWgNFY2WGmQ0uBJkNKwJ13AvVRgEeAbqkDzHYHBaoEuFbaSyFqhtCRoJKGWJEaSGo+LVjJFsItgB8GokSSNJCskf/oLKata8H3vXvHqpGCSe/ztVcRfa8H3onTg+6QSLEpJrgWJZ5kGhmlgmQSWWWiYBJZp4Na/a6IZB4aJb5gFlv/qiymffRDw3LhhmVvuzyXzQrJo5TiLwr0v25kFJ8uRLplZuy6rJZFnmYSGaWQJ8agqj9CuCUWORGOxSCyhJ3sGuwPjHTAfMvF9IKfvsw4iHu1e5t7sMg+uXOf+9fdy78Of4OjyZb52/SprBWWgqHxB9X/+Y9
hY8dlE8bkIRGCxjcAUYHOLzTX3K5hrSVBqzGGNvPsIMV8w0ZobozEXooSpF/DFX/wMd954k2T3Mpef/ybsaJ/Kn1L7Y+7PQ1ZNwiqaoWVIbTy0bSWu0iDaeBJ7U/CllUXVGhU0MK2wQU05VqhEIPwGP87xoxJChbYWqwWytoR1QVSV+GWFnzfItUVmFnsoMWuJzjyqPKBsQoyvEJFgvNOQXlwT7ub4Y42IDSaUVATgCawUSE9RWUVtPKra57AIubuMqMspGIsSLii8vz00YLULqscIpyRoHKlJbaCCuhT8pl8WLcgGina5cgCb2CJigQgthBabWpKR4VJime5Y9hKLTSw/F9d4ieX3JR7/Tujz543kxtryj/4/Dib/8K9t0PrxJJTbEmYOscCQ8DhP3nZeEU/a4OjoyA4rs6kD21apJ30e7mMIHs7b13mjjM3fbAtC0Vrzwgsv8Mu//MvUdc1fuvv7eat0nqWeaBipnEgUBHaNqE6w5RE6P0KUxyi9JLAZOwlc3Yt56tKYaxdSLs0CdsYRURSeSRHc6bK7Ka6uTud5xw+Xf6bc479YP0+DQmD45g9/hueefhmtFT9f/lpWjFG2IbAa32pCo4mNJtWGsTZMtWan1ph7DxAPD5lVhmllSZvG2SkCf/VLv4UvPnoaACU046Ag8Utiz8lGPFEhqRBCoyQI0QY9WtBW0lhFqRWldu42hXYON7UN8agIyAhaL3bhwf3xs+C5iEYlKnxZ4oucwK6JZM71vZALqWbilYxVzUjWbsRtCxIKElERmgLdNPyhf/gH+nMVexWjsGbWTmWPI80k1kwizSh0NpWTSDOOmv5zGlSkfo2wztd+Pp/zpS99iclkwoULF5jNZr2esAveef311/sgnqEPfnftOp3gpUuXuHz5Mh/84AfP6AmH7fdJZQjOz+uwXdvZJjcbtv8u211d1/3vuodPXdc0TUMYhr3vbnfMXVBTx1CFYcgf//TH+af3nMNB6FkupDg5TkIrN6OVl9FLzGbdd4P1oxB+9G8r/uufkIDA82r++J/+czzz7teoy4CmTDF1gqgTpE7wbUJAQiwSUpkw8ROmfsJumDALEiKREJLgEfYqFmMMT/9vijt5e14KC19xMhevZbGjENJQMIos08jV9UIC+yPYT+Hq2GVz3U8tu+1xBB78LvGPWLVPu9DmPCtex7mreMgWYDuI7djpmJARThYyIWFKzA4pU2ISIucoIvy+7k/S1g6LMS6IzzHUcLS0nGTwb/15+cOcn6Ro+B53LPYssadykdb2sgsM7mwvu/cu5mGSSKTcHqxXNfDffFLyZ/5egAjhr/+RI24+7XNAybEtHYstSjLhQPba1KxLQ1ZpyhKqihZkS0wl0aVHVQYURdIC7MhZNZYBZRlgjSAKnUvWODRMI8NOBBcCGPuSkacIhULhWC9rQWuoG0teC9aV078+WMDn3lAY7cJzTAOEMJpBEDugvTO2hBH4kUUGAhtYGs9QeVAqyAXkVpAZQWYkpZF4whB5Gl81+EGN9GtQNQiNVRolLJ40KAmyBdhIi1UWPIv1jWMxlcVU7pxQKWQt8RtF2EgiI4m15LV7iiITfPx6wzf5ARMjSY0kbiSeERgjyBs4LhyYnregel5J5mXLeLfrl7XEl07uMgkskxZoz0Lbv4993YLtbjvDxLd4uCRd6wrmhWyDTF0smWO2Za95P/3eLS8riSct06iT5BgmoWUWGSaRZRqevk+jUyA+6SQ8kSHyTvvTG2+8wac+9alewvfprz/EfPO/xyi8iwrXPKgN02efZakUD6qCMgqI9vcwSUDlQ+VZRKIgwgE6AbYUrUwERK7Ry5ygssxUgJeVBFkOxyc8fOklmqM577v5NM9dvkK1XHMwz3nx7gmHlaKJdhhffoZwdoXC+iwKS649rAgxwncajsYitG2lru4+JgKNFzZ4kUbFoL0C4zf4Yx+RSExgMYEAK9ENBLomqkvCqiQoKrxcO4lpBmQSkymazKPMAoomovEVRIJgtyK9tCbcKVDjBhlbCAG/dT2TLlassR6N9alq3w1889Cx5FqghEZYjdAWXTuA7AmB1rK1anY5DBzAxr3KIbjmFGB3vhgRiMQiIosfu1ecGMaJIIos10aGS2l7T48tlxK4msC12MldRoASgq8+kkgB7794+qzt7mdZafnOP+fzym3Jf/fDFb/zW8yZ52s3k92VJxkgbN4nhXDZ2zvsMNwnQBzH5z4A3jZR0SY4eBJAH5Zt4GQIyt+pbvW8fW2rwxDkLJdLXnvttR6cfHPyed4qfwupLPiPn/4fCPUxq9WKhw8f9p6jb775JtZakiTh8uXLXJ9c58qFK+zvhcymCeNRQBSFZ2QDw/OzOWW8yZZuW/8hNSdFM0fyR69/npfFI7LbE6b2eVa7Iy6XFX/la68Td2mwpTwDqIyFdSl5+Q3NV15es9ARR3JEoyYUOiTTEY2nwLdIYbk6OaHSkmUT8qCaoEuPSLpENpEqSbySpFtWBbHKCcjwzBLPrAhYE4qMSBb4dkWxnlNV1em1DUb82LUfZRFc5MMHP8mvuf93eiBqjOFDH/oQT11/qgem3VRhp1/tzk3VVAjgo5fe5AsPbvC+Cw/4C//ml9jZmZ0Bw0/SQp4mmIKmEX2ATefrul6vKYqC8XjcJ2TIsqxnWLqMsV3ptIhd5yzLkuPjY5bLZZ+F7bx2+04Sdm2Oujfft8lfiqLg6OiIg4ODM8Gg3TbdtHB3TMPp2W4at0uu0aWf/p6n3uJL955jN7F84T81XJlypi5n67x5UxL9O8D3fYvl//VTAJb/+c/9DHf3XyOxU36v/2eI/PTc/Z53fraVH3rW8F9+VfJ0AhcfwGex/Nsfhf/xh4bn6/x9uHWb1w6+i6v8Xd7kKWLexQPuW/h2+yF+j/3uxwZg5xENXSlrOFpZTtYu6+k8c4mZ5rngeM3p+vb9ZPA+z1z9T+0uBbMRAL+F08iOO8BXB5/793v/zdHDwDsdtJ29T0nWtWPPDzLJnUzwQia4fwgPcs1RbjjJGha5C2LOMkG+VlS5R5V7mFohPY0X11z/+Ov8hZMTms94FGVMUSSURUJZ7DqAXbiYgcBvSMKGUWjcTGNo2Q8Fs1AwUoo48PB8gRhbBAJtoNZQNpBXlkWpOMoUB0vBwV14NYN1DrZpUyYIEMZpcI0GU7dBZ00LxNvkRo6+BjMGYuBDsHoeImVpvIal16D8CuE1IGusNChp8IQD2EpZYmGJlWVPWaxnsZ4Bz4FsAFMqbKUQtcRrJEGjiHRA3AhGbbDdFMkugokRxLVAZYpmLckKwUkmOVzDUXYaIHtYCh4tBctMQAI/dQn+QRtuMQ3tBojulg07oeHpySnL3THdgTQI6+wz520m4JPCydLmpXQmDMeCVwrPAe12fQe0gR4sT8IOTLvlSWiYRZqb07rXvE9Cw8hvmMXud0lw2j+7PtQBmcZa7tdwT8N9I3jTWI6s4BDLCbCoBKvGkilLrgzlzStU//7vwUbWBTv6LiDflpcRhYXcEDQClTfo+QoxXzE7WjM9XCPnc259+QWeu
bhPjKScL7l65SZ3lw2HlQfjixwWgsPMUMU75PGMQjs2uDEKM/nNkFjuVJafft0xv1gLFxtEUCH9mjKBcCwhseA1pKmFqKSWORiFNRA1BVFdEJQVflnj5QaRGVgL7Fyi1x515lFmIWUTUvs+MpbEexnJ/opwp8KbNIjUwC5Yr4sVU208eQuwG5+m8CmLkKoIoJEc0yCtRiwM4kS0OUokVjsrZVOL07wlGicD6XKkGNB4LirTtrOR9wWVtYjYIiOLii1xbIliwzi1THYsu7HlYmK4GFsuRIb9yHIlsVxPBJdDmEiB5HzHpM3EmNbCqoD5seD1dpZ3mSsWuSM2/rc1/WzhSSY4WVveOpC89lBgEfznP+bz278pe6KE+0lS7633/7IkyzImk0lPBGqt++RV55UnMugPHz60m4D8PID+dmz68PfbGPknMejbgMu2/WmtWa1W3L17l5deeolbt271F6+ua2qZkgQgbcNiseDw8JCyLDHGcPfuXQ4PD4miiNlsxv7+PlevXj2TMbTL3jUMGuq8Sjvgs5kQ4p1cyMZZLvDzr17gv/rUcxjg3/3wbcaTmjKXrEuPVeWzLJ2WfVV5feKjdaUQQBrUBHZNIDJiVZL6FSPfMeSxKvClZhpUpF5GJHM8syTxKkJZIOypM0tfp9amqjt3ZVn27GwYhqzX6973dQhifd+5sOQE+NWyvy7WuhTCH/nIR3qA2A00uvTKmwMddy4lXnqJpy6lhGHwxPO42UaGvsJdPVerFW+99RZ5nveDgslk0nvBGmNYrVasViuyLOs7ZQfOO123UorJZEIURXz0ox9lb2/vMbnKeRltz6v7eW1lCIC75bquefToUZ8Zt2manjHv3AuGfau7lr1mtqp6Fr2z3hJCkOc5QRCwLBWTxOO5Z25w8eLFftu3O/eby1JKVrmr9ygWFHaFR4gacAObx7o5a/Ckc9ed36MSxp7Fk5LjXLCXwhB0bx9cvP3+F1Sk+FgDD/KCah054Nyy2SdrMQDcravU8L0F2kUtSMPWiacD2qmLxemChofrN99H0eODDNnS2uLfIwRm+HaXPXuNiGvS19eCoHrKE83lb9k3v3ORwyqHLJPkmaLMHMiuC3cdvLBBBQ0q0EjPID2DFS7zrUW6B7tRaOuhfBdAnSQN08QQepY7jwShhd/4XMN+GuIhneu4FRhraYygagRlTe8ecrQWHC5hvnJWpLoW+K0tpbAWjHAguwXYugXZtvNep53C7szifcAXEDi2TUYWmRrUqEGOGsSkwR/XBDuacFahQyerC32DFk7S5IXmlMX2LKYWmEohehbbyUSiVioytpKJlewgmFknEVGZglxR55JFJjnKBIdrOM4k8xyWhWBdCPLWIrVunGUqWpwGNXsMPOUtSQijyIHgncSSV4J//DUfAfzhX1fwQx8vqTUsy1M99knhNMwnxem6RR98eiohaYw4w0hP+yBS0zPa3XIPxIN2cBVZRv8/0v482Jbtvu/DPmv13Hs+453vffe+CcR7AB4IkgBIMKJBiZLoKsl2FJVUsiKnIjsKS5aiRLEj26pylKTicomy4shSJJG2pGLMiKIgEUUmBClSJATABEC8eX7vzvfMwx577rXyR/fq02ffc+57lLpq15569+5h9Vrf9f19f9+fe/peM23UgKWZlmwpwU4p2NOCQw2HCMYCpmhmFiykJrE0qa3JHEXpaLSrwK37z0wiM4mVC9xc4pWCsH70taCvYVBq1HTC7gd3sdIcHac8vLvFhae+j7FymWqf49TC7m8QKYdZCpl2EHZIoe2K5S1A1FI3A7CFo7D8Ascvwc4p7Qy7K3D7DsrTlK5CSUFZamylCHXNXmc5TppjxZWuWy8EKrIoFzZF7JBELnERkFsOoiMI1uZ0Nud4wwyrXzHY2gXtVJLPUliUomKwc22TFy5p4pIlPlnqonOJ1DXAVhpyjVYCCoHOJaoEigpsN25vpahlffUl1FQdjdbVZ2aSASA1UlbFAh1H43uaTt0eVjslF0O41Fdc62keTSV/71WPF4eaf/wTcwLryWRuu+8tS11VxU6MS5NoXJpOXptcmEqKN1l6raGq2RGcGC40Mr2OeV9Vqu77irLU/Lmf9Ykzwf/uD6f8Z380fWw/zxtDzsKkxvpSa9087+7ukmUZGxsbTKfTprrsCy+8cO7JeSJA39nZ0eeB6I/LhC9/f5bl1FkHfRY4f9K2p9Mp/+yf/TPef/99Xn755aZqVhiGrK6uYlkWTz/9NCsrK6Rp2oBz3/e5du0aP/IjP0KWZezt7fHw4cOmUtdoNKLX6zXld5sEkPpY8jxvqootA/Tl5SytUpsN/6+//jS/9HZl6n9tEPPc+pyum1cJpV4l3Rj4qnpdSzi6bk7Hre6s7e3tpgKYSYIxWeTLPvaz2azR3hmG1axnog5m34qiaEC0CfO012trqW3bPgXuoQKT3W6Xz3/+86ecb7TWDfg319jsk+M4TZU2Ax7PmtidtyzrvMwxxHHMwcEBDx8+ZD6fN5n2g8Ggmd1GUUSapiwWi8Zqy7DN7Ws8GAzo9Xp87nOfYzAYNBOCJ4HBJwH05WX5eM0xjcdjtra2mEwmp/7Ptu0mobQN6s01afv3tv1wlVKkaUoYhk3l3E6n07Tr1dVVrl+/zmAwaBjY5X0871g+Drtw3kT/9wLQ28t5blJpXoHl47l+jKU+Ya8F4+jk/XhRJa9OIpCi5chTV0cdhm3QXXX6RjYy6kA/VKx0BcOOwLUfPx+GxZ7G1T7tLjQP5rC10GwtSnajgoNFyThSTCJYRLBYSJLIIl5YqkxsqQoL6ZQNwLacCmRrCVpIlJCVCl5XIBtL44clgV/QCwuGoWLka1Yc6DuSUEo8YWELidD1/VMK0gLSQrCok/6OYziaV5GAaVTlgFha46CxNGgl0KoC1eWTWGwDsA3IdisXCuFrrFAhOiWymyN7JXa/wOsXuCsF7qDAdjVCnshEDIutbQVuCYJGJiIyq2KxSwu/lARK0lMWfQQDLRlqQZhLrFgiFpI8sohry8bDBRzVEQ8DsKMmSbiujFqKpvKUtMGxwXUrT/mOpyuAHVQyo5VQs9rRbHQVq6Gi5yk6jkbIqjiQ0bkfx0YuIk4x15NEcBhJ5pkgV4KOq1rOLAZIPy4RGXiKQQA9t6zAt1vSdTW2dfq+a5MdidJsF5KtUrNXCva14JAqRDMRMJOwsDSR1CS2Jrc1haNQrgavmujoXCBSicxFNckpBEEh6ShBV0FfC4ZaMBSSUEtCJAESUVbnej+VHMSSwwSOs8rpZpZW2v84q4rC5TWrK4rHAbYdVPcGTobTkRAolFOgXIXVqTTSZalxy5igzHCLDDctsJMKYLMAbeQhC5s8ckkirwLY0kX2NOH6lHBzgTfMsQ3A9qgdwixKWTPYunpkhUOW1gA7cVG5hajthYVSiFKjCl1py+vicrqUaDOZK8RpgG1AdtM5cjLpE1QPqZBSY9kKSYprZwRWxsgvEPNt+nrK1QH4i11evLrCH/jcJ9nwHaylSK4ZTx3HOeXDDmDbLvPU5LDI+p6xmCbVhLUN
pKexaID4tAW+oQLX/bDt4FQX/FsG3fV6g0A3ieY9X5xpcnAWdjDtfRprDmeSG+vqY/3mdB9+QoQppdje3iZJEsIwZH19nSiKGI/HFEXB5uYm9+/fZ21tjfl8zmc/+9l/PYnLR0lLPg5wPutzw47CyWB61kD7cQZ3c3K/9a1v8Zu/+ZvcvXuX/f39ptGMx2N2d3exbZvZbMazzz7byBk6nQ5aa7785S/z4osvNmClKIrG0WU2mzWuLm3gaS6G2c+zogvLk4tlhhg4VYDpT376Ie/sdfGdkv/L73+bUVA81lkuW/KcnFPY2Njg+PiYra0tAObzOcfHx/R6vWafTSlhw7QaZtg42+R53uillFIkSdIA8bMqw5rjbHvztgG7SU789Kc/3VQrM0AyjuMG4BtQ7vs+3W6Xbrd7ZmGP9jF/HJB+InM5qdgmpSRJEqIoaiZck8mkqUDXljDled5mLJttmomLmaicF/5qt4/zwPt5y3Kb0lqztbXFw4cPybKsmRCZCZYJmy1X3D2ruInWJ+4wpsNtu8XEcUwYhkRRxNHREdPplJs3b3LhwoVzfZ3PuwbnHdtHrb/czpSirtLaBtaiYbENYz2OxGm5SFTptw2LPeoIBqZWwil/ebg4OkmANJaYlUtP9dv2CNjev6yogPxRXYn1UST4zq5iJyrYjxTHkWISaeZxBbCjSJIubLLYoqhlAobFlo5C2iXC0mgp0UJS1kW4CmWhhcDtl7BSJK6XRJZbTqSlx2i9r0t5VBbWNE39eZE5KRr9Fz8b/eelUhRlRlqIqohWJpmkNsdzh50dwfsLQZxKhNK4NYtdJaIIVK0nVcUJyD6bxdaUDpSOBE8jPI0MFDJUyG6B1StxegVOv8AblnijHDtUjRZb1w9lALajEHXCo0olIreQuYVbg+xA2XQKq5GJjKgAt5NImFuoyCKLBPPE4nCuOaonX5NYsJ9AlIjKIq4u+qWaYwJpcZLD4GqChi3UXB+UDC8q1jua1U7JWliB345bVQjOlWSWV0C6sg6uALdJcDxIBB9OTUKkIMoFga0bRrqSgRi9dcVg3xiW9P1aG+4b6ciJrMQS50+KSwV7uWa7FGyX8D6SfaU5Bo4LwawUle9zDbBTW1O4unJ1cTXCUxXYq1lsOxe4hcArKga7qwQXlWBYCvq5oKMEgZb4dcJ1kVkcJRaHqeAosxhnJ04vW5kgySHNRWPVJ5RG1G1MK4HlKZxA44YKy1fYgUZ0c8SaBk/jOWCjochxyhgvj3HyDDfNsdISGWlEBGohUYcWeeSSL1zi+ARgWz1Nb2NCZyPCHaXIXonsK1gXVd0OS6KETSEsCl1NdlUpEQmQCuLYZ5H3EEWBpUvEWCGOQBUVE61LqwbYomaxqQE2p2QiWts08WjNaXZbgJAaIRVS5Nh2iecUuFZM10tY8wus+SNW5YKBmrD99nf5wrNXeGp1QBItePnllxmPx3zuc59jfXWd1157jV6vRxiGXOxf5M3tN7Ftm0veFQ7nOeGsz6NHkvfzClwfzzWzxGISCY7mDuNFQJS7TGPJPLOZRoJZajFPK61336+sTnu+qnNfVAOuh6Hi6koNumuL1H79uu8rOp5GiNM1d5alm+eNG9VYIdD68THmvLFKCEHH1XRWKza9/fmTQH07Ih1FUSMZNZ72i8WCe/fuNay6lJLj42Ns2+bg4IB+v//YvrSX3zNAb4Ps85YngZBloPVR2tzl/1sGLEIIdnZ2eP/99xFCNGVvDehon8R79+6xvb3NlStXeO655xgMBly+fJmnn34aOJkk2LZNp9MhDEPW1tYaYFcURWW/mGUkSdLoipIkIUmSxnu1zVS2930ZoLWBpxCCy/2En/n3Xmmt/+TzbLbRnijcvHmT+XzObDZjPp83TKnZNyNVybKsmQGbimKGOQYattnIU9oJEu3Z8rLtlQGD5tlxHD7zmc9w4cKF5mYzAN227WZyEAQBnU6nYcvbloPL0hE47S/8UeemDdTb+2rAtcnaT5IE27bxfb85X23nluViJeb/syx74n6Y5axoktmP846lHYW4f/8+Dx8+bM6Nmdy1gXX7WpnP2hKk9jkxrLqZMBvZi/FJN+x7lmVIKXn33XdJkoSrV682ZbLP67jOW5KsloZElSzkuNZYVyBbV4mQDQA3cpET3bYUp+Ufg46oddkVc31lVfPCtRO5iEmMHNaJrrZ1OrJyMpmEaVKB+50IthaC353Bzm7JXlRrsWPNLIL5AqLIIoksssgiT2xOsdhOiXRKpKXRlqikIriVy5K20Uhsr8AflKyGBf1OwiiE1QBWXUHPtgikg4uNRICuQFZeVDrsKINJavFP37S/lU8Yolij4BkUL1EgKEmrakNoSvi7j/xaInIaZD/OYmu0DakjSD3A08hQI8ISGZZY3RK/n+P0FM4wx+8XOMOycmGyNFqq0yy2U0kUTMKjcRSx8wpgV44iNt1C0s8lQyHplgIvthELizKy0LFFFME4tproxiQWTBPYTgVJSsViN8y8ADTCArthsCH0NB0f+oHmcl+xchFWO5rNHqx3K7DbcSq5TqaoqgAnLeeW2q1lHFeg++GhzXSrAuHzrEqwHNQA+ySRsWazfc2lnuITa3nNZp+A8WrdynL4LOKjesAk12yXkl1VgezXoWKxM8Ekh6mg1mFrEluR27X0wlUIt3bJykQjE3HymsUuBZ1SMNJwoxT0CkmooKMlvq4SI/Oiss48yhwOEpjkFpNUMs8FB7kkrgF2XlQJgKLUiLKeyGmBdBRuqHFChV0DbNlV4BfgKRwHLBS9MsUrYpw8wc1SrLTATkpErGEhUGOjv3bJIpck8omLkER6uIOC/uacYD3GHWVYvRI5VGgPlGPVALtyOTNyLV0KZKIQqSZJPBbZZURRIFWJHCvEYQ2utahrFcj6WdSe30YmQuUwUmq0ruyKqw6m9V1bJmJVVbilyLFliisSfLmgZy+4FJRc7pbkux8gxo/o5lN65YJLFzYJw5DhcMj29jbb29u4rsva2hpJktDtdhn5I25v3cayXaQ7wvUGxPEq7x+vM4kEd/SAY0sz2b4FBwO2Jp9GzXtkdChud5lE/y6ZDkjv+EhKOu/mrHxbVsDZL+n5itBJq9duxkaQ4smYqxd6yHKGbyWs9i3WBw6OyPB9ryH7jE2l1rqRCRuTDTOenia5qsfyGNkG2GeRoKZfPy/a3sYx7W2cNY49Cesuf1YUBcfHx6RpymQyodfrsbpaFf9L05Tt7W1s2yaOYzY3NxmNRozH40bue97ykUmiy+Hi5YNffn/ewf1eWM8nvW8v5v/fffdd3nzzTb75zW82Yf/2MUBVIKAsS9I05fbt2xRFwU/91E/x4z/+4/i+3+zzcoMwz0Z3bBqZeeR5ThRFRFHEdDo9xbabbRp2un0uz2I028fcPm/LIZQnVd9zXZdnn32W9957r5GkmIlFm+02nxmwfXh42ADRs5blCUdb6tJmzw37DNDpdPiBH/gBnnnmmeZ3y6x4ezLTZnzbDjhn7dPHTbo0YLTtxmI6CXNNTLsw2zVJk+3zbvbb7FdjxdW
akC1X+jO/PW85r0rnskSlLEvu3r3LgwcPTrUNMxFqt8c4jonjmMFggG3bTWhtbW2tKcxxcHDQ6Mq11hwcHDTlri3LaiZvZnKXJEmzL7fv3GX3KGXlwk3iwmcc6Zq5Fg2oPp5XzPYkagHtqALeJyx2DbIbOYiuWO0OXF6pQLXxkx92YKUrGXWh451OKGsvBvzvLzQPF3B/Do92SrYXBQdRydGiZBprZpFgsaiSHdPYJo8diqQqeGR7RQ20S6SlwAItBUq4KCSFslFUYNxfKeheLumFOcNOyUoAKy4MHIvQsiqlvbBBV04sZSnIlCDONLPUYpxYjBM4OIT378L3osrRxBUaR4Okws4GVJf1Q+U1wC70lzFNyKKtxfZxhI+noat1GihEqBDdAq9bsdhev8AdFrjDHCeoJCIss9hO9RCuqo4hrZIdrbySinhFJRPpKIteYdEpNL1C01U+TC2IbcpIkEWSeVxpsSuALZkncJAI4qzWYeeV20qleQVhaSy7YrA9FwK3koj0AljrKJ5Z16x1NGtdzXpHMQo0PV/TcQWKCmA3RX1iTl7XgPvtQ8HkUS0XSSv50sivNavG7cc8B5qLfc1zGxXY7rklPaeg61TVl3tuSeAItFanJvLtZVEo9nPJjhLsasH7WnCg4TiDSS6YSs1CahZW5XOe2YrC0ShXoV2F9HRVWbFmsZ1cVFr4hsWGDQ39XNJJBaES+FpgFYIyl8SpxVEqOUol46x6zDPBcS6ra1BUOQKqOANg2wq3UwFrx1fYAYhugQhAe5Ue2dYlbhnjlhFunmJnGXZaIuMSGdf666PKPSSPXLKFRxQHxHlIYnn4w4T+RoS3EeGMcuxeiVgBPBvlCJQla3BdAeBSW1Bo7KzATSRZ7LOTXUUUJVIVyGOFONDoUoOo5SFK1pM4eRpgF9QWfhXALrVFKWppyBk6bCEU0iqxrRJbZnhWhs2MjjWnr8bcGAieXfPwo32OPnydIJ3g2idY6aWXXkIIwauvvtr0/cZ15sUXX6Q/WOHtgxl3U0WUjbifb3JwdAk5H2HNVtk+eJatgwWZ7mAdrbLIHHJCUhUwSywy7WOJEldEdKcFPb/EFTFSXkV2phRasOIliOAhPa9k2IFL6yEP77xBP9DcurZKPDvkxo3rvPTSSziOQ1mWZFlGHMdN3t7x8TGu6zIYDBrFQT4pyJy1qkJ7VhGYAL1ej7IsGQ6HCCEaJzETYTfjy1kY8qzlrMlse1kG6GdhwvZvzwPoy4TqWa+hwgOe5/Ho0SOklOzu7gI0Ee1ut8vdu3dZX19vauv0+30ODg7OPUb4GIWKlkH6k0IEHxXOXp75nLecNSt60sXq9/s8fPiwYWGNp/NkMmFvb480TYnjuDnpaZpy//59vvKVr/Dcc89x48aNhhFcPmYDvNrSjfb+mSIGg8GAixcvNqDJhDyOjo6Yz+eNhMI0RvM7403dLnxggOWyZObjLv1+n+eee4733nuPyWTCbDYjjuPm2NrHaUAznLDfbfC8bAtkAKFZzzDM7Qad5zkrKyt88Ytf5MqVKw0Ibh/HWUx0e/+eBMCXb87lpc2at8Fs+2FZVlUSuSXzMJ2k0dcZ8G3AePuam8+llKd09e3tmX39ODKPZYa+zX5/+OGH3L17t7lWURQ17WexWHB8fMz6+jqWZTWTRFMJL4oiZrNZI+vK85zjaUxuKUrpkiif+9sK7QzIdIdUB+wcJpSyT6J8otxlElvEhcsid1mkNkJUjMpKD1Z6sgHTg1oqcnlF88mr+rR9Xy0TGYaVNtecx9PnoJKwHM4VWwvNw4Xkwxls7RTsxSWHtRZ7GsF8IYhqLXYW2xSJ9TiLbdf6V1EV4lK41eCrLYStcTsFwVrJSpDQ75SVLtiDgQ0dS+IJGwcbC1BaUqrKVjDJSxYZjBOb48TheA5bu4K3F5CmAltrXCq8LLVGlQJVVjrssgbbDcg+g8XOHMhMsmMFspG1FtvuljjdAm9QUnSLqTXKS6tXltLRCEsLIbUUtrKFrS1pK1s4yrZcJUzCI8ZRJLdwClFpsUtJp2ZP+0rgpwJrZqMjC7WwKGJJHEsmsan4WclkJmmt/zUA20hftEDIqoCY61QstpGI9PxKT3pjpFnpaNY6ivWOYiVU9DxN6GikECxywTSzTmQiaaX/NkD79kTy8i4N6FaaBlgbt5CBX1eb9qvH9WGtU/VrK9DWuh0XbPvsin5CCHIFB6niUap5lJe8U2r2C8G+Mh7dmrkQzKUksTWZrclbyY7CUwgLSEV1/jNR+5ILwkLQUbCpBP0COpkkUDYhAquQqFyQpLJyr0klx7UP+SyX7GeSqJGICMq6YJao5TpaCYSlcLsaJ1A4gcLyQYYFYrUG2LbGVymuinBVjJ1n2ElWyUMShYhq/fWRRb5wyCOXdOFXALsISURAuDrH28iwNzLsUYHV14hVifbqytOtitOFtlFaYBUFbpaiE0hjn+3sGqIoscoSeayQh9WkBCNZKAWqkNXEoRAnEhEzsSs1WktK7S4BbKpnAYgqZ8GyNFJk2DKvLDtDRUfMkdNtLnoxQzUmSI/5gacv4sYHTA538X2f/f19Ar+qwO37fgNYZ7NZk+/lWh3SQ595amPJS8z8W6QqICk84sLjlW93iAuPefpJ4sIjKlzi+rvkgU9autiirGqOWFXNkU5ZEDo5Xa8gtD2urmdY5QHDziGuiFgfOqz1bebjh9hqTjewOTjY58qVK6ytrTGdTnn33XfZ3t7mmQvPcPPmTd544w2klAwGA9ZGa0QPdgjsAN8ekta2qu28KpOrlGVZI/81MlWlFPv7+wyHQyaTScMWG9nqwcEBQRAQx3GT/2aMIYzU+Cwy2OCA5boebeXBedhoGT+dhSlPR1HPj9KfRxRCFd0+OjoiyzKCIGgmIGmaNrlsKysrPPvss2xtbZFlWROZ/jdi0M3SPkkfZ1k+Gcsn/+PMjs5i5tvft5cXXniBF198Ec/z2NjYaJhA4+hyeHjI/v5+Y6tnirR85zvf4a//9b/On/7Tf5rPf/7zpxLg2se8vO9tALi8fwbMua5Lt9tlc3OzAYVGGjOdTtnd3eXw8LBxE+l0OqyurhKG4Sld+nJI56zlLB1/r9fjxRdf5OHDh9y9e7eZmBiGuA022w20/bzM4hoAagC50S23tWGO4/DJT36SF154gW63+9h+tfe5zUg/ib0/a3nSBK/9ffvYzDEYy0HDFBtwbHRjRVEQx3GznnkIIZqSzwYsu67baOvPCtUty2ray3L7NhMHs5RlyaNHj7h7927DYluWxcHhEZnyscN1dg49DqbrBPFF4tLn/vaQSSTojK6QqoDDacnxAoQ7Ii79im1RNq7M6NZJxqKY0HFSVvs2g1AROBEdZ49RV9DzC8rkkAsrHhtDF5UestL3sG2LjY0Nnn322cYmSoiKHT6aa/YX8GABdxcVi70XlxzUWuxpVLPYc0kSV44ieWxTpBWLbXm6lonkSFuBBCUkWtQyEWWBFDh+gb9eMupk9EPFqKNYC2DoCnqWJBA2rrCRutKvVzIRQVoo5mklExnHFsex4P
gY7keCRSyQSuOJSoctNWgFqqhBtkl4zFvJjrWsomKxNYVdFXfEFVXCY6ARvkJ0FHanZrH7JX6/xBsWeKMC29cgDIOtKC2NshTKKWsWu0p41FmlxbZyiygXpS5ErAo516WYqtQa61IclTNrnO26WbHvlMWRw5++kv+FLK5Y7HGdhDVLJMcNwG4nOlJNGGTFYNt25SRiZCI9X9MLNJeHipWuZr0DGz3FaqhxdEzolAy6HebZada67SwyiQUPF5K3DkVTdTMrReVrXic69g3Abj1fXFct8H3imz3wNV1PN0XgzorwagTTXLObS7YKzZ0S9pTmMNEcxzBGM5WKuahlIpaqWWyFchR4ChlW/tQysyqZSNGSiSjBoBRcLiFMBWEs8ZWNU1YuMEUqmGcWx6nFcSaZZJVOfZwJtorK4jCrrwEljwFsJ1S4YcVeW77CCkpYLcADxyrp6ApcO0WCnafYaYGVlojkRH9dHtgUC6ey54s8oigkKjpklk+4WiI3Ne6aRqxqRNdC9yXKA+VIlHVSbbqksgO0ixwvixGJIkl95kkfCl0B7MMSWep68kmdMHwCsJUB2O17SGm0khRaVmEy3QLYRpkgqCUi4DgKz9WEgabvlawEip6eIg7vsiam9MoJk7tv88Mv3ESnMxaLOTs7O3zhC19obJg7nQ7r/XXCMGSxWLDwF5TaYpoKFpnD669t1WD6SlX/o7xFnLvE73okZUCmfVIVkOqQVFWvc+1hixxPxngywa+fzXuHMZ6I6VsxGzLGcSM8P8YTMQ4LPBljkT9m7lANEiDyapyRtsQqavvcsWQyrUgbadukacFwOCTLsoalvXTpEqPRqDEDGAwGpGl6StZpxpizct1c12V1dbXJS0uShPF4XE1KXJejoyMGgwHT6ZROp8Pu7i4XL15kOp1y4cIFJpMJSilWVlYAGI/HBEFAURR0Op1ThaPMeN0mTs3Sjq63P1vGjW0MsCxP/SiSz4zH50ldln9vcIPWmvl8jhCC1157jfF4zHPPPdeQZnfu3KHf77OxsdEQgU9aPlLi0t6JZaB+ngzjvG2dF0Y4D/ifB+SXP7Msiy996UtNGMWwuEJUmuyrV6+yt7fH22+/zXg8PlXM5f79+7z++uu88MILjT/2WR18ezZlQOp5Mp7l96bROY7TAPGnnnqqYaDn8znj8ZjJZMJ8Pufw8JDpdNqERnq9Ht1ulyAImmTA5XOxnKRobqibN28yGAx4++23Gx2USTBs/67N9LYZ9DZwbjPo5pyYJEWA5557jk996lOsrKycktM81slwwhi3Wfuzru1ye2gf93KS7vINeZY+3nQ+JnIRRVEDtttOPGY7RVE0EiUT6jPabSFEA+qX2/JyMnFbCgSQ5NTWULKV7V5bSdXa7N1jxYOdPtP480xjSVSzLHHhIoSm42Q4eo6t56wPHTpuTjwtsdWMK/6cgT9lYT1iph7wqeevsjFyIT/mlW//Fk/fus6tW7dwHIdXX32Vsiz5xCc+QRB2+ODeHruTgnDwNAt3xJ20xxvJkHg/ZJJdJt33SHKXOHbI/ilkSUEeW+SJhS5lxWIbHbatKumElGhhU9bhaS2qSrFuv6SzWdLtxAw7irUQVnzBwJKVTER42NoCLWoGW5PmEGWaSWJxnFhMEpejA3j3nuDbEVBWttZ27SaCpmKxCyhyXWmxW64ip1lsjbIhdkRj2YevEX2FCBR2WOD2FF63xO1XTLY7rD6TtkbLSoutLE1pK5RdgWwchXRVxfzlFYvtFFbli60kobLoaYuBsOgkEnvhIBdWldgWWbWjyInLzCSC9x+JKZoQxUUUN1DYKKRJKKvKl1L849suvlfZ9XW9ygXhqbWClQ6shCUb3YrJ7rmKrqvxHUFc1n7XianqaNjrqgDNXip4f2I1nyeFIHT8KnkxEA2IPkl6VDw1OgHb7YIzfa9iz+3WMPD4ACqISjjKBFslbBeaNxQc5HCcV848UzRzqRsttpGJlLVdn+irKtEuE1iZbGQ6finoKElXC24oQQ+bsJB0Cgt3LinzqpBSlAiOE8FRIjhOq7L1s1Swl1fSrSyvtPCVVR8NwJZ2LQ8JFY6vsYIKYIuVAuWCZ2f09AJPxdh5ipNnWFlesdeJhkhUkYx9iyJySSOPZOETRR3mRYfS9umuF3Q3E6w1gR5Jyp6N7ttVBVOndhGpGWylJUJrHJURpho7KUgTn3nSgwLsskQcllj7uro3hEBhAHar0Ew70bFxExGU2JQ4SwC7nsSKKtHRtsB2NJ6jCT1F19esBIrNsORap+R6p+RyUHLR12z6iq6rEJxP4Jh+9/79+3x3+gbYA47nmjK4wv3ZZQ5nJbNYspAur3+rIioW2Q8R5S7xWy5J6ZMqn0T5FNrDFhmeiPFkjG+l+FZCYKcEdkroZQztFN+e03ErK+OOm9N1CwI7pecphE4buefy2GbGpTaZo5RDZWF0ftJgO7q9fI8YssysY9jp2Wx2Kp8LYLFYsL293UTwkyRhb2+Pq1evopRqJL/GWtcQegZ4lmXZSFtu377N5cuXm4iz0V9vbW2dwkhCCLa2trh06dKpXLc4jhkOh1iW1agcer0eSqnG8rcdxW+7kZ23LANpA/jb12AZpJvPlslXY8+8rLBYJp0HgwHD4ZCyLJnNZjx48IBLly6xvr7Oe++9h+u6/NiP/RhBEPD22283uYtbW1t8//d//7nH8q/l4nKedrb9uycBrWXdz0exoe3fnbXEcYwQgjAMmc1mp7y5u90uQohGj2tCD4vFgsViwaNHj3j99df5o3/0jzbhiWUZxln7e9YMrH3xzzre9mLAm9Eura2tNZ8XRUEURRwcHLC1tcX29jYPHjygKAq63S6DwYAwDAmCAN/3m20uy28MYFxdXeWll17C8zwePHjQgMX2fhrg2d6WuRnbGvHlBm5ZFteuXeOFF15gc3PzVKfUnkicNWmB0xr2szqz5fP2cZjzs/a3PXu2LKuRpZioQhiGjXuP0Yi1kzFNp2COKy9KprGEqMsrdyu99TQRtf+1bIrQNK4itW/2pP7caLEHoapdQ4x1XyUN6XoFq9ZDvPARwzVFkRxyaS3AUjPuf/Aqn3nhGXzf43vf+x5Ao2t899130dJj88rzzAh5FLgkw6d5zVsnyUKmxS0mn/gcv60D0g890sQlTf4AeeJQ/LZDmVoVe+qUWActmYg0yY4Vg6a0hXBKXC/HH5WMwoJ+N2alo1gLBANL0JGikoloB6gG9kJBXijiXFd+2InDJHEYTwX3t+H1CMpM4EmNV5PSqEr+0paJlLl4AosNsalY4wK1ZZ8YVI4idrcg6Ci8fokzKPD7FYttuZUWW9U67NKqAbat0G6JqBMeDYst86rwjFdaBMqioy36SHqFhT21EfM62XFhUcaSNJbM6snXNIZZDMcptR925SRSFjSTBWlzqhpq5YUtGASaa2vw5j5fR7CHYA/YRXAMzBFEVHOUITD8S79v9jPTupqjcRMZJ5L7u4JJ4jBJl0qm124lw8DY81WODFf6ik9uFAy8skl2HAS6Yb3TeN4MsMt9otaarIRxD
ju5ZLvU3FWwV2oOF1Uew0RoZuIEYKe2JrcVhXsiE5GOhkIgMoGdV1rsSqYj6JaCDV3pscPCxs8EXinQmU2ZCbKsSm4cJ4JxJqsiN5ngKBOtRMeqLYlSVyXFGwZbV49AYQcKK6jak3YVoRUz0gucMsEpUqwsw0oLZKLQia6rN0qKPYds4ZJGPskiYBGFLIoupRPQ39R0NgrC1QKx4qB6EjHUCE80AFuJSoetkEhd4pUxIi3JEp84CZmmQ8gFdlkgDhSy1JWfvBAoUXnK6zrRURVLALvlJFLoKpEhNwB7ya5P1EWabEfj2grfVfQ6imGg2AhKLocFN7uKax3NRU+x5hb0fYXnnF19cXkx5MU0lkzHgq3YYRzRkBfTWFaWfcYPu+WRPYk2ycrP48oc304rkP0wWgLaMX1nwsVBFUEM7JyOm9Pzyxpw57jWCYA2Y1qbhTXLieOXRAgP8Ooxc3DKTGE5It5EdvSJnLUdvV8mndrbaP92GfSfRYS1QWfbMtkYXUwmE3Z3d5uxc3d3l9Fo1DiNXLp0qRnLj46OcF2XxWJBEASEYchkMiGOYw4PD5uI8ng8bvTmk8mE/f39xhwiSZJmP33fb6Qeu7u7BEFQ1VKpybF2npVlWU0kedmkYJlcNOfCYJp2JU8zwTir/bVzzIBG8WCUEFrrRrpi8rYMVjDW1EVR8NRTT5HnOYvFgo2NDX77t3+br33ta3z/938/L7/8cqPw+CjVwBMBumEGz5qxtRtGW+JwFoA6C9i2we8yiDoPhC0Df3Oyvve97/Htb3+b7e1t7ty50xQc6na7XL16lc3NTfr9PhcvXmR3d7fxdy7LkqOjI4IgaID88g2xvI8fBRjbLOp5vzkvZGL+37ZthsMhg8GAW7duNRcxTVPm8zlHR0fs7OywtbXFYrFAa91YE/Z6vcbfun08YRjy2c9+lps3b3L79m3u3r3LdDpt5EBtqceyC01bo24aeb/f5+rVqzz99NMMh8NzXVXOaoDtTmk5jHTeeT3r3C13estLO1xmNNumo6lCgSEL1WeROljWGrocUIge2h5QyB5J6ZOogEwHJGVAqgLi0icufZLCRQpNzy9Z7cvK/7pl2zcINZeGik9eqVxGKoupsrKe8ku6foljnZ6wmHZWlprvvfEh34luc7AZcFwG3M363JaXOC4kRxdf4DfmA+Ijl6j/h8gzl7/3SlUVrkjtisXeqhMdHYWwFRzWyY4YT2wLYWlsr6hKPLsxgTem5+V0ZIRXJnSFwEMyP5xCCb3ekDQridMcLxyxyCWRcknLgCLqsX9s8eGiSvqz0fiAg66qOtYgu6glImUuKGsddlPWWtOw2KkNqSk84+nKtq/WYjuhwu+WeN0Cd1BWAHtQ4nRLhA3aUihZAezSqiQKylHglE3CY1U+3cLKJW4p8UurZlFtuqXEnVpYMwc9tyCyKGOLLK59sWsWe57AYUKd6Eijw0ZVAFvUEhEDsAOvKjjUD2BzCJ+4DOtdWO/BZg9WQuh7EDqaQlO7hpw8jmNaTiKAzYvAiAqI96kEA8dUFUTH5vWHhxZ9T7PRKXhmRdUsNw2LPfSh51WJju2+aDmSWCqYZHCYC7YVvK8FeyUc1vaWh8pnLiEuFJGlSW1FZusm2VGEFcgGkFntiV0XnQmKKrGxrwQXFYRFVbLeLQSkkjKT5JlkYUB2VnmCzzPZMNjGC1sbgF17YQurYrDdjmGwNXZQVlERr6An56zqCEfF2HmOlWXItEQkCpGAiiTlwqLYccgWHknkk0QBi6jDPO+i3ZD+pqS3qQnWFPZQYvdLxAjwQLu1TZ+UtcOHwNY5QRlhZ3kNsAOmyQCdW1hlgTyoJCKi1CcSL01Lhy1O7pulgjMVwK4lIpoTiUitwxa1TMS2NK5bAeywqxh4JWs1wL4RFtzoKDbdko06GuI7+rGx8HF2WJMWJ37WkyPJw8RmGktmSQWsZ61oYQOwG8AtSQtB6KrGls9Y7/WDykmkHyiurZS1hV9Jz6/61fnRQ+7ffp3x/j10mZ6qXdLpdJoItImUtsFyNVY4CBGcIq5MBLU6thMyqV1vwpBqZjtSVrU1lskzc46MDvs8s4flMfgsxYDZB7Nd85mRm5j1DChuk28mR86Mw6YWiVKqAaTT6ZT9/X0ODw/5/Oc/34zxplif7/vMZjPW19e5ffs2k8mElZWVBkf1ej0ODw8bIuzChQsIUSkbJpMJ/X6f4+PjBoMEQcBisUAI0TjQmch1EATkeY7ruo2L3srKymMToPbDHG+SJA3IN8/t679MCrZxrZEH2bbNeDxuiDujw5/NZgwGg+bcGyxmJh5Gs+/7Pi+88AJvvvkm//gf/2Nc122srw0xe97ykQDdHPyTkP6TEknbN/N5QOo8ecPyLHH5syRJ+Kf/9J/y1a9+FYAoitje3mY2mzW/uXfvHrdu3eL69esN6N3d3W3kIysrK/zYj/1YMyNbDn2cNTlZ3sf281mJfsts8vK5PIv1Put8mw5nY2OD559/vvltHMdMJhOOjo7Y3d1ld3e38YE3M1LP8wiCgH6/z0svvcTzzz/faPP39vaa89kuCW/2y+jpV1ZWmsdgMGg6guXzs9xeluUf7e9/L5EUc04bRkBDlNoscodZUldYTWxmqcUssVlkDtPEYpbI5vt56lRerZlDoWxsEhw9w42qCqtViLKg6xV0vIw164jATgjtjEGoGHUFHTcjtDP6ocWzzz7Dc88999ikLEo1k1iyEwu2IsEHsWRrV1fJjpHiOIJpBLOFJI6qZMc8sckTq2KxxWew3BdrT+yauZ2KuuiMrFwMLIHlFrjdDG81Z+Au8KwFPTemK0tCVSDSDBXn+HaIqBMd51GK5YZkeKQEzDJJlHhEusd+6ZJlLqIscXWJrRSiVGgtapmJoMxrR4RcnHj7nmKxKx323KYC2A6VTMTXyIHGDkucjqLbLXF7Cn9Q4g0quYvtK7TUpwB2aVcP7ZQIr6ykJLlotNhVdUerchTRFp3CIYxtxMyCuY1e2KhF5SaSxpJ5XShjFsM0hTgVpwC2VpVu1jiJOG2A7UEvgNU+PHNRs9qtQPZmD1ZD6PuV44gUtW1jDJMaXI/jSoNtPnv/EL79sH5fEUNV8mL9GNXPlZMIrHcFz66ffP9bH/JTnAbjsf5pHruJHjw4/l9U95ZgnsO0tNgpBVta8KoSHCaCo8RiIqjdRAQLi9pNRJO7VbIjrkJ6CuFoyAWyZrHdQuAXAi/XdErNJWyCUhCmFm5UuY7o1CLPJVEqmdVOIhWDLWsGu0pybLywlwC21QLYtl/Z9Ym1AsePGMoIT0fYRYpdVPZ8Mish0RALVCwp5jbFlkMWVQA7XoTMoy7zoosIQoabYzrrFsFqgrNiVz7YK4Av6kqOoi40IxFoHJ3RKRVOlpInHknNYOusBtiHJTLXyLKy2kRUAo1KItJKdGxb9dWJjiaRueoUOVMmIi2NbWtcR+O5mk7txb4WllwKFNc7OTe8nAu+YtXN6XslHbfylm4vy+Ob1tVkZ5ZaFWCeSl7fs5jFVV86TSTzxKrAdw2oZwZcJxazWJKV
gq6najB9AqqNL3Y/UFxfyenVr03Bmb5fAe6OV5EXy33/Mjtq9ts8vnH7bWYHt7lx7eqpOhoGMBm5hOmrjZWsGZvbYNcUd2uPW4bsOQvYGUcss13DqhpA2MZIlmUxGAxO4ZqzlAhnRZsNk6z1iUFDe13glK7ZSDMNY6y1bmxzzSTDAGBzbEYjnud5gw1msxmz2aypk7K+vn6K7IrjmP39fa5fv05RFGxtbeG6LoeHh1y/fr353ozfg8GA0WjEpUuXGttFgO3t7UaKOpvNGsOPJEmYz+cNlkmSBN/3SdO0YdwXi0VDTk6n0wbPGIxkWRaHh4dcvny5OQfG/OGsiLtt26yurrK/v4/ruo2zm5nIJEmC1pqHDx9y5coVOp1OM1FLkoSjoyM8z2ukO5cuXUIIwcHBAW+88QY/9EM/1ORvnbd8JEA/i1U+i1U/L1y1vJy13kcBs+V1hahKkn/lK1/hK1/5StPAjEuFmSlB1bjv3r3Lw4cPeemll5qMYXMhP/e5z/GjP/qjZ85UP2ofz2LLl8F5e71lDVl72+1GcpbGHE7rmtsTAcOeX7x4kU9+8pNNA5lOp4zHYw4ODtjd3WV7exspZbP+5uZm01iBJvnD/I+5/rZt43neqf9s67ra+7oMzJePY7n9ZIVkkbvMU5tpYjFPbWbpCZiep3b9mVN9ltjMaiAeZRZSarpuSc+vNIA9r6BTV1nteQWbvYSnRhmhk9F1czwrxpcxIp8wO37A3dsfsLOz0xzzsDPkwoULdHsDtN1jrgIOlc9U9Diwe0SyQ0qHReoQLxyynQ7prwniSJLFFnlsnbDYLR22sDVICy1slLAotUQLqwLYQYm/UTAIUrphSceOsIspThrRkRYkGUVS4lkBcZqR5QrH6zY64TwPSYuQxdTjuOhxT3lQglOW2GWBNNdEC5SSlC0Wzjw3AKBmsbUlSW1ZsdjuiRZb+goZKKwgxwsL3G5eu4oUdFbA64FwaoAtq0THogbYyikrkFcnPGJ8sQsLt5QEpUWoLDqlxI9trLmNtbBhYVMuJEUNsONIMo0qADyPK4lIUgPsIj8B2LQAtudUALvja7p+5ThzbV2z2ql8sDf7sNqBvi/ouuBYMDNguq6aOY5brHYiuDsWvLJzwnAXStTOISfgengKZGuuj2BgPvdhFIpm3e4TLCSrewiSAsYp7KQK/8vxSAblJ4WjLwpHbQhHrw1e0SvCVkNh675wdFfYOvQd61TRGeEVCA2i8cQWdWVHQVjCihJcKQReLvETC5EJdCYpUos0kyzSKslxkknmeQW0dw2D3cgnaqu+sroOjQa7tuqzfIVcKQn9GWtygatjrCLFLnKsrEAkJSSgY1CxTbmwyMaVe0gS+USLDrOoS1SuIzsZg80J3fU53mqCM8qxegqxCsIHbIGyKwa7FBZSazydIMoSP4vJUpcoDpmkQ1RuYxUF8kAhC1WVTBeVvEuLClRXrjy1F/Yyi600papkYE3p9LMAtgTb0jhuBbDDWo+/Giouhoqnuoqnu5prHc2qVzIKoOdrLHl6TDB96YlMAhaprvJYarvT9w68UyXSDbCeJ1WxpJlhsxPJLLHIS0HXK5siM32/bAB2L6je31htsdc1yO7VEcKup7Ct88fM5bHvrLG3LPWp8aSNQ84i/7TWPPvss4xGo1O5P3DCKpsaG+Z8mfcGrJpx15g8tMfC9v+0AXMb2LVlFW3Pb/Mbs44hv5afl/d5efxcniwYsN6WX5qJwXlSU/NZ2/jAfG9y03zfb3TXN27caGqkmPwsw6yvrq7S7/fxfb+p1m7qpwwGAyaTCZ1Oh4ODA7Isa6q7X7lyhTAMm4iEYfg9z0MpxWQyYTQaAVW0QUrJ0dERQDPJOj4+RmvN/v5+48Y2m82ac2DsHcuyZH19nSRJGgZ+bW2Nsiwbrb055+acKqUaG+Zut0tRFOzv7zOZTBgMBiwWCx4+fMjh4SFa60aZMRwOuXTpEpPJhMVigW3bzX+vrKwwHo+5cOEC7777Lm+99RZlWfLBBx883tG3lo/UoC+HOtvftTuHs35nXrd/2559flxZg1nM+nme85u/+Zt87WtfI8syut1usy+e5zXZyAZITqdTlFK89dZbzYypKApu3rzJn//zf75x4TgrXNL+77OiAO2b5qylzYaft83lc3vWeVnunNohsGVQbDqG1dVVVlZWuH79+ikXmfl8fiox1Xikh2F4Kpt6+VotJ4sKUQkSo8ypmJXMYZHVbHZqM00tZjXobkB2alUAvAbieWkROEXDWhtnEQO0+37JRi9tAHf1fUHXq3SDgaOwrMfdd7SGrJTMc5eDzGE7tXmQ+exlAZPMYpwJ5vIlZpcs4lWXPHMpMo8ycyn3bMqHtaOIa0B2xQxrUbHYJRYaC2GXeEHF/vY3C7pByrBXshIqRo6gJyWBlDjKhtJCayiUIslKkqJgngrGqWSSeIwjwd4RjOMBKr+MozJspZBFCU3nC6q0UKVshbrbLHbdECzILZvcsZvy6Xga0dE1wK7KX9udCmS73Rynl2F3Ciy/Tna0NNoB5Qq0U5WtxtNIt5XwmAlEKrBygVtadIVLqCRebONOLJhZiIWDmkvKSJInFkksWcSCWSyYJ4LjGmBnGScl02s9uawBtlsD7NCHnl9JiJ5dgZUOrHU1F/qwGmp6nmYQCGwBSVE5hBxHNMy1Ya/HiWB7IXjnwADuygu661WSj1HQBtK6AdoX+wZ068eY7p5XAagnLYWqAPZhqtnKFG+Ump2ZYn+sONSKI62YopgJxUIo4tpNJLdbbiKeQnQ0gz8g/rnOZK4LkelCRLoQEaWYqkJMdG7tFLm1KBfW4t/Wxf9KNTIRi0UqmWdWVS49E0SF5DAXTbGZshRQtAC21NjhSZKjHSgsT+OvRQT+lE0iHJ1U7HWeI7ISmSp0DDoRVXn0yCE78kgXHnEUEkUh46jLjrqI3Unpb07pbszwRgnOSonslYj1GmDXPtilrORZFopAR9gqo5PNyRKXOOkwTUaUuY2sAbZdKESh0FKAbCU6ltUkQrelIc0ktQWwjUykDbBbOmzbqjzAXUcRuIpeT7PiKy4EJVd7BbdCzc2OZj2o7Ef7vsZbGnGX+3atIcrqCE9qMZkL3t23mlLoJ0nlp18bqcgkEhRKPMZYV0BbNSB6s583Ujvzedct6AcVwG634zYgNfu5jAWWx63laozmeTlavDzWLo99ZzmlLY/DBuSZCHHbBMKMz8ug2rCa7W0sY5nlcXVZK97GPwbkV8d+GiyeNX6b3y3LVZb/rw3U28/L434baENVVbzX652yCjbrGQzU/r+yLBupixn3zTkycpgsy9jb2wNOtNkG7O/s7HDhwgWOjo7QWhOGYTPRuXfvHv1+n/F4zPPPP9/U6lhfX+fw8JALFy4wHo+ZzWaNDNVIWra2trhx4wbj8bgqxjQaobXmd37nd7h//z7j8fhUuzSTKnPujEOe4zjYts2tW7cagw6gqfVRliX9fr/R6DuO0+AlKSUbGxu899573Lt3r9GxCyHY29tjOBw2uvThcMju7i6PHj3i2rVrvP7
C3/hL5iFwPxC5Gdtz2Ks5x/PsxbP0p7PD8DSz+bDjfJc9bfMA/Kfhtm46HufxpIoVepB3377bV5//XUTFhPQILrOR48ecf/+fb70pS/x0ksvmQXX/D7Pb1+smtrtNqPRiCtXrtCfhDw4DVlpb3JQq3G40uEd/wVSt8FhJ+Hgua9yt7FOppvM6h4nX/hNYqfO3w3aJYOtHJyvRnjZlHecnIZKaaqMPDglrR/RtGBVjfCzkI4H+4d3SAeH3N68xErDo+1p4tmU999/n2azye3bt9Fa0263+eHeDymKglu9F9FRm1Q1OIhSEt1he7rCdt5lq2gxLiyc0TLZtMXxJCO2ayT9GpPUJXZqpC0X935Cb1+x2LTPGOwnILtbg1tL1aJOnFW+LO+3/bIY7bPO6bNe01ozRTEsYFDkDAyTPc9cP7k/mnscAQ1KwFyCbEX3DEB3LUXXUlyxS7DdVdBVFm3rCePdUWX5+ov6xkXtfFRIg4af/Sp+oksVQCYAROQiUOqTq1EY+Zwcx6qutcoCyq0KXKoSAWlV54mq1MqynjgTKaWMp7UBt2fA37Is410s16J8n2xb9l9ugCFyhCnf3t7m4OCAbrdLs9k0v1sKqLRaLVMEbzweMxqNSfE5mGbkdov+rOB0WoDXIVF1Hh+OqXXXyO0Gp1PNJLFIVJ1YB0zPdNjiHFKzE5puRsvLKzZ9CeghLS9huT2i6WUGWNedlJaXE1gxDa/AsT5dUEnaRZG7+bG/Oo5VIx/Vz16kqxa3D5mbfppW1dPPg9kqXpgHwNU2v/+yPdk3WVhK/oGwv41Gw/RTmZPn2WdZDFYXWpKbZFmW0VlXnbmq+ylSW6UUOzs75hxUfztgtMyWZdHv9zk4ODCES/VachyHK1eumO2/++67PP/880ZG8eGHH5pcJ/mc/BciEGA0GvHxxx/zxS9+EeBczqBc7+KBLjhD8JRox7vdriE0u90uSZLw4YcfGgDcbDaN44njOMxmM+r1ugG7nueRJAmDwcBIzJQqNelHR0eMx2Nz3Pf3983vlxwRwFg2bm5uMhgMiOP4HHEmdUJkQSKObKKwkMqgUBK0YRgym81MAUBxJwvD0BDBWZaZKJb4rgs7Dk/ccMIwNGYJUsBJriWpZCrf3+12efTokTkuQoxIH36WBBh+BoBeHRCqA+xngZ75FXJ1VSgtSRL+/t//++akHRwcGFmBhDaFYVZKmQupylinaUqapty7d88A5G63y/LyspGHTCaTcyBKLmDJWhavXCizeXd2dvjhD3/Iw4cPOTk5Kb3Yz3xgRfgvk5dIEmSSkM4q2coCyOWkCOMux8eyLDOYZFlmwkpFUZjf2u/3jTH+0tKSyT5eXV0lDEO+9a1vsb29za/8yq9w48aNT4VeqxPqRSvy+XN5EaNxETMwP8BWByoZgOcn9GqTc3ERG/40hvyiSaq6Hz9tiPOi3/Znf/ZnfPOb3zxXAU/On4TvpJjGyckJH91/SOR1+Jy/QqhsRqnFKLcYJoph+uQ2SGCbv8jWF3+e1K0TWzViu0a25ODmEQ0rw3NmWMGQx45FgxRlD7B1yFIxoJv38YuI3a0PqOmIl29s0lApNZXwzps/JssyPv/5z9NqtfAcj63BFg8fPmRlZYWbvVsktk+UBYxmISdJncP4Eif2ImEYMIwUO7VXyFSLb+0vM819kr0aA+8vERGQPqrhUPr52u6Ymo7ojKAdZuSFhaPGLDhTFpsR/cl9Vtoua4t1+nsPcIspvg6ZTQb8C7/zL3Dr1q2nAtSnPV9ozaA4L/8YohjowgBow2brJ9KQgX7CaBdUWWtonwHsTuX525YyYFpAd0cp2mcMtvsZ491Fv8X0U33xb/xpwHpVp/qzNpm8BZzJuCrXZDWkLqBHxiQZs4TZliaMtnxWAPo8UyqvPwvYSXRRyspblnUuiVFAogAwsTmtOhBV2XORCgKEccosc9Feh1GsOJ0W7Ifw0VgRDjxSq4Hyu+S7LTKrTu60SKgR6aBkvAufAovAiqnbCXU7pWYnNOyEll/gt7vYVkrXGbDWiahZEb6K6dUVTTcnUBGeCqm5T1ja6nGbTqeEYWjkNOVJBhLQsSabwqgoGD5lrKuOY1VgWJWFVsdlGWcvktvM657luM4vBKquJPK56kKtun9i6CC/TSIisi0B/09jzbXW54wghP0XUBeGIcfHx4xGIyaTCW+//bZJTIzjmLfeeutc36iSRhsbGywuLlIUBR9//DFBELC5uQlgijNVE+Or+yO5CuPxmE8++QSllElMrkY7hUzTunRPkuTFqlRYfrs4npycnHD//n2SJOHq1asMBgMePnyI1tosIKrnRBIZG40Gd+/eZWdnh06nw+LiItPplFqthmVZTCYTptOpSRKV7xRySeSaJycnAAZ3iHxof3/fKBbknIokRLTlklMn17Ecd9G+7+7uGuCfZRknJyekaWoi/51Oh8FgYFQPYtsq/VmMP2zbZjabMRwOOTo6QmttqsRfu3aNpaUlEzWTRf50OsV1XaOy2N/fB+Dll1/G930++ugjlpeXieP4nCRIjuEHH3zA2tqaMXWQfnz9+nVj5SwL2d3dXU5OTiiKgn6/b1yw5HqpXjfPaj8VQL9IZiAdtppkJq0KDC9i1Off//jxYz788EPyPGc4HBonFtFtiyXQfDXEi0CkrIpk9bS6ukqz2TSdcJ6J9TyPzc1NA8JkIpGCNlI2VnRT4/H4XMUs2WZVplH9jdVBU9r8fkuYtjrgSigvyzIGg4ExxB8MBozHY1PhSkB7v99nZ2eHN998k+985zv85b/8l/nVX/3VC2UX1QiIsFTzDMdFrXruL2Kyq8dgHiTPs+nzjM/84H5RSLK6DxcB9Iv2Xbb1rJVqXMAotRjnNsfTlP/oOw/Y3/w6+kYDVe+A12KycpnMb6HqPWbaIfdbDLwmD/w2b7oBvz8Maf9JwUrLo+crOmdJj+Iucr1R0OlpnF7Bj3a+hzOZEBQR650aYf+AUf+EjY0NwjBkd3eX69ev47qukYt8rvUSltVikjoMZxMyp8lptsq+qjNKLB51rxPqgLuTVfK4xTRzy2I0K24JOO76KDR1O8F3Q7zujHaa04mh6WYoNaRhTWnZIzabIxYaFh2/4JP334B4wBeev0ozcPB9j3fffRfP9dhc28S2y0IV0+mUK90rZcGKwTbdRpdercd+vZT+7B1PaF+/wXBhkR8lWQVon2ephxV99hOpSCkhceAJY63UeaBtlYD6qmG2nwBtw3RbCgWf6jdP6+/VyXP+LT8NOJ8HGk/r009r/zhg/KImlp0yblUX0FVXj3lAJxruas6PAKPqmCJstoBOeb4KCIX5q47TVRmNjFOy3VK+osmtgEwHjFH0/YKjxjWsoIfl9MhpkaQ1YmrEeUAU+0TUiEelfCQsPBLtYZNRt8UtJMGrzbDdCU4+gbhPNtlHx3eoq5imk9KtW6wt1uh0oG4l2MWMWvCEfBHJjeSCyBgqv6UoCnzPN1Uo0zQjjp/Ig6r9oigK4yVfHSPnQfJn9YX5z8nxrR5zy7LOMdDz/XJ+zL6I1a62KkC
fdzyRebrdbjMej03yuUSTq/kw4rAj/ab6/QIYqwBSFmyyD0mS4Ps+W1tbnJycsL29zebmJru7u0wmE5PrVQVGlmVxdHREo9Egz3NOT0+xLIuVlRVjAyjnunps5FgJaJNCbvV63VxL1XFDFpqSzCvXUxUnSEvT1CT3St8QGa+8Jt9RxWSyWC5tUQsDjOW6F0Zbjr0cgyRJDKveaDTo9Xqcnp5y6dIlU4F6PB6zv79vZMb1ep2lpSVjyyjnIQgCVlZWODo6Oidp63a7pjjQdDrl9PSU559/3lzvkoQJ8Nxzz5lcEFFQbG1t0Ww2ef75500ETSJfvV6PhYUFI38uioLNzU02Nzc5Pj42/UKcWSShfGdnx4DvbrdLq9UyEZT19XXa7TYHBwfGSng2mzEej43v/xe+8AWzSPjoo4+YzWasr6+zvLxMp9NBKUWv1+Phw4fGOafRaJgIYZIkHB8fG4/1Z7WfqpLop5igued/lot6ftWY5zlvvvmmcTYRY3rp2NK5ZeUoJ1Mu8Crolu3JwHR4eGjCmeKbWZ2UABNi3d3dNZ1YLpKTkxOzmhQdUrV0sGggZSKSDlAN584fJ7l4LmI3qgO0RAUkjHt6ekqn0yHLMqNXFgZXNFTT6RSlysqXf+/v/T0ePnzIb/zGb7C5uWkuTPmu6mAjx636+tP6gHxmfhVf7SPyf34h9Sy9YXXAmdekz4Od+f6WaMUotRimilFWAu1RZpW31GKcnz2XWozktdRilJXvjwuLul3QdgrqJBxf/e+hlwdkoyPy8SkqHGD3t7BnAxoqpdi+h5fNWGl6OMmEdHSMpUtnoV/7tV/jd3/3d41HsfQxOEt0nPnY1zd5/+4Og6RNf9Smn6wzsODN0RLT1KHvgz1cJNJBaWe24JEc1NFYJcDozKhZCXdHmqabUVMR7eUuKyqm5U1oe8MSkKioLLZhJ6VFmUpxXcfsUxUQ5XlOdEU0nSWDUBQFtXWH8SQgaAZknQ4Tz+fg8lXSWo2jjcvEQY2TzZsMiwJ7YZHI8xm+/BppvU4S1JjYNpHrkzgOXpHzHyub7jQvWWn1RI/dUbCqFM87ig6atrLMa10FHUtRr/Tdp/XTz2oXMY8X9bNnRZY+a/vz/38WUC7tWYDsZ92ejHky7olWWqKAAjxlDJNQt5AdMh4CxjFEjpuAMGFBq7ZiWC4q6GLXFoipMU0dDocJqVUnt1skqoYVLJCoGpnVKKs3FgGZVSe1GqSqjlYKjxhfh7h6iu1OaFgZjSKjXpSAe9Hp0/K1seMLVETLL2g4KZ4O8R1tGHpxmpGJcTwec3R0dN66TSlGww7RmS2hJNpVi4HJ4qU6hgoQmo80VMd9CclXgZKAJYkMS5PvuShB9aLr4CJJYfU1eXwRGK9+Z3V+flZ/n59HLhqbBZRL4qFYalYBnFhcVheG1Qj3aDQy4Eis/GSbVY/4ra0tI5VYWFgwnt/zFVHlvziNVPMnFhYWzGKzypzPk5W+73P58mWiKDKRc7lmq77wSilWV1dxXdeQkNWIbPV8SLVckYa4rsvm5iZRFJnfICYT1WMsuRSXL1/mnXfeAUo2XQgfAbBSv0LwVBWHiNFGrVYjTVPjbe44Duvr66yvr/PDH/6Qk5MT1tbWWF5eNot68QFfWFgwLi4y3mitjax3d3fX6LA9zzNVqJVSfPTRR+dkKM1mkyiKGAwGNBoNxuMxN27cMBGXx48fs7KyQpIkdDodRqORWQRJRVy5HmSRobWm1Wrx8ccfc+3aNVP7YW1tjU6nQxRFvPjii0bB8NFHH52zWlxcXOS5554zOFWKPYnjjERV0jTF932TUCvqiCpGlIJeQiQ/rT0ToFcniYuAVLUTzneaiwaJ6megHAwODg74gz/4A6ODkpCHXASyQpbPy2qvGharToKirZLQShiGJvQShiF5nn+qANPi4iLNZpOdnR3u3btnigmJG4ssEiSpQTqdSFKqv3P+t1YvQrnQq6y6TJDVMtRQhsbEzSNNU0ajkckUvnLlipHAHB8fM51OzUpea83W1hZFUXD//n3efPNNfuM3foNf+ZVfMUkPchylzUuQ5sF7VXMq75sHENVw9Hw/qIKiajhUWnLGYA9S2wBtuY1SxfAMUA9TxfgMeJfPlQA7KixqVgmw205B263ct3NaTsGVWka7VdBx9LnXO66m5RS4Z4fj/v37/Nv/6b9tnHPkHDeuXiVOC9zFy+QnU9LGEgNvFbt+C93toGo9JrVF/v13PX7vP5nRXFpgFFulm8jZLc4VTU/TdH8OOx3hFjNqcYRHiGNNWLI1DX1CTx3z3OISNRVBPKCmYjaWG9TtjHD2pAKcACuRGJhQv7ZxcEpJRQZpnHNq2US+T2LZRH5A6HlErs/Utpk6LpHnE3s+ke+XFRvd0gd79uovMnM9csuilmc0sgz39ufx44g2mjYaazLBGQ5YnI5pjAakx0csug4trXFmU1brNYIk4de+8SssLyx/JpM8/9rTQMJnkQFP2+7TPjM/Zl3Eej/tOy/a54sA/9M+91l5O9X3/6wA/Vvf+pYBHrPZjMePH5v+EwQBvu+zuLhoAPbh4SEa8Bo9lN+jtXQZ5feICTgcWoS5hxX0UEEPvA72bIHMbpGqGqHyyYMmhddG23VUEePGU5xihpWOyLITnGyMR4idT/A5oeGkBMWU/HQHhgfUnYRAxXjM6NZtaoF/rr7F888/T8Nv4LkloI3jmGz8hBgJgoB0mHFaFEbukiSJAQciSRS5zMbGBpcuXWI4HDIYDIxc4uTkhCRJaDQaxlZUrrlqtcGLCAWJrlbnmypbVmV0xSjg5s2bnwL9833nafPw0wC1vD4fRX1af/oscH5R/3sakVKVUImWWkq6CwEnzLNEjGVel9wAqU4rFnVijylJjNX6EXfv3jV5ZxsbG9y/f98UUxM2virPtayyqJowm2mamm3J++V7q9e9WB2vrKywv79/bv9lAQsYH26xTa7VakZmO5lMzH7Ztm2SONvttsk3kwWNHCMBeKJ5hieRfvkNjUbD6KNrtRphGJqFtsh3xZlFXFSEaJTk26OjIx49emQkZ4uLiywtLfG1r32Nf/AP/oGJhjQaDW7evMmPf/xjk7QZBAH9ft8A5OFwaKS69XqdtbU18/vlHEqS58OHD4218OHhIUopFhYWePDgAXt7exwdHZkF8Gw2Mz7pYRjSbDY5Pj7m9PSUer1ubIvFOU/kMkEQmOem0ylXrlwx25RkWVkIycIgSRKTe3h4eMjR0RH9fp/T01NarRZpmtJutzk6OjLHQKonC447PDw0uHR9fd1EQyRX4GntM11c5AK8aKKprnifdkE/a7t5nvPBBx/w8OFDVldX2d3dNcyEZVmGJa4mDUHpeCEHXTqnAF9hs5V6UrhIwj2SPFKdeEWyIh3+ypUrpnLi8fEx+/v75mIqisI4yUgYpzrIzk/IMgg/bUKtMpjVamEyAYxGI4qiYGlpyZRVX1xcRGttJlnbtul2u+zs7JgJSsrau65rogB3797lt3/7t7l27dpTQ6bz568KUqqfqbL9WmviXDPMSha7rN6oDI
M9zm0mucM4tw2zLSB7lFkMU4uoUNTsKng++28/AdtXahnttqbtFLScnI6r6Tiapp3RsDJcdTEQqk5QhVaM49KObxzZ7CYOd2KLYWwxii1GicVHD9d5c/VfZbxoU7ht8Lvgd7lbW0A7dVQWouMBKh7QT0fY6QifiJabUdMJ9WSf03fu8Mu/9nV++Wuv0AnEeUTT9jWurTg8POQP/uAPTEKLhG3FZzX2YhaDcnGQWznKdRnEOQe+z6TWIO4tEbkeeaNZAmrHZea4hK53disfz87+h06pN6xnaXlLU1PJUe538oxaPKM2GVBLEmpZijub4oYhLV3gJTGNs6x6wEx2vV6P4XDI4eGhKai0u7trIgiWZdG0lYn6zAOPZz2unsN5Zm7+/Z8FgD/rO57Wftr3XvQ7nrVvz9qP+UXu/Pf8NPuk/mVczvJTb6krnEYLOI0lZq7DYE2R203s2iJ4bXKnhQp6FE6LaDkge6VB4bRAWVj5BCeb4BHiFlNS/xgdDXD1DE/PsGdbBOFHtLycZHLE6GgbOx9TsxKcYopDxsLCgrFi6/f7JsoHJVC6fPkytm1z/+g+zpkeXdjLLK1RX+iZ0PTS0hLtdpuFhQXj8jAej3n8+LFhErvdrplLZH44OTkxkjGJQgLGGz3Pc9rttkneHgwGnJ6e0u/3TTEWYXkFqIvOVuYcuV+NTlU1/cKyVucfWVSLm8z8XFftP/MstYA7ec9F8/A8gz7f5tn0i+7LYwHP86y5bL/6PiGBlE/ZTG8AAQAASURBVCoL0cgcJtsQ2zqJEMRxzD/8h//QMIpf/vKXuXnzprEAFA2x9B3ZvkQ3JDFQGHQhvQSUSvEnWSTV63WzXdn/eXcWmZOHw6FhduV9VX2+3Jeou3zH6uqq0WpXlQAipZXkRyngI8Rk1dZT9kUSOiWXTo790tKS+f2yEJIKsRKtEY9wOS47Ozumr0gRsI2NDVZXV9G6VB5UEzmPj4/RWhvHlSiK2N3d5fbt22ahsru7SxiGvPjii3z/+983MrA0TYnjmOFwyOrqqil0tLq6yunpKXt7e6YPVS2q2+02YRhy69YtnnvuOd566y2THPrhhx+ilGJzs5RYXrt2zVSzFrlvFEUG/4lDTBRFHB0dGbJC9O6WZRmpjySxJknC0tISm5ubRnq9ubnJwsKCKTjZarUYjUbU63UTSageM5EeSX8XWczBwQFHR0e0221jb/m09lMBdGlVwDb/nqdNtJ81EX344YcmfGNCo5XXqxf7vF5HEjFlxS2sRlUbListWa3Py07SNOXOnTumHOx0OuXu3bsm81Y6lIQghYGWUKkMuE8DvPODKjxxVZCLSrTmMvjLb6iyBouLi6yurhqf7L29PeCJL7fW2gzu4kc7GAxMVUfRO/3u7/6uAelyjDNK0FwF1kNhshMMg12+psx9YbGrDHbLKWg7OS07p1UB2JeDlLYTl8Vn3LIIjQDyjgfeM5KZn5yzsirj6AxQ748tRnHAMFaMohJoD+Py9fEZ4B5E5XvHSXlzrbJCY8cvaPkFba8wj9t+QTbYItv6Nmq4jx31IR7gFlNWewHx8ACb0kmn6lgBEHsel2/exPM8wjDkk29+j7/2C/8O1zauEevSnPqhVoxyuJvDj9Y2mTkuWb3OWFmErkfiB0S+T+wHhJ5PdAa4Y9fFyfMzQF2C51qSUEsT6mmCnybUphN6SUxwVjrdj6OysEyaUE9T/CxF8WTAr4aypR/K75HwZJ7nWEF5TWX+k9C79Evf903WvSRbwRNHEPGlns1mJmmneu3Nt6c9f9HC8WdpP81Y9Fnb/qxtzLPs8v+isVLuy7W7u7tr6iCUr8E0OSuiFJb2k6NIMYzK5+Tx3/wO/xFPihLN/68DMTDYev7fw9VTml4BUZ9suI9KRljxCU74CCsbsdLxudT12X3wIcPDh1j5BI8YdJmk/vzzz6NU6S5RLcoGZbTv1VdfZT/Z5874Tvk7bZtcKYqzyWlzc5PT01MAY+Mm4KxerxtJgoyXAkhc12Vtbc1EUiUxPooi83nHcVhcXDQ6XNGOaq2NPEeKj0gV3tlsxv7+viE82u22ISFkOzI2SxETcQqRx/MgSprUxKg6zIiWWWSaEpWUSbzqKCPSjmpSWTVaK/sp+/pE656eyzOoyguF9AIM0JV+KHOHzJnV/ABZiCiljExlPgJaXSgAhjyTMUZqmsg+ylgj11PVpWc0GhlybX58kv4m823Vo77KusvCqQpcpd8NBgNDhInstRpdkAWWuPkIESfR1Hq9bvZZ/svvkvcKJpnfpkTD5dwIiIzj2FwHwoRLLRittZFiQIl5pP8DxtpQzp/IWOBJ2XoBynIdyE2ev3TpEtvb2+zs7KCUYmtri9PTU9MvLcvi4cOHpoLmj3/8Y55//nkTnRCHJIksyPEUlr5Wq/Hw4UNarZYpeAeY3ye/pXrNu65rohO1Wo0vfOELvPHGGyiljGT5jTfeMAvglZUVswiTvpbnuamgrXWZN9DpdLh27Rqz2YxOp2P6gOAvAftSGbVad+fhw4fkeU69XufGjRvGRjLLMlOTQYD7fJSkmlQqDLv4tj+rfabE5aJw10UT1E/DYM1vY2trix/96Ec0m02zKpQOLx1YVtTSAWUwEh2QTHSAORHVhBWl1DkQX73Iq6GobrfL9vY2d+/e5f79++bgSuhKQkBwXucuA+Vn/XZ4AsyrWsZq0pAMzuKIMBwOuX79Oi+++KJZFUuGt7jJjEYjUq2g1sHqLGA1Fpgsr5O5TSIrgHoX1eixv7jG/6u5wO9/r8ni4y6R5ZeykUwR5haBVco9SomIpmPA9hOA3XHL19rOGdt9JidpWBme9WTglGP76XOvSAqbceowTmwOx4p7icsosRknFSY7thhGJdAun1MMIotxUvafElDryv+c9tnjhVrO9W5mXm95eQm+vbLwTOBonobvkiTh3/i7/ynuB/+AuMJSWIuLeEsv4Vx7BWdhkcF4jG62yVstimYL2m3iVpt3ewtY3R6q3eHDVovXm0skU4hRxp6vg8ZxG6TXb+OnCY0spZYmXMpS/OmYti5oFrkB1u5sij2dopL43IAuLIwMvNIni6IgsSyiomBQYceqTJdcI3KtCciR60OYJ4lQyYAp4X15XdjHNE1JkqRk/8/spGSikX2SiUIGz+r18VmA+1mL/+p7Pgt8/yzM+T/ONsyiPy/93IfhE1A9PKtSOhTf95lm5zjkk0fHHI0U3UsemVU/6/dlYaW2fxZ5kZuvzeNOoAE+4Xyhour/of4/EwG8+OJv6KWlJb7+9a/z6NEj3n34rgnby8R0s/cyL195mR/un2J7U7IsJ8sgy8uxTYDxw4cPz0VOBQitr6+T5/k5aaBo3Wu1GsvLyywtLZnQL2CIlEajwcrKCsvLyybxTNySLMsyVnUSSta6dFoSR4pvfOMbxm9Z9MDCwssiSeoVSIG4MAyN/vnu3btmf0UGKPIKAXgybkt0U4CIXDvz5JVcW1UrwOpcUdWyyyR+dHRkAJxsS4CosKxVxl5YO3Gx8H2fXq93roiZ1qXn9cnJiSnq1G636
fV6ZgEt2l8BElVHmX6/z8nJidm+JBBWCZ48zzk4ODDREXEQqY4v0qrRhSp5JbICwQHicX6RU5zc5g0qquOVfJeMWdXrVs6D2CzK+ZZrwrZt01fl+AkREQQB6+vrBjC3Wi02NjYoioLhcMjp6ak5/tUE6+pCQPqMyKZarRZra2t0u90SzDseb6h/naPdF7EfvMcNb+dcESupoi7HuqoikP4mv1sWeUmSGN/uVqvF8vIyu7u7KFVGdPf29oyNM2B+38bGhkmAvHPnDt1u91xkSvzRJRFXzCva7TatVovBYMDbb7/NxsYGS0tL7OzsGBlzo9FgaWnJeKhLP1tcXDRys7fffpsgCFhbW+MrX/kKH3zwAZubm6ytrRkL7h/84Ad88MEHpj/t7++bYmTiBiM4rtlsmmjIvXv3TMGzmzdvGrB+9epVHj9+bKIaElEQP/Vbt24RhqGJCFiWxaNHj8y4Nh6PjXOgLNRlrJV+KraUz5Izws/IoEsHmX98URj2ae+XVhQFr7/+Ovv7++c6FWAYBgHIAmRFXC+MuABmGbDEPL6a0FH1BJcBVl4bDoe88cYb/OEf/iEffvihSXCqhqKk0wPmcRWY/zSTfpUx9zzPsDphGJaTFBZubw2aC0ROnfrSOod2jeRGg8erVxisXMZuLpAHbaIbPrPXPBK7TuY3odYFvwHxjCIcwGxAkUwIdEwtHGBFY5j1mT2+g5fNCJyCr37jF/knv/4LLDddOo6m5Rb4c/2kGmmo/s6keMJgn0wtHsQOo9gzeutR8gRkjyrSkZLFtkkLRdMrKqC5oO1LcZny/kY7o3MGvktpSAm0215B0ysB9jwbe5H2HSArCiYoRigeWDbjs/tjZTFSijGK4dlzJzrlh3/9f8rkf/QvodttaHdQ7Q6RZfHJZIwajbCnY/RwiDOb4kynqMkYNR6hd7axPvoAPRzizqao8Yife+EF/tf/yv+cz13ewKFkcE5PT/nRj3/E1taW6Z8yYYnllSxMj2cz89y8TV2VVZIJRhilqt1dNb+gek7lJsC+muA8v/CUbQroEV2k5EEIeOn1embSlN8h7JAALwkHVs/TPNP8rHZRVG/+tZ+VYT+//ZK9LosmKUamOqlmGCpGUVlUaVSpVlq+r6xmOopglioCR9MOysqkJaDG9GOSEf39B9z78McM9h/iZ2O+vvrz/LXf/i2W2g7tQNPyQFWqNl7E0P/v/0rwf/hpftN0OjU1FhqNBs1m0zDYUuwDMOHxdrtt+qCEfquyQ5EkRFHE4uIi8GSMk0WbZVlmkhIpnviMB0HAbDbj5OTkXD+t1WoGIIVhaNhCGbOr9omj0YjDw0PDllYBuVSYNFEg60lNCZFZCesmYEzC1UVRGGZMZFzCylXBuSR2Vll8AWZxHJvaFoAZ94uiMFZ38puFzRWGVRY9jUbDTP4i3ZHvrgJ0icIA5txWKxoLAyuLAKWU0TlLlNb3fcO6CnsN5cJK+gqUBJgc42p/FBlKlS0UwDiPD6pjT3VecRyHr3/96+a719bWzr23ytrLwmZealMdowTAy3lTSrG2tsbq6ipKKeNGIsd+aWnJMKHSn6Str6+bviJRGSEKpchVVcpTNVxw3IAwrzGeNYiDF4kKRUod/6aPrRokRZ1xEfBGWufNnR5/eLrE/vBX2E8XAcXfefPz/Fu/9EPDZDebTS5dunQu0iTko5wLwVOzs/ljb2+PnZ0dIyXrdruMRiPT75MkYW1tjdPTU2q1Gr1ej+3tbaPLPzw8xHEck5Qp50CKJorzzdbWFpubm+Y6bTabpg+sra2RZRnvvvuuOTavvfaa+V1JknD9+nW2trZM9VG5Xl544QWuX7/O0tISCwsLvPXWW8busNFo8KUvfYk/+ZM/MQDfsizz2u7uLp7nEUWR0eTfunXLLCijKGI8HtPpdEiShN3dXYMVBZNOp1ODNSUZdW1tjZOTE5O7InPklStXTC5CHMfnqrvLeRFNu+u6Rmr3tPaZDPpFCP+zQtVPa9WJ+M6dO/z+7/8+gCk0JOxBVe4hF5cwhkVRmuUHQUAYhp+alKVDyMAcx7FJIlhaWjLbi6KIt956i7/39/4eb775JuPx2JQMTtOU09NTM4BUQ0TzEhmlFDguqt6FWheruYCudaBe3rcaPVRjAavZIwu6pLU2I79N4bfM+/Ab5MkMZgNUOGQSjSgmJzAbMAxHzA6PqB0+xo7HqHCIOz4mO97DGh1TTE8hHKKy5MnxtW3qCwtsbm6yuLjIxx9/THp4SKPZxKk1eev0Di+0XJxXvsp26jBK3DMwrRhGZ/+rbHZFOhLnFnX3iTTE3LyiZLK9ghvdlLZXgpESWOcGaDfdAsd+MtDKTQbT6oALkOizSowotnDOwDSMzkD1UMMQxdiCERZDOAPh5eOxUji6TGZsoWmf3a8+7qDZpGB2csCP/x//CTzawp5OYTSkGPQJspSlhQXD7kynUyzXxT5jlXu9npFPjUYj8rwsWJB7DvrkGOfyBqenp/zkJz9hb2/PrORF2yqh3eFwaEL3AnhqtRrdbvecz36VGaoC8Gq4+CKQKhNbNfehytTJNS+TpzAGAsbEaqvf75/TkkqBMAEIUk9AtiVARIp0VCNZnzVeSLvo91THkyp7PYrPg2sBz8JeV8H0YA5cD6OSve4ElAA7qAJt6NRKsH1zSdMOCvN656zoUqdWMt2+c37RmGUZjx494vXXX+eP/uiPePjwIZPxGO9skTb44Ig17xe51Ll0buHy/w/Wv6qPFhAuUUEB0QJU5HxCOZZKBFP6nEyoYsMmFrCybQnTy1heXURK8pa4eMjr0qclRC4TXHWxKfpZwEz+o9HILEyrYHwewFbBm+/7ZpKVMHSv1zO5PsK8NRoNtre3mUwmhl2s1+usrq4a/+vt7e1z18Dq6iqO43B0dGQcRARci/3ayckJ/X7fMPSSzHf16lWef/75sg+nKUEQnKt8bdu2ScSTxxsbG0beIMdSEtmkuIokwVZzsESakGWZqR4ppcmvXbtmFgeSpCgLD1lo3b9/n3v37gGwsrJiai8IeSYA9Z133uH09JRf//VfP+dOU72mqxKQzc3NT0XIquOU9MPq8yKBkf9VQC4LRlngdbvdT42dMn5Iv5TngiAgLyBXDRyvRlHUiHWNKPM51jV2jxf5o+g2h+Ov4pzuk+mAOA9IioC4CIjy8nGqfez9nLqXYuspngoJ7AjbKchsl0X7gIYa41mn9JoDbl4ZM+gf85+/+yukOuCLG/vnmHDZPxmjheyrRgksq/Q8FwJQLBCzLGN9fd1ca1prY7gh7O7t27f5wQ9+YMCwWAxev36dRqNhCiUeHBywsbHBwsKCkYPcunWLDz/8kDiO2dzcpNlssrW1xcsvv8zCwoJx54vj2EhxxK0ljmNGoxHr6+vs7OyYa96yLFZXVw1Df/nyZd566y3TJwUgyzgnEd5Go8Hzzz/Pxx9/zIMHD1hcXGRzc5Pt7W2Oj48/5RIoEmqp4q61NgSByMuqsj5xZRH3J1E/SE6jkFYibZM+KKoHsRb9rHnwMxn06kq1epOL47NY9osY
L6UUH374IYeHh2ZQrurJq+GrKhMiAwpgPMqriRXwxOJILrKiKMwkI5/9W1sN/uOtNX7rUpt/+p8OODg+5d7uMc7KZY7xmBQOerWDrnWwm4tkbp3ErpdgWkD1GSDX9S4ETXQSQjigCIcwG2JFQ1Q8LkF1NEKfblOM3qaYnlJMTmE2gGkfFQ6w4jEuhcl0BoyvJ0AKZGfHRKPQXovCbaP9LnRfgktdtN8Bv4cOumR+j8Ogy3Gth9e6hLq5QGY3OHZaFE6Dx3nMu+9OWN0OWGw6dALRYGvaXs5GK+fFpcpzwmCfAXH3goqOVVbUPAeEWhvG+oFyGKsSWI+UxVhZTKyz/8pmUliMlGU8scfADEUNTYsn5c5bZwC7ozUtpVlDc1trOqqgpQvaWtPIMzpnIDzQGipg7mkM6w8ffsJ/+Pf/gOAs1FkUBa5lsbS8zJUrV8zFJVIrCX9K4lm322VxcZHJZGJChx9//DGrq6u8+eabJjlHMvHFC1W212q1WFlZMUUd5nXuVTZJWlVnWg2ZV/M15Dqqnh/ZrrxW1aTLBDh/fVcXrFJ5bjqdMp1O2d3dNeE8KUQh1RqFiXn33Xe5f/8+zz33nPF5r07GWsMkpqK11k9kIhVAPQxhGFJKn8KL2etOBUx3ahUAHWiWmnBjsTgHursBRjrS9OAzIo+f6vcCgp8sdDDH6uDggG9/+9v88R//MQ8ePPhUVeM0Tdna2uKTTz5haWnpU310fqHys0YIWq0WrVbL7Fu73Tb7IAvLWq1mwKYAwzzPDWCVPiKyJQENaZqa8Luw5FCOx8vLyybqKcBQvqfRaLC+vm6AczVsnl1ehP/jX2f1dMylf//3DZgSBjdNU1566SVeeOEFI7eSPltd8APngGEVyAmIl3MmDJsk4sukLQXJhPmCskjL6empKRonunI5thLCr2q+hYCoJvjJ90qEuNlsmmMqoEv0vY7jcHBwwDe/+U1jD/g7v/M7Rmpm27YBFnt7e/yjf/SPgLLcuNi/yXskQi0gYXd3l29+85skScLXvvY1XnvtNRqNhllkSR8Rw4XhcMjbb7+NUoqXX36ZV1999Rw5IIuvk5MTtra2zlkKzo8r8ljIh/mxrdrv568zOcdVckJuWa7JrRa6doVYNYnygMJqkegaSVQjLgJSXeOdN9cJM4+Dk9eIMp//+h90CVOPSfyXSQofixzPjfC8GNdPcf0U28vIa3UOvCXoajYszavJ2wR2TOAk+FaIp0J8OyJwYm4/d43CrfHeJ4+ZZg4zq8n/2/tnyLGY6Sb/k9l/gNaabrfLc9eeY8/fI5j8u6j6Vb7+pRVOTp6QV/NRA1mUynGsRpTkHIpDjFxrh4eHJi9AokFZltHpdJhOp3z5y18mjmMeP35MmqY8evSIjY0NI3F64YUX+N73vsdoNOL69evcu3fPzDedTofj42NjQSkRmiiK+OSTT0xO3erqKgsLCyZvoNPpmKjG5cuX+da3vkWe5+fGpWp9nK2tLbrdLg8ePGA8HpsFnkQONjY2GA6H9Ho9ExFbXV1le3ub09NTvvjFL7Kzs8PKyoo5Rvthgz/y/zWU5fLPdP4zGuO7xvLS932+9KUvcXh4yOrq6jmCSqLJ4/GYw8ND2u32OSm2XBcC4IWkqvbhp7WfqlBRdTKfB+jzF9B8m39eLsxer0e32zVWP8PhEKWUmUzEdF9uopGTleTS0pLJDBfrIpn0pEkoVbRRcRyT5QX/zsc9NIr/aHeFv239U4T/w9/GKVLG6QQVDimmfVQ4xAqHpJNTksEharQNu++hwmH5WjSC2QA97UM4xMoTM/jLMZMM8tlsRnLGWGlAOw0Iuqigh+q8gNNaofA6aK9NWFtA1RbBaqCdpnER0UGPwu+C34Yih3hwdutDdHZf/k/34PRDdDwkTUcstxye21zCLaY8/OhtrKIEV7/9u7/LP//b/7xhveZbAUz0GVutFEdYjC2nBNyUEhF5bXz2X14TIJ4qReMMNJeMdUFLa9qc/deaK+R08pwl22XRLSs3dhSG6Xb1pwfqM8yN1mUZdBMylT9bkZ/pZ3X5wacuKKVVWTZhBEUDKuBTwI1kZAvrLdZNwiZfv36dLMv43ve+Z1iD8XjMwcFBaWOny8S0K1eu0G63zSRbXejIACz2VxLOhDLUtr+/b7LNJaTnOA77+/scHByYQcFxHG7evEkQBBwcHDAYDMyEVq/XuXLliolgSWl0GXg6nY4BIdVQvoCuKts+mkYcj1IOhzEPdjThdowK6njN+plLSJNZ7pN9WEd7OZnlM07ss2RIjPa6E8hN0649YbEFcD+3VMpGqs93a8q87j9lVHva5F9tT47/Z2+jKhuqblPrUkO7s7PDD37wA/7kT/6ETz75xGhaqwSEfG48HvPmm2/y2muvnWMH57/zZwXnAFevXjXabXiSq2NZFlevXgXK8VjkECLv833fTKIyQcpnhV2FclKuguNarUa73WZ5eRmtNYvLy6SBi15bwFppYDsWtZqLX/OgVWPv0hJ7zRojfYPMdxjeXiVr1yC7hPdbv8Ts3UM++eQTVlZWTPGV9fX1c5IPAScy6QnwkGOdpqlJTpM5QuSL8nvl2IqDVrXwSvX1xcVFjo+PzXhSzXFaW1szFnFVAC6J/sfHx+a75Xysr68buYX8FrkP5+Vs0qp9ZB6oCkiGJ3aC1f4jjJ5EuWRBABjCQM5zdQEt+yP7UiXwqvszv4gUEAnnozlVoFl9XGiYJQ7T2GeaOExjh1nqcjrOCVOPWeoSZT6DiS4Bt64zS136468xiW3+67eaxG/4WBS49gzfT/D8BMfPcLwcy82wvALlahrdOngW+WaGZWu8do5lJeiswFEpkeUT0SDLXbw8IcsTHJ1TKA8U6EIRNWscxhtEKiDGJyIgwifCJyYg3vUBTeDG+E6ER0qKB0oxomOIiqpmv+bmLLQGwPK56HL1HFaPm5xnuYYlQiTnGJ4kZ0tfle10u13a7Tb37t2j2+2yvr7OYDBgb2+Px48fc3R0BJSSlOXlZa5du8Z3vvMdptMpP/jBD5jNZiZHTharEk2u1WrU63VOTk6o1+t8/vOfx/d9VlZWTL6EEBmSwL24uEitVjMLWgH4wpT7vs+HH37IrVu32NvbY3l5Gcdx+KVf+iVOTk74R3/6HbYPx3SWNplmbdKln6PY/DJ3qTO99gJ5+xJ/PF4g6zWwnQWKoEVuNTiuLzIrmqA1f7R7g69Ef2YY8q985SumQqro8KvgW86NzPWj0cicnzzPjVmJ67q0Wi0z335W+0wGXb5ADv5FQF06TfV/tSPNPy+hFd/3OT09Ndmvi4uLBoCLQF8AugykMvgMBgPjKyk+5lmWEYah0Q6KrkgS1La2tvjqV7/K5xoxH408loOc3/vSYzpOzuBon7/1t/4Wb731FrPZzCTskOcEZ+CkXJnnaMsnd0t7Mt28gbW8gD4D0WnQRXtd8LvMvDa510F7HfA7BmyjrDNwPUTHA9JkhI5O0VEfOx1hTQ5R8QAmR0+AeDTASob06opu0+Px9vY5lnReD69UKXUp2h2OdQ83e4Hlm8/
h3vwNHg2H0OrwX11/no8ThddplppsngDuEYoJZ0mZ6DNQfQawkfvl/7UzAG5kI2fsdhtNQxeoSuLofKsOML4GX1mfAihaPymKNC+5mtczVnMDZJKU41NlOC9qCwsLXLp0yYT1pLpcGIY8fvwYpUq7wHa7zd7envGdlYQrGWSGwyHtdpulpSVTOGF7e5uDgwOCIGBjY4Pl5WXjbyv7LZVzqxXG9vf32dnZMYktm5ubdLtd3nrrLfb29giCgJdeeon9gwPaq7e4ue6zs7PD/v6+GYQlFNhqtdje3jYAvZpvsbC4yDvv3+N0mpNZTXKnSa27xqXNVQq3zShSJKoOXpvC64DXIbXqhEVAmHtl1cbCwVVZ6WPdSPCZYecTrOEYdzCj7Q9Zarustyc0nF02lhq8+rnrrPaCEnDXOMdeP+tcVc//2b2ngtenSUbmoxHzgPui9rSkHllMTSYT7t69y3e+8x1+/OMfs7OzY6R51cWXgBwZ49I05e2332Y8HhvJSBUQ/bdpAv4EZAo4E5/mauROpBfChhdoaNRIFpvMLE2+6JI7iqLuo1p1irrHabPGsNNkZmtCG/LARTcCsppLHrjkNQ+VFzhRij1LsMIYN85xohQ3yshy8JKc+skEO0xwtc0niy9RJBYLx6GRdiwvLxMEgZngqnIWOU7VKJIcNwEsQtiIjKbRaBi5h0y0khMi+uvFxUWOjo7M9ywuLnLlyhVTvluiaFCy55cvX2Z3dxfARLIsq/TbXl9f5+DgwCzm5XPNZvNcEqC0+TlWFgBKKeMyMz/HSp6AWOCJrLO6bVnMyPY3NjZ47bXXGI/HfOELXzjnwCKyTgF/UC4ovvGNbwCc09Aqpc7AtUuYeXSv/DJq6Zf58OQG6UmdcWQxiWxmqUeUeUxim0nsGNA9OwPjYeqglKbuZdRqKX6Q4Qc5tpfi+Brb11hNTdZLUb6NHShcu8CPE7Isx6pD3SuYFTYxTeKswC8i/DzFKVKcIsPKcyytsXwfNCSFT5EpGNmk2iZPbApto7FBga0LXHJ8MgIiGvaEDJ8wtbmkjulZE3yOqZFwVs8WN59Rt1NuX7lEw9FsbT00RM8P9ed5m1f4Zf1dMwdIH7QqeEOaLETlPMyPU/KZWq1m8hk6nY4Zk8QC0LIslpeXSZKEZrNpCtKFYWiSPcWWOcsyNjY2DHi/dOmSyTUQrf1oNGI2m50DnUVRcO/ePVZXV02i6Gg04nOf+5y5vo6OjpjNZqbKpri9TGYxkzxg4dprfPxwn2Mr5YPwFpbuEWqX00lG/9rXGF2Ch7lHttHAay4xadrcy5dIugHpb9bYKTKsbIxdTFGrI95TMSQDaAyJshm12YyGOyKIH7Dc9Wl6OcdZhz9TfwUUtN2CN6x/Gf3g/8LCwikvv/wy9+/fZzqdGowpC/AkSdjf3+fk5ITZbHZOqirKELl+JQG7urh6VvtMBn1+1TY/aVzEqs/fv2gnJJFEmD/LskxJ22azaQCSOALIYCYaIcmaFg2fAI21tTVGo5Gphgclgx0EgSkZvHSQ8/62orOQs3vN5vWtY/7uP/g+Hz1Ywl79Haa5T/FcC+11UUEXzkC39jqllMT2IRmh4iFEfYpkAPEQN59ShCcUs1MYPkKHpU2fivtYyYgiPEUlQ1wd4XtPrICMPywla63l+LkutDvQ6cJyG3txAzavkKyu4Rea1Laxews4C4vEvg+tdvneVhs6HVSzhY5j8tGQndGQwyjEC0Oyg33UeMSkKIh2H/NKs85yENDWxRN9tta0dE4DTHl0ac8C2vPnutCaQj9J9ql2zOqCDzATa9Ua62nAvMrIzA9cVSBU1aLK5572W7TWLCws8PjxYyNTqW5H7OAODw/NBTgej1GqtGdyXZelpSWjAVSq9Gp97733iKKIGzdusL6+/inWQLxbpTTwZDKh0+nw4osvkue5YbiXl5fp9XqcnJwYOzKRiN2/8b/hePHXeNvps97718GJSIMeudMCv8u9pefYbayz/fw/SVgE5E6TwmmR200e1Xpl1cZXwcrGWOkYlY5wiimh5+Izoz/cIp9tYWUTanbMxlKTlY6Ljod4esbaYp21hTqXlrrnwtWSDC2et5PdCd1ZlytXrrDmr3HNc7iyeGXO3eXp/exZ7Vnvv0iv/o/zHRcB+CRJODg44N133+W73/0u7777Lv1+/5nblmiEsK9FUfDgwQO2trbo9XpPzSP4WdoX+b4FtG4vtchbdQ432kwvN5hZa2Q1F5oBqlUn9R0O2g3yusdMvUTiO2SBS1H3yGse5AVenOFEGWoWYc0S3CjFiTLcKMVPC5qDiNpgTNofYc8SgkzjxBn2LKFjuTRdn+lZcRLRaAIox8dqLJCqOgfDiBCfRx/e5L77AqD58X2fr7ghOzs7HB0d8Rf/4l9kbW0NeEIYCRtdtQmcBzvCGlalI1V5STXaJOfUcRwztwijLREGSZQWnb6M4QLyJXo6mUwMaSCaaJnLpKjNfA0N4ML5tNPp8Ju/+ZvAeVeQaiuKgo2NDTY2Nsxz8+OhFK+pyiG+9KUvkeUFGQH9sEk08TkZZQynoJ0OUeYxChVh5jMOX2aWeoSZR3joEr4nANsjTB1QmrqbEngJfpBRj8GrFSV77RXYbbA8i8LRFJbGcwuUlWAVMbZW1Gyf2PKYFi4T7VLTKb7O8QoXVxc4usDSBTq1UcrCSh2KWBHNIImAoU+OQ5paMqGS1j1a9pRAl9x2oCMClbBeq1G3MsbpY9x8xrWVFTzL4ujgkCRNQDnkWISZQ6QDYhUQa5+hvcIk8dBYHKhL/LOt3zfXsbDUlmXhWA41K0OpJ1Ed27b5av4TvhD/qMQyZ0xsNapWjUjNz5dJkhhwW42eVFuj0aDdbnN4eGgKHkrRHMdxOD09Naz3ZDIxZMFPfvITk2grBM/NmzdZWFgwY9ONGzdYWFjgJz/5idGQr1y6RIZL6i6iei+wHTfoLP4ip9mIR6c2N174Iu+4HTJdZxxaTFyLf7jbInrsk6oasQ4Ic58MF3uWEngx/gsxyfiIPzkJ6dYUTb+AYki3mdF2+zy6+0PqdsIVr8vj++/jFDPsbML49DHdpkeeZZycnJh8DkmC9fw6q1du06xt0Fq4TFTkHJ0kTMIxl5P/hMRa4O3Fv1rWgXj+Jdbu/yW2tra4f/8+S0tL7O3tMZvNDNGVpqnxTtdam2RziehJ0SM5V7KIkfvPaj8VQL+INZ9nn+bBVrVTSau+f2FhwYB06UBVBlwcBOSHiP+kZP6LN7kI+SXUXxQF3W6XpaUlMxhKtarZbEaa5Xx7uw4o7pwG/C/+4SLxMGFcfJF08QiVjvDUlGK6jdV/lyLqU8xKdpuoj5WO0PEQ60wzLsBP9F5xHKNFt19voHq9EjC3O6j2i9jdBYp2m7jXw+4tkAQ1imYT1e6iz4C1bnfK9wcBejpBjYYwHqPHIybTCWkco4cD3KMjgv4JrdMjTu7fJz48xE9i4qND9KCPHg3hDABorcltm9zzUGlKrVbjyu3b3P6FX+Dzk1/mtddeMyu7+Qt+XnYh5/IiNnK+jwhAnh9A5g
edqmVW9f3z/UxaFSg9S6NbZSeq7Hp1sSDP/+Ef/iH7+/vmoqn6Gfu+b3ToYRiaZCOttSlcJUUh4jhmY2PDsJO9Xs8w38Jo7Ozs8PDhQw4PD6nVaqytrZkqjnKhS+guCAIzaNq2Tb/fN9abUk74tP7nQVkc5wuEt/7t0uM6n+IWE3wV49YCFoKU0N4lnh1iZ2NcPcPJJ3zxpRvc2Fjgj//w/8tkPDIL4lqtxq/+6q+SJAn/zf3/htGofM3zPK51v8y6v86f/fjP6Pf73K/X6fV63Lp1y4QoFxcXz0nWNjY2ODw85OHDh7zzzjscHR1RFIWJNsy3nwY8X9Q3ntUfLmrVvvQ00HPRc6PRiAcPHvCjH/2I73//+zx48MA4alS3Mx/dkn2UCViuk9FoxNtvv80XvvAFCjQRBROVM1UFE3ImqmCqciaUz/3fee9/y1kxosqtW7nfAvS9f/e3saOUINNYsxhrEmFHKV6c46cF3iCkOUip5RAdnVKMZ6hxSC0HO0zwtUXnTNol/dtxHJTtEOOjvTZ+Z4X+zCFOlwi1R6xqRNojseroWpdcN5j6dunvT0Bi1UhUjUy5ODolIMJrznCLkJnVotAWFgXHoeKgf0QQBDx8+JAf/vCH/NIv/RKrq6vmOM9f4wKUq7lLAryrIFzC6MKgS/KqgJhCw0RdxvYe4FiZAejwRDoifUPGMK21SVaUc14taiO5U1prms2m0afP99P5qE/1u+f7VzmGacLUIdE1JnGXWeIyE2Y6dZlENmHmMY0dprFdsty5X74vcZmmDmHiohTU3JS6n+EHZ+x1rcANCpRb4NSBRQ2uwrIhcDSunRKonIZKiZRNqB2m2mOqfbyifM3XBa7OcYocB42lFRQanWnSUBMlBWlqk2mPJHdIcwtdKGxdoLUi0xZF4ZDkGrRCF5BnUBQWWW6T5jYasFVM4OY0awmNPOU4bYNS+NOEW8Eb9Gc5udsiq/UY2F0Gp11mhcs4vkFIQH7fwVPZmUAloqZi3GJGPjvFSsd0goTFZs6Cd8DH6RWUzlhWx2ahd//+fe7cuUOv1+MLX/iCWQTNy3iGwyF37twhjmOuXbtm+nN1XJD5sXpL05S7d++yv79PEAS88MILNBoNs11Z+DuOQxiGprK69FXBK8KclwxvQm7VOQp90tZtMr9LlHv0pwWpV+fOtEG2/E/wIDnhwXCZ5tYGg+Vf5/TnclJVo7CbfOC2KZSDfSXEWh9DMuRNW5EvDNBRn5l/CV9F1O2YBS8ht05R6YjlrodTTPFVRGAlTE936bZrJvrz3e9+l3feeYfbt2+zubmJ6wXUOis82hsxSR8yGiv2lWZUXCKzGljtHmHdJ1+5QlQETBOH3G6gvC653US7bQq7wZ0ixWWGP41w9QylRrj1kHorQUUjoACtsYvSL13rsqjlo0ePUEqZaq/iWFMUT2y4JU9HzoNIjWRxJXgjCIJPubPNt2cC9PkBowqW5sHY/CRX7YwXsaXCkEtGr2gcZfVRlW/IBCaey9JRq/KG09NTxuMxgAFESilWVlZKYIHiJ1du8L6y+GuvDPnP3+vw9Ut7LP7gX+E73/kO9TPtbRAEDIdDcq1JPJ8sCFCdLupyD7t3Gd1qo5stskaTQpjqdodJq41utaDdhTOLPiwLPR7DaACjUWnJNxljTcZY4zH5oE+x/QiG5esMB6jxGGsyohj0y8/MhWqLoqC5sGAkPFAWJGifySKWl5fpD/ucnJ6Ux7uidRKdvjA5sqh4//33eeWVV4ym9CJAfZE84KL3VPvNfLi22m+q7Nb8IFTd1meBtIsWD/L4ogWADFTVvmNZFnt7e7z++usMh0MzocukLQPf6enpOa9wCVcJGxgEAfv7++VC8CzBqtls8tprr+H7PkdHR6bgw2AwMLrk1dVV1tbWjCxFkunES3YeDNy4cYOrV6+a/fvkk0+4fvz/4cHC7/BS6xF/bvwfcDjaN1KZTqfD5658jtWVVd58/Ca78a5JjGl2m2y081LL3W6h0E9kL2cVIKMoYnl52TD/ruuysLBgrMlkkJFkv+3tbR48eGAswV544QXW1tZoNptsbm7S6/XY2tri4cOHxj3p53/+502FwJ+lVSMjF7V5cF4dl6TNS6rm+1a1z4ht2dtvv833v/99PvjgA+OEcm4bdR9adWjVoO5jtWrQrEMzQDdrqEaA16phn+mwrVYDq9Pg7yz1+L32R8yURgF1LJrapqltGmf3G7r8T8kPPuTM95yKB3rlNt385/5m0ev16HQ6ZgGBUhR2jdSuM8td1NIGodvmaOwy0z6R9kntOqHySOwaFF1CxyPseESUTGJq1VA6w9cR9TzBc0NcO8TJxrj5DHTMg+s/R1qv8ZX+T7h99D5WOoawT81KKcI+vo64vFbatMV5mWcxSiz+tP7fJRqfMj31+f2l/5qVG/d4pfUv89FHH7Jjf5HZ9T/Plxpv8vXNfWMtKCSMLCJFey5sljiBVa13BTwLUy6R2QKb//L+X+aj6Bbu5UNePfynsVViwL0wz5IcLpp8rTVOLcD6xhfx7+2wWBQMOt/goH6VzWLrXAn7JyXg6yS5x/EkIFMNhlOYJg5R7hNlPqNQEWU+YeaVYDopQfc0dpglpT47TF20hrqXUvMy6rUUP8jxggInyLG9AruhoavRDihHU3MzXLugYaW0lU2kHCLtMClcjnQdW+cEOifQ2RlzneNqjaV1mdhTQBEp0kyRphZJZp/dHPLCRuUFSkOORaIVWWFB4aALhS4UWabIcoskt9DawrFTPDulbsd4zgTPyan5FrYNlg2FLtCWosAiQxHnkOGS4BIWmqhw0dpFWwWWV+DmCb34hCQPWHMec9DPCAf7eMU9NpaaXF5ucmtthYaTcfz4E+xswudvX8W1NY8ePTLz5t7eHnfv3jWkzCuvvIK3cos/+/kvMFMOv/idN1Cj0hnuzTff5PHjx7iuy6VLl7h58+a5cUfGHpFEWJbFwcGBISjm59D5Fscxp6enJElCnucMxxOs2iJD3SOOltjOc7atRfqDDbxsmf70Bgk1xrEingZEhUc6qjNNXcLcJdI+ceEBCo+IwIpxk5Ai7GPnEzwiGm6GU0xo2RPG23d4pXebeiPlcHCXB3fexiPkG7/4ZQZHj5iOSy/4w8NDfv3Xf503fvIGrVaLP/fKn8N2XPaOJuB1GGMzxaE/1ATtDbanmlnmktk/z3TgEOY+yUHAuPbrzL7s8Mhtk80aZFEda5TjFFOsq2OsYsLR7ISgleDoKU4xwR7skRy/T92KSY+28K0YkiE1O8HVU+xiQjjp02o2WVhYOLsGa/SXfpVB66t8Qb/Lm2/+FyTLv4J6+HvsFodGzTGbzajX6+dqOTiOYx5LLllRlLbfkkheTaCW8UMk2s9qn+11NtfmQ8Wy4qs+L6BH3iMTXBUwid2U7Ky4QeR5fs42CjCSFtd1jZn8/EQuoF1unufx3HPPGRuw8b/57/GDb/wqP3Qc/rlfmfLfee4eP/roI4a//luo3/5r5M0Wk0aDvNFCt1olm52mqPEIazxGT0YUw+EZcB6ihwPUo
I/eeoAej2A4LIH22fvt6Rg9HlOcsTPiEy0hUZlIqgsfCXMJaxp7HuEZMwsY2c7h4aFhh4qiMCwllNp98SaWcNrh4aE5/kop42kqIdof//jHfP3rX+fWrVufOtcXnf9nvT4Pfqrnp/qeeYagOiDJvlbf/7QB62mMvzyu9sXqd88vOO/du0e/3zdJXOKdKp+vFu+oVusTZ4TT01OWl5cNIy8LyHv37hmwe+fOHQ4ODtjc3GR9fZ1PPvnEgAOxKVxaWmJ5eRngXLVCiWZVryVJnk7TlCt7/yl/sfNdXr32Kvfu1YydnAwWspoXoCL62KpvsIS/W60WCwsLrK6u4vu+YUxXV1eN5lQqNor+XuvSO3Z1dZWdnR36/b7R67muy/b2Nt1ul2vXrtHpdHjuuedot9u8//77vP7661y6dMnYzP0s7Wng+rPaU/sTmlBpJjpjRMpY5wzzmMPZmPuHe3y084jHwxOmQUHxG7fhr34B1arhNGtQvSlgEsI4xJ6EMAnR4xA9nsF4BuMQvXdK0R+TnAxgEmLNYpr1Nv/G3/jX+NKtF6lhobg4QhClmn/z/3r7b/Jp1nyDOTb90cbfwG4sgN8maj1hr7WyyyhLNqWpcxp5huWO8fIZKhnBbEg+61MPNOutVVAZcR6T6pxYZ4yTiNR28BcuUVu6xFR5hLbPlDaRvcIsaJMFZbRyu7HGL+x/i0lmszdUbE9dRrMV8Hos2TfJrCZh5jFKA8aJQ5gFRPoG2aXfAMvikBtsrf0NjvIHbK3/jyGxeJBcZyn8X7Kx0jEJpOVC3KJQAf2wxjCsk+oa077H0TBl99Cis3SF/hRSXS+TEBOPMD0DvplHmHokhUuJQhWpvUzmr1Nj95y7WK1Wo9Aap94jbywyvnSL9+o3+OCv/CbT1iLJrYBUzRhyk3sxfLSzg50cETk+aatO2mqQ6To/HtTRA0X9o4yGlxH4pTTEqxV4gcbyMpwGWL4CFxxH03BSfCujpSxiyybGJsRhql1OihonukagMwJyvCpzXRSoQpdLu8giyxVRokhSizR3zthrGwqNfQbEU22RFxYUNhQKnSvy/Am4LrSFY+UEbkbgpnScEN8tcKwc2ypwXAvLVhRATgnYM2ySwiIqbMLCJsxdMlwsnWNT4DuawNU0vYy6k9J0Mjwd0rBTGk5G3UogHtL2NUstl5abs7/1EY/u3+G5mze5efMmjx8/Zm9vD8cv84MejB6QFeWc28t6bDqb3GwpItfnsFkwVm3utJcIHY/tK20m2MR+jeMbCYNfUOS1Bolf44fdRcbtBaZuAErxo+e+yO03y8reYmYhaoD58UbmJc/zSHJFnHn49ip72Tr96SV2jtY5GiyxG62j+l3+9KMV+jPNyTgj1h6zzGN0DRJqZHaT71o1rFFObZrS9HLsfAIM6YWKnuUQODG+iljIB9SdY7o1Rc1JSafHNL2cwIqp2Qkqm3F0eIBt25ycnPDOO+8QRRHdbpfbt2+zuLjMrGXxve0PqI019d4G7e4GoTdi6rT44eAlUu9rOJsLPPYnTJcc/qvoGoPrBbgdvv2gSaJ9FBpfRXiqZK49Qppphp1PyMJTWt6Qppux5JT7NYv22N56j8/d2mBwuIWO+2yuL3F4cGBcZz744AMjCxQcOBgMyrytPCdoNCi0QgcdUr9L5K7DpRc5ImDW2KC5sMHMWeF+/bcgVzworvHLrX+evQf/tzI6fuaFLnOwRLLiODZyPSk+JBInkbYAxlBC5ish/JRSppjb09rPDNCrneyixxeB9GrmtgAjYTbq9bpZTUgRH1mRVDVXtl0a7CdJYizCqgy9sCVQgpnj42Pa7Taz2Yx2u83xxiaF42Kh+SAtOL37McXeDr3ZlPjwEGsywZqMyU9PmB3s48cRejYjOyvgYpIQK/elye+sroayM2ZG7PPEt1fsE6tZ/HLMqqFZKajhuq5ZLYt+Uu4fHByY4yOeotUKk0opUzDDaOIch4WFBZO5PRwOjfZVAHp1pf80RvNpmt6L2ny+gjyu9o/q42o4sPq5+e992nfPS2QukslUj30Yhrz++uvs7e1RFIUJR8l+iSWV67ocHBycc1MRvbxYeVZLKvf7fU5PT9nZ2eEXfuEXTL+WcyA6v9XVVRPVWFpa4sqVK0ZjeHh4aKq/SXGX559/3mjdTk9PzyWcyQBSTQKTfbRt2+hwZSUvYf7q8RVrPHH30VrT7/e5evWqSQ6yrNL2bm1tjc3NTbPorNfrxmZKjuXKygrvvvsub731Fqurq7z88svcvHmTjY0NY7/4k5/8hJs3bxoWs9ofpD0LhCulSnBNYSQgT6QhpSxkQnZOJiKvTcmZiJyEHA3UtUUt19hhSjYYEx6dEh31SYdj9GiGNYvxkhw/ybFmCYxnRCcD4uMB08MTwuM+nF2/4olr+qOyyKw6udskc5uEhUfhLmPVF5jmC/yHr9tc2QmYpDbDuLSTHCZndpNxWWk0yhWU7PmATzPo8v8Qi08WJ2/8tY5dZ6m7SJwnFDZkCmZ5ztRxyNoN6strFI0OUwJOrQ4ztUns1sj8BjpXfBAleLMYZxZjTROYxRRJijXTuFMb73GNLFJkqU0cK9LMKfGtoyFXHKhl/kPrX8JWOdpK0c2iFOAom3uZS45NjoOyc2w/RtkziEeo/BStFrHyhAmXyBwLNU3QysXSBX8c/g/Qwyb5vRZRERhpR64tam5C3UlpeCl1N4FshJUN6emCPDzh8mqLtjWhvhaQuzCzIbQ9QmUxxeEo7TDM21gq4aH631Lg8zYu38wdis9ZFKlNkdgUkUUe2uiZjdousI5KrXWRKHRdl6YAviZpejTI8KwMz00o3Am540FQJ/fqZF6TU+2TF3VcnVLTOT45ji5wi1LTobMCMo3CQuOQZTZJVrLXceaQZha6AKfI0UVOqjUFNqqwS8Y7h6IowXWa2+SFhaUyHBXhMKUTaFp1C9/V2FaBZWmwQKsS06ca4kyTaJsUBxePSHuk2iFRYNsFnpWjSVHFDN9KWO74dLyCQMUEKqbp5rS8gpqK8XSIryLS8RF/+sffZDIunTp+/ud/nitrV87lY8zLx4bjMZlfw+4sMnEDdpdX2cJldPkaHy+ss+etcrSekgY1Qsdn/HWLxK+T+LXyFjTIHBe3yPFvhvhpRNdWNPIUgiFuNCNII2rxELd/TJAlOOGEjVYLZ+0q/8Vzf54sUazdOeCj2VWGsWaw9k8RLirwurxZe4k7J2tY0x5h7jEINWHuERc+oe2Rv+qi8hCPkMY4pxnndKcWrp6RpSd0HOjYORv1MQvpPsFZouPDR++STI7YWGpwaaHGUrfJ9evXWF5e5u7duzx8+JDbl29z9epV4jhmMBgyCgtmqUvuNDkd52UyLsuMYotJYjOJLFKrwXTmMLMcBs8rYu2jnTZvqDbJaYACvNshd2YjukrhERKu3aaIThmkPu2aZrk2xatv8/Djd/ji6i3u77zFCzfWWGq6eCqk5hQMBn0jEYnjGOJyjo2cssqwr3xcu4z8R3bE3vBtiBwubVzh/k6D907rTPQyKuoyShXZ83+BrLtG
rH0Kt01uN4kKr8yxcpr03RbabUGRlomj+RQrHaPjIZN8wqiIsVKAckGaR0Om0ylZlhlVhRCqSikTNdZam0qukiwKT/BMvV43nxOsIJInifJdVB+g2n5qics8izkPzp4GoC7S1mldmsALeyfhAChDBMLYVascSljS8zxTnbAK0OUgVYFlnuc8fvyYZrPJ6v/pf4f3r/6veK3d5C9+79u89eM3GA6HfPzxx8abMo5jwvEY/wwEywpJtids5HwTIC3sprD9wrpmWcb+/r5hzKvg/CKQLmWnq7pFpZSJHgirWbXGmk6nxgbIcRwjb5DtymJhcXHRuOVYlsXOzg62bbO7u3vOqkn2rWpbWbWRexpjXb1fZcbnQ3xVAFzdTlXTOS+jumhB8KzFwU8jk9Fa86Mf/YjXX3/drIalHLDneXS7XePpure3Z8plV4+NAGABY9JE3yoTy8LCAvfu3TNRkna7jeu6psCJ6FKrjg7Hx8fGJ317e5vDw0NeffXVc8m084U3ZD+q575q4TX/3nm3AOkrVRcY+T4pky7XmlSenE6nHB4eYlllYYkrV65weHiI7/um/oAsJPb39zk8POTll19meXmZbrfL/sEBozQms/Mnumt1prsmZ2oVZ6D7PJieVB7PyMkVpQQEm4a2afJEHtLSNg1slguX6wQ0zuQiDW1RLxROmDA7OGH7o0946803ee+993j8+LEp2mJZFq7n0exdord6Fbu5xMP9Pv2Zxm5s4LVfxbu1Ajdc8tRilNokqgZBG4IeKuhQeC0KtwlFip2cJeTGQ1Q8ws4m2OmEB/sjbt1OubyoqPtge+B4gKMolCZVBZmCv/Eo+Ld4wp7L/8vAy+Y5TePuq/8zvGlIEOe4UYod5ahZQhbG6FkGmSLaboD2iCOLNHPIMgdd2IAGZZMrh9hSpMoDCjRnMjFloy2HQrloLBxSbCvCUiF+cUwj2aURONQd4CwRPgojoiglyxXK8vGCDjk+ce4R5z5xEYB2UZnCykaQ7+GrKbEV4sen1A7+LjpYYaM5oB6EtC61aawsk3ouke2UUg1cIlxi7ZAWDse5TZb1yPMlHmY2RXqbN2OborDIt+0y4dApsPwc28ux3ALbzVFWSq4U2nJRVoLrpLi+Re5mZEFRyi2UR24FpH6D3C5t7QrbRhUFwWhAmjewAIuCabJCUZTaaR255NqlGAXkeOX93MLROY7WFFqRFCXTrLOCInfK/S1sstwm1zaWKgicDN/NaNgTbGJcO8V1FZZbnicsheO6KMclKRSJtki0Q6wdwtwmLDxSXYNCE+mIRhMct6BupzTslLqT0bATrHRMPjslcGOaTk7dTnCLGTUrYbVbo9sqi+YMBgNOTk6MV/7169eNK5vx7bZtwgLGWEwsn9NghZMXv8QYizSo8/3bL/L28iWyWoOp5TKzXabKZmq7hI7HzHIJbReFpp5nNIoUZ2GCHg3ouTYrgUeR9HGmJ7SGx3jxjOjoAC+JYZbg5BaLrSWuXLpKlnt88uiIWe7SXrpMVPgc9COmqcNxscgkWWaWucTaJ7MaZFad4mOb4DspDTvmzeIVPrQTnGJGemkBNxrgFFNqasa6d8DlpRkNN2N6uot9prfe3/qIux/8GPKEjY0NXnrpJZbP6m30+33u3bvHwsIC16/f4LA/5V54TFT49IuC3O4w9XrcjbvsJ5dxJ4t8/8EK+nGbw8ErDOIC9XGP9G69zC3IXEAR2Al1OyGwY8Nie6pcIHl2iJ8esOinTMM9OsNtotEBDTfj+eurtIKCF25eJopCfu/3fo9XX32VMAy5c+8O/X6f3tISV69eZb22zsyZMeh/l0vphMP4DivtNVTQYJL3uHs6Y5atkGVNUquO8jvY9UUyq0HebnCsUgq7ySx3ia0aaaNG/udr3CnSUqt+PULHQ3TUxyOk8PqodIwqYmrJEdZ0zKy/x+euLLMYOGzfe4/9Rx8Rjw5xVIp3Ji+DJ+Yh7XYbAL/2d8iWf46Fwz/gNDo1gFsi6XEcm0Jcgu0E38nrIgOt5sPI/CqKBrF3fZYyQNpPDdCrj3+aDVc15/PALIoi4w9dXZFIpcJqVr4AdPmc4zjMZrNzwHE+eUZYQ9ERTyYTku/+Kdd2tvm1v/7XKYqCS5cu8eDBAyOHEestWSBI6XPZ94sSveRYyMpITpRUiJPfIwBeTuZFen5p4nohAEhKPM8vPER2IXp9cbsRX/l5hl+yurvdrqmUJwVmHMcx1fUkuamqz35aklv1OVmYVH/PRZrwecD9NFu7i/pPtR9Vt1s9F/PHc37xMN8mkwl/5+/8HfI8Z3l52fjvC0C/deuWubD29vYMqJV9l2OTZZmxbZM8CqkwduXKFdbX101USFyLhBWXxZIwrQKOq8VWJHwnC0EB2pIpXtXwV1fqIocR4C0LCNlGFdzPf15ekwWF7F/1mgyCgEazyTiNSTt1hgs1/MUujcZrNMIxWa/NR7evMLzy50BnzK6s87DTYpBFvNNp4vbaxH/1Blng8l9a96hri6a2SvCMTfNMb93ApoXNYuFylfL1ZuU9LeXQ1DZ1LKwLZCHSJ+IMBjEMQs3pNGfndMrWwSn3Hh9zb+eYnZMpo1gRW18mX/o6er1F7rYovDbaLxkZjcLTEXY6QTf76GhITSXUVEwnAD06oqOHdJmwv7dNnEzA1ihXUe+0cBp1tO9T1NrktSZ5u0Xq1cnrbbLGBp+0emw3fKa5QzpSNHJNM9M0MggyjZvauKmGLX6TCE2IIsEhISCjRk4NjY9GodDgqEwFRLYmUQVKlVKevNDkWGjloi0XiwJbJ3hWhGvNcLMpZDMCK6fTamLpgjzLyTNNFGfEcUGBh+U2sNwWkQBs3cLGwylslJqiiwIryHEpAUGdPnFxiqoV2E2b9vol8ppXSjUsj2GUMi0UOHUKuwZWD80lEu2gM5s0cyhii7uxXTLXJzZqIOC6wPJyHC/Hdgpsp8BxCly7wFExys1RviLXOcpxiXNFbteYWS6ZUyMvbJwix84z7CyDNCKPIoqswNJQZC5ZapPkDonlkFseheuR40DqoIoCbIV2LPSpTbzdwY1CFDYT3SHPLXLtUOCiyLGJ8KyEmhPRbtoEboHt5FiWxrJAWYq0yEkLTVpARslcJ9olxiPWPjM8tKXA0gQUKDIcFVOzEmpWTMPJ6fiahYZFzUoIzl7zdUg6OWb7k/cgnZKc1Xe4vXmbZrNpcpyEkBiNRkT1Mkk+1RA6HpEXMKsvcr/VIas3GWMx6GhGVy0m2ERegNXpEbs+E8thqpwSbNsuubKo5Sn1IqWWJoSd51DTIc5sSq3R4rLr0ssirmQjGkWGHc5QcYGTKXQMs0EIhQ9um6jw2D4c8/hwTKO7Slpf5HTaYRh2iHWNRPvlQoSSCXaZUZ+mdIZQdzPycAWPkLWoTsPN6DhjFpgQWDGD8Ra7Dz6EdIhPzJ/7hS9w4/Iym5fXOT095ejoCMsq7XjfuP8GjuMwHA958fLnuNS4znL3OcLcY4uccVrnOOuylQXstK/RXbnKe9S5e7IM4y56u7S0HUUWyUGN+H0PrRWeikt
2K23XJ2eUnbtnz44YdMJpNBTnJwcPCMtCMA1BB6FNLVg6XivmFF0G0HKdF+ts0+UHfODWAduuHT4+NjLi8vyfOc1Wr1jHQmaLzD2rPf9dqX/e6vH0EOE4Y9gWeAfMBS+250QYYSzqvgIhPWsDDwGrBYwGBhLQtrYZixDGtnKFgCyRC6EYFV/16zgfvbvxRAD9v+QMuVlKF74X1gHbw2n/cqD+D66dOnz1gkPrPd/nH8n/772OIx9h/9K7i2RCQzJtfvkE5PSKYnnWOGGrPKPsrp5Atgd6jzf4HXI3w0h15r7fUIZNYBZlN3rLUxWOew3mMGi6ze21rGeNd2er9qC5sVyj4gcRtGquLFkzHj2GKrC24cZoxjy9mj97hxPOJknlAsH7E5f8Cv/PP/nnffeeeZm85+iyTsu2Q8hnTG7i//t9j5bfiZvwjv/SpMDiB7GbI5ZnyA7Yc5yTqtoshnyNEB8uAGSiS0Ou8GGdMZpBPWtmVbbxD1Gl9t0KnEvn4Hmgbx619CVbb/vCm+SfEmw60ThFAI6dC2xllFYxNql16FoeynNPbgWkYeGTmktqjIEKctk6wmSxrGo4rpaMtsXJDIC5YGDtIVLx+WREpTNrA1ugNwNmLXaGqbsG1S1k2ONZq6jXm/mFFfXqepP05rI1yQhHSGyN15I+gCZPbB9Z5K5DvBde+nrF3PXPeSkLj3vlYBBD0LrkVgWF2n08eJK3BtNb6UUAhQrmPFFSD9VYNEOiJpUVFLpDptrZZtJwnp9bieMMjYaazbZ8C1wouI1qtOEmLohivbK3CtvSXyLZFriF1D7ioSuyY2LbppiRqLrA2RFYzHUxCai+WGqgWdTHB6wqaCdSWoXcKlH2FEjNSGKK6JJg1qZJBHBt6wkPsOXGcSn4qOsY4kLlJYLXFa4hVoXe7Z7kmsy7A261h3ywCug70hvmeyG4/YOUTVIosa1lvEaoW6vESePyXeXpDXl8TFKcnuKVmzZuINkzRmnCccTiKOJ3E3nBgZIleSigZttyizJRaWSkjsJMKJnNpmbOMZq5NrrP0Ra7fgqTnmSXPCXbNgZaYUdkTpMlqfYL3G1xLq51jr5wcWn9dcQx8o1LGywldoYYhkSyJrElmSyYJcbsjlBmXOKS/fR5kukOzX5//x63Q687X/u3xPQ907d/6iD93NAIzC2hSGnuilRsZdWevur/cBEAW5DVy1eaWO2Kk5yjx65veDu0LjoJZh4HpGq0c0RyPS1z7Bz8afYeUTNjJhKyJ2xFQiohSaWmvaPoFUKYtUHinBS48THiclLrD0tiV2hsQb/Nljos0D5NkSvz6HzRlxW6DqHXZzCUUHrihWyLpAGoh0iopn6PExRo3ZmZhG5IjsANIFLjqgil/B6gluusDFU870lHsyQvkaLRqUaJHSIITpLT7BK4FV4LTCxREmjrBxCkkErkDWG0S1xhcrZL1FNTtEuUa3O2S5xRcr3G5N6w4Ql99Gbj9knkj+w//j/4FPvvoSv/oLP8s3vv714Zg510WWh2TE/Qjy1gq2bc402mDtFftp3ZX++bXXXnvGwleqmIe8wgkXpG45AIp9HBAA1H7n/NOf/vQgEw3tfqUUX4o+xj/Uf5hX3BP+qvjZQRO/TzyF5/feU1YVm6rl3EYYk1L4iFqlXFpLXaVcmGO+NU6pVYKNR5TE1CrBxCPEesbOa0qvKXqok1WGZFahsm7fj4RBlGvGynPNKWau5oCGXLakoiGhYiQsP/eL/4Av/9ovYbeX2GqL7Fn6f/9v/A1eeOGFrgOv+kFDNK1t+S/f/G8GpjhNUy4uLtBZRm4LpEsx7oqpD4B2Xz57enrKZrPh2rVrHB8f884773D79m0+/vGPD5bHFxcXwz4L/uC73Y6zszPefvttoiji0aOr6zI42L3//vu8/PLLrFYrtNYsFovBwvr09BQpJdttJzW6fv368ByBOQ+PAHCDAuF5X/z9FO84jgepB8BsNhs+e8ij2d9Ct+Nq+bxalwLgDl2TgDUDIN6f2QuYNODQ/ZmT4LQS3nMXkmWH8zhId4PUCTrQvm/usb/W7c8/Btl0WG+DPj10ZQK7//22f+mgon2gvT8YGdqV+5VJmLidz+ecn5/TNA2Hh4fMZjMePnzI2dnZ0L4IN42hkvjs34CkG2ps/p23od1Bs+a83aDtjky2aLcjpuIy+mFoY7ARdvpTYCuE3SHLHWp3hnbvE9k1ou0T3FRDvXnK9uIBpjgnjwzKbml2Z5jiHOW2xMoPIQJVVfHiiy/ymc98hpdffpnbt2/TNA1Pnz4dPD0fpY+4MblBnuRcRDvOi0tOnz4dTqj9AdL9aikwB/UbP4E9eg10Cn/lH3WFRLmCaoWv1th6B8WuG25sfDeMV0pEO6LZ9FZE0tOaLY4W5AZkhpMpiAkITeMFvOXBj4AvdOAa3wVeKItTppOFqApEx1BPsjWTUUEWXSDsIz5yZ8pHXzsmkhZrPJXVlC5i22qKVrMqoHGdW0jdppRtykWT8OjykPrxTZomwtgI6/t4erhiBcN1GaQrw98JTOHeUCb9MCOgLTJyQwiOiHpwLcEr3z+9vwoacnsA0F2x1h1zHXWSkIKBtR6eq3ceUbIrQlTSgesQHiOFHWS5XnSSECM0xkc9uA6aa4VH0TqFsRGVEfi6B9fGX4Fr1xL5hsQ1jF3BSHhGQuCLEr+r0K1HuS74xvROOcZHtD7CiI613jaKoh1x6RegPXFcE41b9Nigjixy5BAjjxwLfNoli9pE4GOF0xKrBU5JnAKhayJVg/Z43Q3uYhSYzp/bmyvJjQ/72PcDto3AF0DdOZuwa1CbAlYb9HaD3FwgNmfE5ZJoe4Y5u4u4fMCMioWomcmacS8HyZUh14ZM1kwiR64taZoRpzPi6QIOxpRiwcq9wsotWNoFKzvnvfaY31oeszRzNnZMYTvWuu0dQfx301qL58H2c+A6SEI6LUwngXA1yhdIu0PZNdpeos05ylyizRmieUrs1vimCyMTZk0eW44Ou8GsAKr2Z2rCAh6GpR5uHrLcLrv15O/y7vdbv8P2/MD+fjcOrpwfnrfUDT+3Hlw8xY4OcPEUny3wk2PM5AQ3OeHRzZ/ApXMEG6LqMS4d45MMH6e4KAGtQFiEcF2xqjrd9QMp+bJzRN4SO0viDalvmfuCm65h5Cum1EypGdGSuoa4LqFY8eHb3+Lxu2/yo5/6GM2uYNMo8sULRJMTvvrtD/jGOw9p5Aijxj3Avo5JFrj5Ans8x0VdIdoNy3twZXcMpUFIixe2n01REGt8EuGTBLIMhEHYHco8RdstY9GS0g6DjbrZkfoa3RaIYk1sKyhWtKszntx9G7u9oF1fYOpyCF8JxyTc/BGSOD/E6wkXH/uPacafQI4Kbn35D/MHf/CH+EOffo2vfOUrPH70iLquB8Y7yD2DbGHQ4vqY/+vX/gqX9ZzPnnyFn7z5jwd203vPw4cPkbIL7PMoCptg1IifbX+aB+4VZG34n2d/k8x3riatddhoRBvlGD1Cjw8RoznrBnw64z+9f8T7T5Y8NZ9nXWuWqaDKUp7I
AzySb+gX+Xvq80yk62x3bUppI05v/gBPXjqgUgnaG1qp8VLzD7aGkTDkwjISLbkwTJQn9iVNvCLHkLsVUVsQm4pZDG/Mb5DRMtWQi7Y7v5LO1/vu3bs8fPiQxWLBarXi6OiIl7OXuRHf6IZ7+yLFi6578KXmnKS6BA02y4ZOPcC7mzn/8buf5STe8Nde+TWmuRgkN0FVEOQM4f/sO9mFIidIc0JBFfTUAWd9+OGHPH36lB/4gR8YBjGD1Nc5x9nZGXmes1gs+OY3v8m7777Lyy+/zPZTP82/+Pi/xWz1Hj/w4N8bgOj9+/dp25bDw0Oc64Iiw6BzCHcajUaDk0wY9EzTlO12+4weP6gdwjxHYJr3ddzr9RrnukRN6LoqRVE8Y88Y0r9D1yS8132nln2N/H4gZpCaBPwWcGUgR/dlKoGwCAXRvmNMWIPDOhw6N0EZEo5H0JhPJpOBYd+f0QvFxT4bH7ol+1KX77d9X4C+PxC6r+UKi3xgzy8vL5/R2pyennJxccHR0RH379+nrmtOTk548uQJT58+HdiY/YMZbiDqnb9P++IfBluR/v0/hLj89lAYhBPdGEM8GvHCR/5n3Lv57yNsyfEX/wTrD788pDsNSaKyM/afz+eMx2NGoxH1UT1MKZdlyenqlFW5ojIGs2cJqbXm5Zdf5l//1//1wc4vVKzL5ZKmaQaPUyEE4/F40CV5wKsUmS6IJtew8TEuuYbIruOSY6LJDcrRNdroAN7SnRa9WIOxIMcgFiA0fn8YUAKxBd9gfYvzFcIW+GaHaM5I3BbhzpHiKXF0idYXpNmGyedfQ/wbP4FqDdE//KeYdY1MF5DOcWKE11O8nGIZ0dqc1s2p6xNWZcRpndDahDffjPgnb/Y6C+H2gEo4WcLXPdpwH1yH9y88QnauIJ1Xdw+sQ+R57+4hoCfHe9be00lDBsZa9t7gEmt6aYD0/VDklSTEC5B9pLWKDJE0KNV2LLZsB9YaeocQ1MBWD+Aa3THaTuKdpjWe0gh8JfCtAuOQ1qBcizIVsi3R7ZYUw1w7ciOZRQnaeIrLDTjIRhMQEZtdTdUKWh9RuoTSaEobsxVz0Io4qognjmhi8fkEddwixh6Xg4gdNgGfCHzSs9Za4rTHKYtXjkg3vf1et4taoxBGXbmcGAGtwBnwRnYBPXUHsL0AlAAp8C2o2qEKg666iHNV7mjOnyI2F4j1GWJ9it48QW8eMSsesGjPmfstM93Z8Y0jyySGTNaMYolOxgiRgZ7gogW72TW282O2vMHaH7K0B5y2h7xrFqzNlK0dUwXW2mlcKzv2ffNdgPT+JvY11/2/ZV+g0iBd04VWuQLttmi3JnJLYrdC2yXanqPMKdTn2OoC0W7ArBCusx4MDGLYApO4P4S0zzhBl9y3WFxjNpsN60dg0/bX2v3/D93QfAgd+ZfdnPc0MsXGhzC9hp1ew02uwewEMzrsgPfoEJ/OsPEUH2V4leDDLIxX3TXsfF/kcjUAa7nqgNyeop68S7x7hDpbE1Urot0FYv0UVa5Q9RaKNaJY85lPfJTPfvINpnmKR1C5LkxmZyMqn7EzEVsTUbmEpUt54Oes2phVo3t7vz9GcyvmVzc5DtUVzqVDtR55w1GfNDjh8APAjjuZiK+QtkCYLap+F9FcIqoVslp3DHa96YB1tUGUa1SzI2oLKNe47SWxLRFtRdvPVs1mMz72sY8NLg3WWoraUjSdraaI5qhswa4es23m+PlH2CaCch7j5BgfTRHRrLPblGNs75jj1RjwXbp0NOs8bbzmj/yrf52P3D7iH/7CQ975QPKN91+idh/FVGMan1P7KZUfU7kRtcuobUptY1obd1eFFPy2/RTrLVSkmGTSyYZOBP7FEb+kY5yKQDqkcl2HQglspPh/qv8VViqMiLBCI7wbnHa0kkglaZXBIXB3FU7OEdiuU5jZXi7XOXDhPe/u0m74XymkcHjRUrmYQmaAJHVb/vqT/xuJa5hNRoxGo8FtZD6fc3Jy0hWtu4cD4BFCUDc1qUx5senluJWntpYauOjddrTWXLt2bbBuVlFK4RK+cW/N/SdL4tEBRqZ4NaIh4d78TxJ/7g/gozFSpBiZUicz/tMnL3H3/osULuVROeIXPpjwJ+50tom3bt0aQK2UkpOTk2GWLlj9tW07AOKgOnjy5Am73Y7JZMLx8TGr9RqZjklP7nDvdMXDb51R+imz0Qltehv9qT/KqvZclIbHKkXk8/56H3M3ndAcvQ5Ss5y9RvviD3N0+jUeP35MmqaDXC0wx4HlDe5GAJe9LKZtW+bzeb+EXq1XgT0PqaT7M32j0YiXXnppAKMBTEdRNMwghucJwDasoUFms89K73drAjjft1gMyZ5BHrPvkhew2nK5HNjtgD/30z1DcbZPWATLygCuw+vt69j3Jdr78q08zwcwHxxdwpD7dyhIntvE99PAvPnmm8/88Pk2VNu2vPPOOzx58mTYIbvdjrt37/Luu++yXC759re/zXg8Zj6fc+/evWem8aMo4uTkZAggClWHTY6IaBjFfrDeCRXMvvek1pp4dgtTXhKJZmjLhMouHMzFYsH169eHEyN4om82m+E5P/zwQ0ajEVrrDmDLBCNHXL/9Bn/sT/8FJicfZVWnnO0Ujy8NlztoGFHamMbGGBKM7xMtnew01l5egVPo2DbfORQIXyNcgbQFrt3g2hXaPyTWl0TxJUm8Ih8VCLUiyiXxaIwaTRDpHOIpKjugJQc1wbic1ucYN6Y1OWUZUZYxxnYPR9fCRga9v78Cz4MO5Lvorr+DRfSDtlpE/oq13tNJD3jcd187XOQRPXPtfQeqvRM4J/FGAgKhep/sZyQhHqlcN8yoTOcWIk0HtoVB9Ax8YK3tnhyk9fFgwSe8QzqHsL5nfDt7QN8qsD247iPPZVtCHxjjtht8UWB3FXZXdSy3iojjnNp4UAlephBNUOkCp8edn3WSIVRDclATzwwqtUS5ROQOmxh8BiJT+ERgNLhIDFIQp0XHMA76fLpiw4aQGAZwJBoPle3lLRKsxjuF9aJjhoXsipRIgJJQO1Tt0LUhag2pMUyFZ+xb5PaSpFrzQuZ4fVQx391DPP02h35DXl/gi3Ni30lDlJQ4kVPWcLoy7PwBO3WNnThmyyEbv+DSLDhrD7g0czZ2Qtmz1sZ3dnvf4RDyXYF1/8d3BMh4hHBIDFq0JMqQqIZUVoxUSa5LclUwUjvG0Y5MrtH2irX29YrV8oz79+8PjFFgP5IkeSbsIgwMBYC97/oUtrDOBCJhf4Bpn+RI05TFYsF8Ph/mZ8KwGjC8dngfQU7iAaczyvSApRjz3rJip8bc/dy/+zcRHAEHwCGOBZ4prlPy44jwdHqwYf5TdJS48b3LEX2LqB8OpUW4CmkKZLtD1UtkeY6uzombJXLzlLg8h/UjdLtBNWtOP/u/ZXXrp5i9818w/d3/F250G+sdPp7h0iPaZIFNDnHJAhvPcNGUKD+EaExjI1oru1lp7ToZSwiVUWC1wCqFUI5Yd+mssWygWmJ2p7xwkDKPW2JTMpKGRaqYRp63vvJbfPO
3fx1frqBYUZ4/RrUFaaSH+0zQ04abddO2WJER58fEoyOM7MgLp8bdsLIaI6IZRo6wYtQx8NGUKD/uhsnp5i6ciDvZCwURJWaeUB9PiYtzoqdLahmjP/waolr3hIbAo/rOZ44VGagxXnXPr5ND6rZbJ6W0SOkGGZ6T4LXEa9EVI1ogeicnEXtEAsQeHwts3M/9yG6R7gB2t1B7HCjZuRCpTjqoMEjnMC5C+4Yp695Nq0XRBcoJJFJFLA6PaS08OV9Se42PMmoiahEjvCOl6TTwvkE4g6qWjGXbfd/XjJRlEhuc0/xX6sfwCD7p7vI35r+FlJJ79+5xfn7e3d91RDI+xKmc2kUURnG5a5HJFJXOWFWSJ+2L5IlBSkdLTGU1m0ZgSGhI8DrvbWxjGtfdMySWRDQoVxD5moiacezJtCOmC6gq1IiRKJg2j1ntPNZr1uqAu9EPIfD81fn/m+u6m61rjeFiU6Inh6xqT+E0cjSn1Rk7q9g5jR4fUIuEy8p2w6Q6H1zMXDKhlgkVMV4otK2IbYWo18h6x1hbZL0ldlUniWp2UK0pL56wO3uIbLa43RL5B/4dTl//0+h6zRd++a8idpesVqsBJAYmvigKyrJkNBoNloBBC396esrNmzcHAH12dsZ6vcZay2g0GsKTgpoiiiKm0ylpmnJwcEAcx5yengJdlyeYigQP9bC+Bqebhw8fDgB937IxzMMEsBvW3yB3ieP4GS39/oB5ANSr1WqQpYQOxj75uy/dCWtDIHz3nV4Chg2vHXTp+0PMSZIM6alBFz+bzYZ7xGw241vf+tb3ROnfF6C/9dZb3/HD/Una8/Nz3nrrrWeqg3v37vHVr36Vu3fvIqXkwYMHLBYLiqIYAHH4gLPZjJs3b2KM4e7du8/IXWTPfC8WC6y1PHr0aNCt77cGpJQcHx9TVdWQGKrinG2jSKbXqMWceHabaPoijTrEJwdULsfrWecLricQjfEiBZngey9skFcOK3QJg1p6tLRIGhQViWpJVYO0S/J8Qz7aUhV3Ob/4Njpak+UNOktQoxEy7nTiPpoOC68TExw5xo5o/YimSSmLiLqNaG2KdQFYy25oiA6cdIbie+B6XxoyHNmehh7Y66t0xgFca4tQvcd11DElCIewncVbJDy0BltasniCIKKoI6omunqNIVodhPRDyiKRI4oMOm6ItEFK0w029gBb+D7unMBadxKNtv9qiECILl3UOYTtgLVvRZe62TPXyhmkqTvmuimhLBB1SeYs2jhk2eKNpy5rqqrB0Wm4nUwGGz4RT/HxDB+PQBmE2uGjHaQNIjOQWsg8IpfIXOFTAZmGVCFSjUg1JAofSUQsnvH6xvrO0cR6tLVo2xIZQ44jcwZdFSRVSdZUZGWBvLxkvNuRrtakm4Kb0ylJ9hr/u3/2J7FeIkYO/8PAxwSybojOHqOKLbmtuZ177kRbXk4K5uVD8u1dDswFE7NE7U5JfYPzEY2NMD5mxwEbf8SlmXBWTViaGWs748IuuDQHPWs9onIZtY2xvtf+7zuEPAOs9wcZ9372TDFoQ4XUAUFXoOyWyG/JZEGmSsZxxTSumMQFh3nDJDEksiKiIBYNSnWextvtljRNmUwmg4bx+WH2wHyEYbHT01OePn062MI+O+R29fdwE9jXW4Z/h5tUWOS/m01X0C3GcUw+PyA6vIGf3aTODyiTBUU0pozG1NGIUmU0UY6JxrQyxsqULvqr97UP8qy+wOz8+z045Xpc3b8wBkmDoESwQbACztM3/9EXouIMNk8Qm8dEzRptNh1zXK/QZotvK6zM0eMjZHaIieY0ySE2P8amh7i0A9hGT/pwpRFOZniR4kUSTvah49KtKUAkgaaXAJVIVyDtlsOx5qWjnIPcM9U1U9kyEg2ZMIylJaPzvx4JQyZaciVwzg65FV/72tf42te+xq1bt9hsNsTpiHRyQjq5xuLkDl9/8wO+9LvvIZI5VowwIsdHM+r5JzGTV9CbRwjnO393PcGpKV6NurW+TyEWZoO03QO7QSQ1OutcgKwWEEWoZATxCKIJVuR4McKaBGciXKPxtcI3qhtAb/piM6KX9xnQohtUjxUoB7GAyIJqIQKZK3QeYSKPi0S3f73vgLVzQ/HqZQeukQLpDZEwKG/R3hB5i/KmeziL9I7Vckmej5A6oTYe4yVEGa2MaUVMIxOEtyS+7gZLXU3iKxLfENuK2FVkNIyV4+bhhB/9gY9x/923+H/8+gucFgt+8uTL/MTNd5jEglhFlFazLAxFK7nctpytaxofI9MpyWyG+si3MAjOPvgYX7ZvsHMxN85X+FZSO03lIirXBeS1RAgcqWyJRUsiWqQp0d6RSHi0XlC0GUJ6Pnfy20yjHakyiHbLKPbENIxjGMWOea4ZxZ5MdWtSXddcLpfYeMzK59jRMXU855vNgl8Vb2BbBa3nVfuIdzcvIBQc5+fItKWKR0TO0KKpRUwtEpyQJLQkriZxFbmy5KJFtwWq3RHbkkx0Q6cHuSaXhvMH75P4Gr9bYrYXuN2SRa5p6+oZ//KQQhqwl1KqUwj08pXlcklZVfzQH/opfuEzf4tG53xi9Wt84pv/CR9++OFg7LFPMGw2m8EeMnQA3+nn6V599VXiOKYoCt599112ux3GGGaz2TDoe3l5OchzQq7MYrGgabokXWAgaQPjHcB3SF0djUZD1k1IQg+MesB84d/7Tk77w6RBAx/AciBfvfdDonwA0PshXPsAO6zl4T2HWYJQNIRuQ5DbhHtPAPNBbial5PLychjkBYbugXOOp0+ffk+A/j9oSHT/DUNXRT18+HBglYLhvTGGy8vLgeL3MmFz+6dx2w/xD38dbv8bJOtf5wc/ogfJy2w2626kTiDjOSJbsMmusVEvoGY3aQ5n1GKKjxb4eA7RrGuN6xGPVAYy5VLEDJSj91Q9cK186Mc6cDXSrMBs8e2K2J+S6TVZskHJM4S6IJkY/MsfQVYXjKQkuvYG81QQ6YxNLSjqCONSWpNRtzFVM+e0OaGuEprk8zSTjrjqEkhsB1Da/uY67L/ngPXzmuuIPXDdpSB2ITidHV/HOHdtemzVsfK2BlNBWxHh8UypG41sd2SzGcblVHXUDeaRdGmYrur2zSjrFne3QSmPiiUyFsiRJJlZlCrRakMubDfE2A8yGq97UB0PshCP7GYrvcDYGGtjaMC1AtcoXKPAeiJv+mFGQ+QMttVI47ktLqhcRNoaUmFZxgl5UbF5cs5yU6LiMV51UodW5thkQhtPQG4hTiGtqXJDNJWIzONTgcglLvbYWEAiIdWIRKFSDbHs487XPbgWeDOCJoHa4MsayhpRVPjtFrHdIk7X+NWKtC4YtzXm6WPaJw/RyzPk6oLP/75P8m/922sW9oxv3vtj3Br9IHVd89577w2BVlpr7t27R1HseKe8RdNaPmZ/mUXckoia2Bcc1Irj5LeYvPZLrHaKk/yU8/MFxS9mmKJl04zZ+AO2fsHazbhvFnzDXmdtf4DCdMEirYtw7jmt9XcMMH7Hhb43I9AXHcKhRNsJgGSL9iXV5jG2PCNyWyI2aH
uJai+QzROUOUWZFcJuEO0aZwroQXNYwLMsY7FYMJ1OB9vUYHsVmM6wsAYGY78dGTpg4f/sDykGzd/FxQWbzYbT01OKohiY07DG7Vv4PQPGncPGOSYfUUc5dnINOznBTY+xo0NsNsOPD/D5FJtM8PEIG6Vd4Sd0p8On97c3YQ3wva1ez2A3V04+HWvZomlJ2RDZgqhdE1eXiPWH2NVjfHFOs3raxZv/+X/yGeASWAJrTlEIFqRcR/MighvAdb26/IJJjnBHr+FfmFJGHcAO4NqLGFx/KwjWpVJ2UjZvwVuEswhvENZ2wNA6hDEIt8P7CkSEEwld2BcQC175lT9MVJ5h22aQ6QQ26sd//Mf58Vd+nNFoTGM120axKSWljSiaiNN2RNFqdrWiMDFlq9nUklUhWBWeVfFTrG/A7/gJzTjHqxSaGnVekG49Kq4pPrZEiAahHS5zuDzBXX8DlKBpZiQPfhYZjUFPkHrSyf18jncpxhxAcwJNP/xbCajpHqHpqPrbSrh24r4wyXx3Zw0WsD1xMWzBwtOGtK5eNhQSh28oMEuUKTieT5lPRlSbFaau0AgQAmM9jaUjN0REK2JamXRJxl6ivCf2bWeN6DuL08iWpL7GbC8RT+5z5/oh81Sg2oLE1YyVxztBU0taH1E0MYXLqZlQ+S7IqPEZOz/j0h3T+AgnFN985Pilt85RKuV4/iF3XniLarHlF7OINK1J0h3pqCDKDCozEFsO4o75L2xGaUYsqwmny2Pebm6zamP8TvJkFfHS/fsoK0h9hFApVSOojcaJnManlCZh5cZIpsSyJtKGyqYgJf7QslscsmZG4VPKRNPIlFpojIqxSncJxk7gQmEZAdd5VpoJkPakQ+GhEbzvrkMi8EDlNdvjBV5Iclfx1y//CTktkSlZP/2QYruhLEsWiwUnJye0bcvl5SWPHz8e5C2TyQSgS+J8+JBCSOLJAqtyzmrN+4VCj1/ire0Cc/0VCqexeoQ8XLCznUOdzGaQTbHRiFpm1MT4eMw/Ff05JgTvjz7Fx3q9+Wg0GlxflsslWZYxn89xzrHdbgfbRSEEN27cGEKKgIGRVkoNMr0AvIW4CocKDHVwhgkGInmeDzMQWa/pD77i6/V6WPeDRhx4Bozvu0uFnwEDFg3SmX01RWC9w9cgmw7ylKqqhoCk8DnC55xMJgMAD2x5IGzCvWeffQ8Afp+9D13akLYawPz32/4Hu7jsa4b2LWcePXrEN77xDZqm4ezsbPCkFEJw+bH/AHPnL3YnSVkDIyrgV8xT/I0ERxeP7rkC1wN4DbIQ34c92LL3/l5DeRfRXuKrM3z5FOqn5LOSP/p3fpLxSYG5XPL0575OMk8RN3+U00efoFzmYGpMG2PaE9r6RZptxHYb0VYxTZ3gLhKIHeQ9+/fOHoj+Dra617OGbys6VmTQd4hhoh99Ba5RFikMAgOuQrgGTI00DdJaXOtoG4EQKdYmWJfiRWfhRhtB0wBRdwOoDJimA6fZCOaaJktB5FB2jetxfs4023aDWj3jEiQhjUuojKXxCc7PkHQApelb4b4U+FZie3AtjEPTOSYkomVMQ+7XTGVD4jy/ce8FbBUTK3h9doHzCuMFrVU0VlFb1WutIwqXE0sDumWqWzLV8lTP2BymcOCRc4N4zSKiDFVpfCyxicLHEmJJFAnQZe+DLvAugVpDbbBVg6xbRFlCUZLUNf5sSfnkCawuSYodanWBOX1Eul3hz57QXpxjlktsH8ccwNq+Vi+c82mScPPmTZIk4fT0lFFfMed5zieLN/kzv/SbeA8v+ac8yd5lW8GtjaFxKRcX99i1KZsmZmdHrBrBt5pP8Uv291O6nMqm1C7GuAjjNANrHWQfz7PWw/d5dj5AAMp3vt+iA3+xbElUzSgyHIwl09TgylMiv+TFo5hbJzGRPWd19j6x6K43iX3GVi1oEn/5rV/m3r17jEajQZaxHwy2v5bE/XzIaDRiOp0OOr2wYAcdb5B6hAV+X6u3Px2/P7yzP1TUGsPltuC8NJwVDcsWKnVMe+cj2PExTTqnTqbYfI7Pp/hsiktH2CjD6wwv405yQFdI0tJ7onswDoy/AtgOKCW+6PaLxKB9S8aWxJdkdkdm1uRuTWq3xHZH5Aois0W1G3TbMdlj1VkAXpqUOjlh6XMumoSNz1lHM2w0x85fwh5N8GqElSns+DksKZYIi8b1losVIDopMGB3L/75LqzLOYRz3RyHSLp0XOsQWLyMwFuk7Toaym7QdotsN51e22yQ/XuXdos0G5SriJWgKkuyFw5R8wl3Vz9JkX6S5OID6oOfplATWpFjxYhW5Dg1wcoRf3dzxM/81zNK2813JLEhjQ1J5IgiS6Q9sickyMDOwMaCRkPloHaCxgq8iHp5l4FGYpsxu0Z0oHp80gHrSnReN0+Ad0X3nPGIOvpzHSCL++9F3RpNTGf3Kf3QTO0Adn/99YFeV6Da93/vvzYSWocQrpeOqKsuaNw/l5ed7MxX3dc42Xt+ULsS7SMW1vOiuWSzfUi7OWckDbEt0a7l6+KzrN2I3+d+g1m8pCXBrSRG5JQ2Z2diKten3pqcpb/T1RbtFqk/x/qyJU1rsrQkT3fkaUGeFYzSLVm2YzpeczA6J8pbZO4gdZAKnO4MAXZNTtGMWBdTttsZu92Yi2LGw+I6bRVhnka0dYSvFBQeWQIl3f2kkrhagfDIxKHTFpXabg4o6UBgVU+4m3wUpyRWiV6mI/q1X/QP+pAwSaUyquDWhQckXxWv7q2De7du39NM3iO8JQ7Jtb4bZpbC4fF40Ul+ap1ihYKpQDhHYmtsq/EtZGLHLhojcJQuxp1/wLmPaXXGiuvYg1cprOK+zrHNmNJHVLOYVeqpiWlkitF5lx4tEtpPZhjd3W8i26ce984yubRQrWkun2B3F1SP7pLQcBR5El8j6g1j7ZjF8LXf/FVUu+OHPv1xfuX1/yXro09x8Ov/IW+/+/ZAbIRQofDvAKT3pXqBAAlMdBjaDAAzMObh+/vW2cEnfL/7GGQfAfyH3wEGHfpisYA7r/Lej/1x1Nd+G/33/vag/w735OfTWMN9Jnjo13U9DH4GG8h9gB7Ac/BoD78DDC4u4f6z350N98CQWrovvclePEC/esTmi++wW24GTXuapgNzHwB7YOK/3/Z7BhU9L2IPHzAc3PAm33///aHtESZ9Q5W0Ju+GjfAdw207AKvOv8xErXDlU6ZxSXnxHuX5e7Tbh7jqElNd4G07nCxpmnLjxg2WyyWr1WqoZMrG0YoMny6QJ59ieXnI44e32Z0pzu5+Gu+nlF97laqd96xWA3i8VCBVt0DOJCz8XuW8x2z0gEiYDdI3SN+gaYipqV89wR9ECO0ZXVziVht26wLnNIgU6xM8OV5OgBFULbLZIZot0uwwxSW2WqOE6Vhw2m4hF52Hdj6dkIxyKmupG6hqENkYXv4YfnrYs3MzEIvOAabpLCL9ugSVQy04GD3m9uF9Zvkl9dMV5YcXmNU57cUZ5vwpevIxni7+Cn4zJdt+yMnFL5DJhsOJZnP+gJtHY37/j3yC+
ViQyZpIGLRWQ0BVURQI2YGmXfQSv17cxiMQ0vPG9AnK7Yh8Qa4N85FgkUsSWZPplsiXKOEGNnSz2fC3Rn+KTfISEo96xeKvWfAO/rtfJFovaU+fIC7OsKePcKePcWePkdstbDb4snxG5hDOGyk7gXwSx4x6ILfb7UApUq2HCxS6YRIZdzaUo9kRcX7MphI0PoODGV5NUOkBcnaDp+kBhhx3MO6GbfUUoin/UTnmb/1Wys6N0aJlHFXkqiIROzJdMx9J8thi7YbzNufrzUdxUjJSK17k26Ryy1iX3DjQHB8c8H//4mfxRqCVwfZa0ZwzDtovE/sN07TmtZtjbl9PuHM9wRbnrC/uE4uGtu2Gm3e73cAABPnYrVu3SJKEd999l81mwwsvvMDJyQnr9Zp76xUARdstnsHXNdhWBdeHsEYEUL4/YJMkCZPJhOl0SpZlg+YwsDTDcHivMxRKU+sMl45pooxap9QyplIpW5GykTlVPKFQKYVIqaOURiW0umMPreoLfd+BpI61Ft2a09ID7B6YWrqfNxKWCi8Uwndla0JJ7EoSsyE2G2R5RmI2pL5ENRtUuyYXDWPdksuWSDpEpDDRmEIvKPWcSs0p5JidSlmpY55Gt2h8SuOTruNkuxRZ7xSYnmn3PePqB9F4B6q965lrOtbWS6jJ8QPkK+lg6Aq4RLIl6lzn8oc/99My8ijtEZFk/crncOMJ1Jbj//5vIp++g/cSS0SUHqDzA4hmWDnCyDGNz2jVbYwaYeMJVk2xUYbMYmTcECUNagTOSgo7AQ/14S3Ojv4kIvOIcYRLFTaRuCzG65hWxf1HNAgDTStoGgW1xtd0ALsQXbFR7rHXpuvmENNLaATEPWMdALbq72wJMBP9cHq/l6TfA9dhme8ZbtU/t/IQMgxkT7QEWaHon8wJWPfHSbnes7x/HtGxlV4I8LqXKNGJ6h1DUi5eQFEiz97D6wk+OYT0EKzA7m5iheBbD+GDqKVxM5xQIEBpi1Qd0ZJFWy4PD9ikU5QwqIXFK4GTFklL7OnmbIwnbh7z6g+/zcmrj9g1Y9bbKZvdlN1uxEU541F5japMaMuYdhVBK7sMgZbOWtbLLqPhnU52JWOLjD06kXhlERocDqv6fZ8JXCrwuYBDiRt5SIEMSC1xWpDnJdm9hlRVOC/44PyVHnzQdQWG49bfg71HeofCDYBaCY8SXT0qlQQBVkhaJK3QtKKDOYnrNPCZr8kxJK4hsZ18JzIVsfBEHtqyJFYabx1aRRjj2RDxtflHiKzjE+Y9XG45VxOqOsOsFHcu7rKRE57K6/xfpv9+p7cXLem46daRXmc/kgZXroibLS8njmZ9Rr16ijYlm6cPkM2Wa/OMlIajacbZ6SmLxWKQAh8dHcEYvn7360wmE37nm7/Dpz71qcFfO8tGfOUrX+HDtz6E1SNkFHHrhev8T/0/4V/8k/+Aqqq48/rrLJdLHj16RBRFLJfLQVqyP/h5dnY2AO7pdDrIasK6v91un2GZQ8iQlHJgotM0fSYbxxjTXX5xQjtbUGY5Lh/RJBlNktImGX40YXlwwMM//udopgv4wp9i8Y0v4772O8P9JxQMgZgJJE2YMQxYNeDVMJy5X0AEkinMFo3H4wE4h3vZ885XYQvMvIw1LlHIaUr28gl3/rO/iJCS7a+9y9v/zt9+Rreepukz80yDxe332f6lXFyef3OhIglJm2E6dz+tad+o3n/tb8GNz0N5Bu/+58g//L9n9vjXcL/5b1M4zc1XPk7lMw6uvcZm/vt4WvwIbbTA54eQHeDTBS6bs41HvBXleJ2BTqh0BEp3i2a/iG6959f+W4vE4psK0dYIW+HbVdfFbddE9YMuStpUHTsvHFIKVJIiogw3OqBe3MIbDzqHKAMEvtXQVEjTkCmDqbbw/hlSXieuCkYXlzRtxfrsKSpWtN7hlUKnGc4kOBMj8inR9UPaOKfJjnHxx8A02HLdRTr3D1muUfWOcndGe1kwFpYj5Xj83puY9QVNUyJ/+i8ilmdEf/tvkjQ78p55aJqGumkpXv1rqOnL3OI/5/f9RcP5gzX//D96wGQ8ZblcDoO5v//zf5D05TdYtRkfXf5dUnYdo9kmnLfnvDp6jduTYqgmobuIq6oaqsLANkfNu/zbr/4qb+1e4qdufp28uTv4s0opca0jb3Om02nvW3pl01RVFQ8fPuTH+TtMX/wTvBIX5F/6Jv94NEb8w3+A/JVfRkrJuG+LhYHeEJRljAGtsY7OB19NsHrKLpp1kqhohkoWTBY3acixIqcSY6zsOxPRDKIphe5/X0adbafb4JsVvlni2xWYNUpUeFVBu0LUd5nGlliUJKriaBbzyVs3+eOLL3Nd3uPdgz/L+NorVFXFBx98QJ7nvPTSS3jvefDgAZvtjhu7z3HejPjh9h8zksUg6ziZn/DSzZcYfV7yq2/P+LFrv8k3Ny+zrBJevPxPsfVp5yCkx3zs4GO8ePIii9mCS7ejls0w0xEkH3A1Zb9/Xe8vXPtzJuH7wfUogOugrQuaP+sc8XhGenDC6OQF9PSAHRoTj3AHJ1xkk05rnc5okgmVzql1Rhvn2CjtHzFO6o6hbhzCeGh7+0ZDz5Q6ZGl7PbYbnEU615mu0yBcjWp3KLNF1mt0syJqlvjtKbHdMlItI9kwjR0JNdPpiPE4R8UKq2IKOWZlM87biJ3PqGTONruOVa9iZE7rYqxPcD7CWd2/L9G9p1p0YVfCd6FRPVM3dAU9eC/7uU2F8AblOlcR2SwRZoUWNULUIGoQppOxKYdXdJaFcQpJzu7VP/pFBCME4+EheQnJ6wgklhaL3d38iQ7U1xJaiSwlurTInWH7yb+KlE23hApP3dPF3iucVXijus5ZrbFN7zmetaS5QWc7VGpRiUXEFtNG3Wd3QCqom1dgC5z1ALvpH4JnWGsf9Xei8OjtTYlFB+gOIMRVEO+Bc03Phvf/jlzXvVQ9weL8MPYwyEpauvfS9jcz4fcImcD8iX5KlSunmjBg6/b+PdwQey/W/Z8Nqcd9t2tPLQGd25SODXoUI+evo7TrmdszuhRf37+TTnYSt93+N7XG1pp2G+OswkQR36rHqMTghKSq+2Am1bVTgoKmcy4SfPhrd+A3PSTPFTkhn2AKHIZuncfL/aLFg3QksiLLC7KkJIsLsqgki0rSqCTXBamuyERJRvdIRUkuSlJVksn+oUqk8J0M742MxsR8sLrDz3z5L2PajMYrrJBE3pLRouodvlhxmGlmkeii7D94l2p1wYvXjjmcTEiUJvYRTd1wfnbBvfsPcV5y48VbZJODQQpS+oiCiDMxxlpJpiRjrZlzwTV9ykvjU67pJxzLUw7EOTO/ZOQ2TNyOsd2ycJdEFy26tsR1Q1ZXSDxrxvzX56/zn31zzvHRET/yIz/CtT6lOxBAjx494ud//ucpioI/8if+BC+89ALvNg/56le/ij09xQKT6cc6+Ycc8frrr7PZbBiNRkRRxOXlJe+99x7Xr1+nrmteeOEFFosFk8lkGDa/c+cOjx49Gtbu3W7H4eEhd+7c4dvf/vag9Q4z
gd57RqMRh4eHXFxcDJKVpmlYrTqSxlrLeDLBJRkqzqjWBeXiGm40ZvuRj1LOD9gKxa4o8aMxjKf48YTL2QIxmeLGE9ok61JYsxyURlYFuiyQuy2y2KKKHaLYkrYNsTOotuk6f3hUXSF6/BEeASQHYk0IMbj8hGIgkMz7IUUAMo1QkxRxNEWOYg6O52QHU9Q4waYKNUqIFyOiaY7II+Q46R6jGDVJkeMENU6RaYSrWuy2xjcGmcYIKUjeuD7gpf1B1/3wpd9L3gK/B0DfH+gMW6iwiqJgt9vx/vvv8/jxY1arFdPplMvLy2FnBK2N+9z/GsYvwfhFuPGDuFRx+cqfgf/FBQjB2/suDf9/0v4s2JYtO8/Dvjmzz9Xu/vTn3HtuW1W3UCgABZBAESAJgGSAjSAFSVG0xZAjZNOyLNrBMPUg2WE9qAnbsvxgMkxTCtIM0SBpUSRIiCALBASBVQVUg1t1q7l9e9rdrz77nNMPmSN37o2LSymYESt2u3JlzpyZ8x//+Mc/TI2qS2yZQ5miygRdJdhsgT1/COk5JGeQnODmp5jlY+zqMfbf+03w42Yfx29jgyFqNMX4u81x5yt0vsKra3wzxSk3JLOmytmmc6pkji03eHWCSmdM21bHJ//+L0LQPLD0448gGlHG28zCIRbwqxydrTCuRzaIyGfH2GJBnSyxqxlOvsYpE1jO0Mmciae5No6o1zMiU/D+977FOAouTr9NBUnkuVgsmjFUiiWg0pTpYNAERP/R/9AERY6DjSIya7ob7fT0lOrN/ztVVfHBYEByeNDp/X3f71JU165d45l7d7iR/WLDsCqLUrqrhnZd95L+qj8H+pG0RIS+7/PFnQ/5mfCQPM9ZVhdpKeAC0PV0ZRLhzucLktziKfjR9/85ypvy6vceET48peQ+9TM/0La/npC6I7Q3RQVbjYbURigGlDZqir1s1XRllVe1hHJBXS1ZlMumfbmdYdYnTZfXatm8inkDwO2GraHLcBB2DRzk2gyHQ7a2tgiCgNPTU3YnkwuZhhsw9aZExvL19S0ODn6IO3t3ADrXosFg0I1lXdds1is+z5coKVmmS7IW/Hqe12kTf+zuQ14cf4eTkxP2g0fkec53np51BdriudtvyiDzSQC17/uXOvl1Eh6lyN2IVeTy2N/huBoz84d8tDUg0z4ro9lYh43yKfwRuT8k92IKNyT5gkvpeMz8qEnlVxW6NFCYhsGuHag1pv1KZdB5jUpMK7GwLYjRnXpMmRrXZHgmxa8T/HqNW8xR6RkqOUflC2w6o87XaFVjqdCOwgsC8NymJsHbovCnlO6EOpyQT56hvvYZUmfITMXUtUtdO83xVQY2wIoL+QICkkwjxW9BnG2BNY2jPSEpvsoIVUbgpPheieeWaLd12/AaRrxWlko3jF6uXErtkxhN6fhUXoTxh1TBLrgeeV2jygpKgy1NA/pLjTIKR4NDw44OTlc/AcaqHtlua6Vsra2plDGF45rC8eosxNYaJ2g0wF6U44QVeto06NKBQbsGXFBasiA0gVGhsbnGpJp641CtXOqNR7nyGq1vB+64YK61bWQlGhgCWzRA26dhVn0ugLZPA7LdJslqRVXZt3KUV0kTANRc1EjI3zpJZO8aiqb7ki2kZCm43B9BGO1ex+MOXMtmG4miF7T9EPwa3TZQUsqgVCOTaIKwxrXK1BpTONSlS5W7LcD2ms8P28+GNvgC6zW9B9CNRsnSODIZqzG6qfsxSrdBjKHWTTMwvKAJaOTgtWr22blB1QReRhQkREHWAuqEyEuI3AZgR27aAetQpURkTeG2Sol0RuQ0IBsgqyLSKiItI7Ky+ZpmEWkxIM1D8jJiXW5RVQHnxZikiHGsy33WOKVmc55xuqgo797nZpEw3n+fOkj4o+X3sMsRNZqiVpT4bIzL09mGk1XOYrrLMh6glYc3us9oehNXHxOoD7iujzlwjtgZnDIOT5ncXDAy66aXRJUSlylhmREUOUGe4xclylqKwKcIPPLAp9A+pedR+i6l71L5HoXvkvsemyDmzBuzGMakYcTj4CaZF7D3D8f83Af/JU5d8Ivfy5gvZsxbT/A/8Sf+RFdYWRQFr776KvP5nKqqeOuttxiPx93PwgYfHh4yHo87mYXYBp6dnfHo0aNuDT06OmJ7e5sgCJhOp51LyHg8ZrlccnJyQpZlzBYL4jRnNRgz+vwXeJyXnJcV2U/9kQ4wF9u7JDdusUZhhyMYjqnCiMRxsYMRH43GVEGE1Ro/z7CrBWa1QK/XPKpL4rqiXsywR4ewXqJOj9GbFWMN+2HAwBrGGoqzU+aPHrIT+ORpckniItjkpZdeYmdnB/XgdX51eoPV175M+ehDiCKCSYwNXdTAb8B0oHHHEf5kgDuOsKGDjVyinTEq9nBGEW4LqP1JjB6GDcD2HKpVRr3OGnC9KZrvV82LtMJscorHc6pVSrlIqJYp2fkKldXYTUG1TKg3eVMG2Epzbv4HP8fg9z7Lo//4v2MwGAB0mWspXu1nnftyoI/bPhGgV1V1qfulgLC6bqrpB4MBn/rUp4iiiDfeeAPXdbl37x7z+bxrU5tlGXr5LrUpQGmUOcKqfTCG+J/+Lwk2D7HJKde3YsaRw+NHj7qK2fF4TJqmHB8fd61ljTHoXvGBFaP3+QPYvgd1SfzL/wcGJNSbGTadY7MVcdRUFO/u7lJVFUmS8M4771C1uil66wBA5bpcu3EDJz+ldrZQRYL/X/17OOmSoM559toOP/FDn8OaprHEaDRiNBrxa7/2a3z1q19t9LXWEreFGHKcW3fusKkqptMpp2dnVMmaytUdgJLxnU6nJElyqV2tvObz+SXf1CRJOpbacRzG4zFxHKOUalqVbxpG/P79+3zwwQedhdJgMOCll17qijGEHZXiiqvpJPl8OR6ZYGdnZyRpzmByjZE+YKm2KVcRy1QxXxvOVxXLVFPYEKNH1HpIaSOS0iMpXdLSb75WPsY6eDon0DmUC9L1p7HDBbZYoOsVTrmG5IjYf4SvMmKv5ObBiEFQUWxOSJaHvP/Wt8jTOWWbThKWoAsylLpwmPsYFyOtNY7rMp8Z5jO6YEWqvu/du0cURV3gIqnBvkfq06dPieOYg4ODrrDGcRy2t7cv2fDN53OePHmCtZbVatVdK6VU590vWr0sy3j48GHXKOHs/Jy0shReRKJHHI5vsx7cJFUey1jxTjhlWUE5ikh2PDIngME2mde4htR+TK59Su2g7ltcU+Fai67b4GFsqCuFKTWmbjzgSQ16VaONQdtGi+1UNZBjldtKoR10neHUKYHNmoXerPGKBX61IiDHVAlFtiFNV+Rl1oAtV+MEAdaPqIIptb9F5U3Y+GOq8Dr1+DkMEcaGTVFjraCs28LLFuxY1aI828hCSosqgY1qi2QbatlVFYFK8GxG6GQEXoXr5zgqBZtiGvK5AU2OQ6UdrB9hg5hC+9ReoxOt3ICNM2Hp7DRzp67RVSutKcAWTTfUJh1vG9chVaFKg7aGwEBgFNYU2LrGVBpTug2oyz1s4aIcgxtVuHGJEza2nTqo0UH
t64seAhbd2OfYusk6mEJjMk2dOtQrl3rlUZ06FJvhBSMtTLTT+yqa64AGWMvXMfBM+3NAy1i3khLR9wrIrVtpUcUFc17ZC1mJPGyT9qbrq3qsusxGdzX+qlcHxAX47jPWIkm8dF9btFs3OmevxvErtGtxHNMBa9EjC7C2taYum+xBXbhUmUeVu9jUwwSgwrYOIbRYbVCug3IslXZb29cmmGuFGFitMVo3jLQ2WEdRug0YxBPGmouCUkeCFkMYpMTBhtBPify0AdVe+9Vp2WvdAmpakN2+hLUOnYZdTVtg3YHqMiLNY9JV3ADrImZVTqkKj7pwqUuvywqNnZjAuMQ6JvYDlFU4SlFXhjTNSZKC+WpDpXwqN6bQIRk+SxUyJ0I7hjMSAlJGesHu8ITry/+B6+5T9g+P2VYzpnbGKF4yrNYMqk0Dquu0KTbVOX5WECxLjFIUgd+A6rB1/3I9Su1SBh6F51J4LrnjMNcep96o8ff3B1T4LNc1b7/7lLPTkvWsJn8MxdLhi7/nDzENrlMuXV797W+zWCxYLpc8efqU09vP4Jc5P/X8GX/s34wo6mdwTm7zNFH8lel/ypd+5Us8mT+lrhuG9PT0lM1mQxRF5HnOW2+9xYMHDzpCJc9zzs/Pmc1mXfMecS5xnKaD7kppTryQTWl5XIB+4RXi/Ws8UJr58wYzGDK8fgM9mrDWDrkXsFGa5Rf/NVb/LtTxgF+Mh6DAz3OcZI3arPGLjGp+jlkssMs5Jk3wTo8Jkg1hVTK0NeXsnPmjB9yeTnhmd5vdMGCkFbPzM77xjW/w8OFDAHZ2dnj22Wc5PT3lgw8+6NY2rTX+YMDeiy9w8Mwt1MCnuB6wdkJGLzxD7GtyxxCEDpUHxB7h1pBif5vjUYgNXZ5zDPmf/FHU4IvogQ8WzDqnXufUq4xymVCvc8w6o1plVMuU9GxFcnZIPl9DUlEuU/LZGp1VkFYsj86pVimudjpNvARCUvQpuFd80kXP3ne9AfBdl1rVnULg8D/7J50WfzqdMhgMOktJaWgkxKcQap+0fbIAhgv2XKzKhJqXNrA7Ozv4vs9yuWwY0+WSxWJxGeB9+z8hO/o6Kj1hVD+iuPvTuIffJM6P8DyPLMs4WtfMWoAqhXbPPPNMx8KfnJx0BXpSQHapC9P/88fhpT9M9PSbTFh17G7m1IQ724RhSFVVHB4eYoxhOByyu7vbNVnqp/PlRjl88oTh//EnCb/wc+jv/zrl8cOmKMAYTvI5mxfvMxwOGQ6HlGXJ6elpJ72QYMZxnI7ldF2X4XDI+fl51/ZWPquqqq59rDRREBAtYy/nu729zWAw4OjoqAsMHj9+3H2OFOdKh7E8zzk+PubZZxt935MnTzDGcOPGDcIoIisViyyksFuN3pSI03yXtPTJ8Vkfv8hXVzskhUtSeqRV47SQlC5J4bIpXEobop8YIq8g9kpivyJ0CgInQ9crfJUReSVbUcp0kDKKIHRyAicjcktMPiNdHmLLJWWR8sYbb/DNb34T2sBMLJVqGqDrjsd4QUBW17z5Vtk1RaiqimGkKdKLZgHApeBCfv7d5rsUOspckOhYa81oNOoqsbe2trhx4waj0Yjz8/Oue5nYNkmw068o7xofACkei3CL80nNqtacuSXJtoc73qEOBsR713gyeonMCflWtsfymk+0t0DrisKNKL0QtIMqc5yq5Nta4ymFqsGUhjQuqSuFtR7GuNjKbTw/yhrH1PhaN4W+tsJaRW0VpXWprMZVNa7JCao1brXGLRbNq97gqRqHkuvX9/B9zeHsED/ysY6mUrCpDKvKwUbbWH+bXA9Z1dtUJqCqW8eQooa4boBsTQu+dMemN6CuLw3QYE3jJGhytFmgqg3YDZoUzy1RJGiVo50S39cEwxg3ClHhkMoJqNyA0g0pddAEJo5P6gxYaI2qa3RVoyqLKWwLKBVaNV7RTuu0r8tG66pScIxB1xleXbTA2mlAXeZQ5y4YjROWeHGFG1YNay3g2jco37YdayWmUBijoNCYQlFnLvXaoVq71Euf9FHUWPVVLXt8GVyrDlx3Ot/e91u93wXmMusta4RIOEp1AaDz9jr0gW9G8+o3JeuA8hVw3THc7f92ILz39XdlrS3aq3GDEjeom+DErVFOKx9S5sK909I0Lqs0daUxhUtVtHKQzMUkHlWiISyxgcaJaqxvwG/sZ61WzQvVyI9sA6iN6oNrwLFUrkslUiNEo85lYO3WRH5KHKwbUO33ZCDehsjNCN1G6tGB6vZrqHtSELfRZidV3LDWPWCd5TFJHpEVIXkWsy4nVIVHlWnqXDcGXWlNvSp4+u5DhuGYgR8RBSGudjC15IkcTOsCk6vGdzt3AjI3JnVDKt3UHkU6Ycufc819h5vVEw70Ebv6lC01Y+IvGDkrBtGGuEqIhK3OC4Iix88L/LKi1roB1qFP6XsUodvY6wYele81v/N9Ks9n4W5z7gZUTsgyqXh6vGK1saDG5HnI4UcLZu+nPH5/xs1rz3P/mU/z8kuf4b033uOXfumXemBIc/fuPj/2Yz/G/fv3m+zja6/x5S8fstk04KvJPG+zPg/ZGY0ZTEJu3rzJ7u5uZ0kYHL2D1pq3fnvEWL/CZz/7UrPWho3Ud7lcNc95z4fxhHL/Og+iMcfxlA/imicvf56TcIc6HsBwzOrggCe37nD2UyWL2jRs9XBMHQ8w0YB/FsUoY3CyBDdZ4+UZQyx+nhKUBdV8Rnl+SnR2Snh6xG1T4+Up2ekJ5x99yOO33mBzdMj1Ycxn7j9LnqacnJywXC45ODjg4cOHbDYb1ut148ayu0sYhhwcHDCZTnnrvbeZ+ornXvwC4bbC2R+w9BSJcRje+xw3qxdxhgGj/W3caztMVc2nHIMzDHBGDVPtjkJ05LMqa1RaobOa3dmnqCvQmcFdJpCU1Ocr1KpEL5d4Tw2h0ejcUD0+5v1vfJt6lWI2BeWqCWQk+yu2iIKzrLUkScJwOOzcwkRjLhkJk+dgbKsWUJ3ziwDxvluMyGKkw6+QxF3PnhYb9LFC30M9yzImkwnL5RKlFDs7O500VwpsP2n7FwL0fmekvjBf9E7C/B0fHxPHMY8fP+5SLeINmWUpvP8PUVrDcEj07j9o2MYWSEqEuVgsUEqxv7/P888/D8DJyUnnnxnHcdcldD6fd4OilEKlM9zv/v8IBgOqVmohbVWlalgCjDAMybKMnZ0drLWcn59f0imJM0Rd12wev8/Ob/9iI1+4e5fj42NWqxVBEJCmaSfvkDa2sn/JAsgFlkpnqeoVeyMZ076EpK5rFosFZ2dn3fgEQdB5Z0r6a//gOoenG5xggru9T2EjMnfEkd8UNnrxDtXBAPSIMz3g3J1gfnBEaWOccIv3/Clv2Rg783DI8GyCY9e4dkOcVyizwjEbtokZeSVb4YbQyRmGNZFbErkFxeaEo0fvYMoZ9+/e4Natm10QJez10dFRZ4s3GAyI4/jSGEtDlsM0Z75JefPNN/nwww+5e/cuYRh23Wrn83k3sZfLZRcEyhzI87zLuozH407qMRgMOlmHVF33Abhscg
0+rv1uXddMJhOGoxEfHZ0w2r/JNw9XeNNdru++gLk95jQpWe7sUz3zAnUwINMBr/kReRmSaZ9M+6TaJ1U+ufZwlCH49E/hv5jhVxlescErU9wqwy9SfO1ilwm2tiw3HtSK1EzRVeNn7NcpqpZz0NSOQ4FDaR0s4JocXa7xqjlOtcKrU1S5xpQbhnHAc8/dww89VFufWGtFrTWl1mxqh6eLnHnikJoJhutY47egrUaVNR/OGtmFKW3TVbXt8nqJxZatA9cpmAxtUhQZkOA6BVrnuE6F1iVKl02zKs/HeCHGizB+w/jXXozxI2p/gvX2msLJsqQoW3eVstEZa914RWsL2tTNqzRNE1xj8UyBW5UdsDalpm71vaZ0GsY1rnBCgxNVOEHdgutGEtLIMhowZlV77nVrI5pp6syhWnvUS49i5ZOfhw3oL7mQZFyVhkRXXgEwpbF9i4DQNu5SXvseR7dCYxqrJpGBlFx8lgQ5Sl1Yva5VHwP3gLK6ANWSYirb/xWJtnT1vcpcy74sjf7eq/DCRgbixiXaa7r5Km17GutG3iTA2lTCWrdykMylWnoUuJiwxAkcnKi5BspvdNHWaaQ0Fo1xdCMRUQKsVSML0Y0kxLiawvMbxrpz3mr11W2DNOUaojAh9jMiPyX0kwvW2k0v5CBOTwaiWhmIylq2OiVyMmqrScuYtAp7cpCYJI/JVgFpFrBJPFb1COoQU7rYXDM/XhBZjwEefu2xOkvQFjzHbRjEGlAOldFktaLUIaUTs7EuqROSeTGFF2I1RE7GMFxy4/M73PAect19xJ4+ZledMbELxmbJsF4zqBKiKiUSCUiRN+B6WeBWNZXjUIQ+he81jLXfgOoycC9+5/kUrkfi7lHpgFIHGCJqG1KZiNJOMEWMzUIWxyXvv/WUz37mx3C0j6e8LtPSl0FKLc6rr77K17/+dfI854tf/AGMMRy9+y5ZFnL44JTbByNGw2lXsCgZ6PPzc1zXZX9/n9Fo1BFg4t0t2czt7W1+7Ce+iLd3wGPtsylhduc5NkqzfuEHcCx4jkfuBSyM5Z/t7PLPd3bJvIANmjWKxR//t6mCCBPFqLrGzRK+i2WIwdtJUJs1yXgfnawJipww3XBnPePOeklydMi733qVoMzZDXzUZs1BHFKtV2At169f78i9o6Mj0tYMwfM8rl+/zvHxMaenp9y4eZO9rREnH77D0M0J7kawNWD98hh/ep3d4HnGrqHyFLt8moNhgGq11d44xpvEOMOAYuDzjKsxaUlSWNK0ZF0pdNYA7YPVGPPeQ7LZCnWW4iVLTt79iMXhKWado3NDdr5iOxyRnC2ZDkYcHBywv7/Pa6+9xvb2NtPptOtnkaZpk2G+fp27zz9PliVNb4uZYvP20259FuWEYKa+1a7USIk2XVQJArT767oQz31wL91Upc+OkG2CHeWrWAH3O5pKUazgwjiOSdOUPM87YwTBKkB3TP9SDLq0ohVJhFTBrtfrbkAEAIvMRQ5U0vLS/lUOfjqddpNLWMW+dZpEKicnJ4RhyMnJSadPiqKoYcWzrAPZMuiO43Q2bVprlstlFw1KpHL9+nWiKOr82vsG+MJ2ykWHiw6B8/n8knclwGq14stf/jJFUbBarS5FXv0K3TzPu+/v3bvHzs4OYRRh8QnGd3CjHUp3hNGj1gWkcQh55I7Jo6BpqOQMsd4E3DG1HvC+v0WlBk07aFM1bcerVVPAWM4pqyVVtcInxSMl4pRy8xbJ4pCtocIUC37os89xbSfCFPMGbIceq9WK8/Nz8iJn4A3YpE0xyU8981Ps7Oxcmvhy7Z+WJ5zVJ9S2xvcvbIkEePeDHmMMi8WChw8fIp0VRUMn4FjSQTdv3uzGLgxDdnd3KYrGjeTRo0ddcWr/Zluv10RR1B3rdDrl6dOn3TGVxlIGQ+rxABuOIB5BNIbBBOIxdjC5+Ln9HfEEG49hOOV8MOU0GoLjcZitYbNAZ2tOqpTIVuh0yWAc400GTGyJW+foPIPSMKsjPANxXVDWmqJS5PikJmBjfFLrs7E7zI1PYgMKPDxK4nlOoEocDLVVTJw1t50TtFt3+marwToa4zoYx6PQmtT4JJXXAIJqiq1cVFHjlDWqtKwry9H7uukbZBrm1hqFaQFT+xjrUFfTTirHUSVa52hVMBw6uEFF6axw3RqlCso6JS8SKqUwro/1B9hwgAkGGH+A8UOsv43RDpQlqqyoKostaPq2tEIUrXq2ZxgcY3GKRttsqwpbJ5gqa8B13oC6OnMb3BtVuFFj23YhBzHosGVMfYsVcKyaczelgykaYF1vXOqVS7nyKTZ+m3XovURu4bQsdgQMuAyuQ2AXuENTkBfaC7eR9nNRtMWlXJaC5DRguh8/ZjQ+Lf2+CV2H0LYgUvbV+quLvvlSweKlAkYuuVSBxQ0qHL9lrKMKZ1SjXdPo6VXDFivbA9a1wlTOhRwkb+Qg5dolX/g4fkUZeLhhhQobYK082nnbaKqtVhhfYwONUeqyHMQBvEZyWHle66pCy1qLvMai3YrIz4iC9EIK4vekIC24vlTAqJIGVF+SguRUxrmQgnSsdUxaRKSrgDTzWG0cjjdDymzczL/UUixzivma5GzF+ZM5+aqEukYZgBylalA52k2w/oDdW89yltTceullvK0D1solsQ5psWbkZmwF59yID7m9dcyBPmRPnzK1Mya20VYPKmGrM8Ii60C1nxX4iwK3NpSuSxF6FEHLWAetBETY6tCjcDw2bsRMTyitS45PWflkVUBehGRZxPzcsj24xVZ8A1sFYL2OiDp6fNQRYqvVislk0q2jsob2U/rGGJLzY9YLRVkY8C5nOvu9D2STdcBxHO7evcvJyQnD4RCAaDLh5mc+S33zLh/GE97ZvsbqR3+SRLusipJ0a5tXn73PG3sHqPGEtXKYPfPDnH7x56nDmNwP+d5gyK/4Abqu8NKEoCyoFzOcdENUV+j1irAqCJIEL0u5VufcqRLGGur5jM3hEz747ndYP32CXc65ubeLAn78x3+cnZ0dVqsVi8WC3/iN3+jkqAcHB3zhC1+gqipOzk6ZP3oDIo+Dl+4TPHeNybUdEkrOkiWnO2MOfUXpXqdwrmF9hTuJ0OOIJ75GxT7bA4889smV4lryE5SLVv6xyXGVD7lhpHySsyXnj47Ij2fUm5y9wZR6lbE5XxErn9Bo9kbbrE/mpOsNRil2d3eZTCYdcFVKoY6O+PrXv0546xb6+nVm3/0u6/Wa4XDI7bt3+eD0hKN0xtbWVtf3QuaCkJeO45BlWecu1gfFQsZdmFNc1KzJz/3mlVLnJs4o/To3mXd9y95+d/o+/hOjE6BTI/Rlsv3g0XEcNptN5wgjjL61ljhuaiKliaackxDF0ujok7ZPBOjCkq9Wqw5Mi6ODRANJkvCtb32Lb3/725dauYoQXmQjfVN5aUrUpfvbQRKGdW9vj8PDQ46PjztHmCiKODg44Pz8vGO8ZYClK5W0rk2Spvjg4OCgM6Pf39/vwLhIScTJRPbRP0fZJNLpKoCVg/LGrOsxy3xESUw1jDGTIbhjSm+KG
+2go23WrRuIdUdkzoh34j3eIKIcxBhc9E6Ky6ZtQd4AbFvMQaXYYo4yc3RyiKqWqHqFyWcMdcanXrjF8uwh2eqI99/9fqe3FGZe5Dq14+BHEdfv3iVJEh49foS/t8cP/uAPcnc/wXFy0jol8IOOdQjDkCRJLhWevPHGG51/dRw3DizD4RCtdSfrkGsu11Im8uuvv856vea5557j9PSU73znO8xmMzabDb7vc//+fe7du9dlIj766CPefvttkiRhe3ubZ555hiiOeXo2Z2E81uMb8MKNxvYrGjXAOp7gjLZQkx3OBhNW011Kf0Dlx1R+jI3GDdiOhlAWkCwgWV583Syal/zu+KPm+3QNpcFVHr4bgHUIwyFGBaxLhYp30PE2JtqinOzjDO7wtNjlg9OQxDY33kCXKG1Z6QA03HaWjHSB9k0DUpTBaoWjNZGqcLF4xpDbkqz2WNSDC3eQFFZVzPvmZutqojB1CzB7APsCcKnWTcTiqBLPqXE9g+dWuEHd/OzXuG6N49Q4bpOqr7WmUopSu2SqadddaI9S++Q6atKudeOTbwqDzXdaCXLdsNaAthWubXTXmBZYbyy2zjFl2QLiFtRlLlQa7VfoqMKNKnQradC+aYFdy5r6gKewfsua1uqC/d441GuPauVRpVFT+Dnngk0WcC0MtgDruH31AfaEpuFMaC8VMtIytmAvrBtzLjPW/aLFSljr9mfBHH23j48riDSio6cnA7GXddgdfmmBdVDiBSVOYHCG1QWwbm0bVXvYF8D6QpbTFDB6VBuPfDagcAxOUDaMddgEg0rGQDcWggaFdRyMo7Ch7thqq1XbIdNSuw617zQNdOAysHbBcUtCP2UQpBdSkB5jHUrxonNZDhLSk4HolMApqIxzIQURxrqKSfKQdOmTpS7LjeJoFZCuPYrNEJsazp+ckZ4tSc4U6cyhzgxUJUo13uyuV6B0yqYwEIxQ0QQ12kFPdvG2DtDjKbU/IPAqxtfn3Lx/zOf9Qw6cJ+xyzLY6Z2rnTMyKoVkzqFKiMm2BdU5QFPjzopGB5AWOsRS+2xUvdoA68Ci95vsi8Chij7kecaK2yWzTgTorXdLcZ5P6ZPmQJx+tePsbD3nm9qfx3LizwpP1TogxIcdEPioyP6k9W61WbL28hxtv4UVe9z8iF3Uch7OzMw4PDzuiR9b0GzducHBwcInBrqqKzEISDSjDiMIPSV2PzPHIvIC10qxQZK5P6rg83n6Wox/6GfRkytt7B6zRbLRDGYQYz+eNuiKqCmJTo26uqJ+foZMNZjFHlTkozSTbsGtLgjInOz3hzW9+HS/LKM5PGStLWBbcuXbAgwcPmE6n/Oqv/mrHvgv5J/VBn/rUp3j22WcZTMccr845rE45ihOGn90nYQvn2g429nj4woDDiSaxMany8X/yD+CGGmcUUoxjfmsSUXkK499mv/osdlNSVVCsc9ISnLwm2ExxKrDLlOXjY7wK4hLy2WPmZ0vWxzOGOiBWHqvDcw4/ekyeZeR5DjTKg1deeQUch+27d8mfPuXhq69StbVw1U5TP2PSFG9ri63dXZy0htp06/vu7m7HMltrOT09ZX9/n2eeeaaT0z733HNd509xgplOp+zt7XFwcMDNmzc5PT1lvV532KwPsD3P6wheyWQLK94nR4EOVHued4kcFZwmc1dqF/uEoQBrydSI24uw730NfZIkHSDv3zfSB0jwJjT1eEI+Cx4T3Xqaph15LMch33/S9okA/dGjRyRJwttvv83BwQGf//znUUoxn887cH18fMy3v/3tDszD5bSBFEMA3ffiDCLa6r5n9c7ODlEUMZvNuoEX0Pzhhx92ln79iEm6UfUjn+3t7U5ucvfuXYqi4L0PH3N4lrLJHEquQfAiVRDB3gTrjam9CbU3AXcC7ojKm4I3AnfS+Fu3HtfCWoszSOcSUiyaltB2wzTYUKYPUMkSx6yJvYrPXX+G5fkjbu4PefzRG6wWs04nLjY8hW0CkncfvUuSJB3gFgmHG0VkJ6cMfZ/CZjhtkCKWd6PRqKsKr+uaJElYLBYMBoOO8RgMBkyn04Ytb3X9clNIJ6w4jrv0kUSIwgI8evSos32az+dkWcZwOOTdd9+lLMvO0Ucpxbde+y7hzj71XsFJ6vK2fw19/yVO0xIbjXm8vc9v+rt4z+5ioiHlD8esjSLVAafBgLejMaZlrUnEinLZAOjVHDZzSFeQrShnx/DoXUyxYeIp0rMjvCwnWaUoHJTyGtvMcAuiKTacQjjFhpPmd+GzMJ3C9S0Ip00L72JDlc2pswVetWadzag357jlEpvOcc7ewjMJ16YjXnr+PpNy0HSLMZbaOgTDXX5NfYpVFUANZ3VEUvtQN9OoLCxVralrh7Lt5ujrkkAXTJ01k9CgVM3byS64TRo/CDLc0OC6Na5rcFyDdi3ac8BxKLWmVJrMOhTKJdeNF3CF2+qm/dbCsPEUxtQo23glK9uw5U0DDwu1RdcFQV3ilboBw0XrTJG51HmA0gaiCh3aVmdd4QQGFTQ+yQQNuLO+am31VKPvNQ4m19SZ2+is1x7FJqAogqZj30I1gLeVrVByIQ3xaFjqPriOaYD1tfZvYiXXl5IIay2Mdd/+T1hrAddtY5XuZ7ismxbWug+s616jNWG3rb3QZ9ve/lSjr/ZacO0GNXpQNXKQFlhfSEFaYG10o7MuNXXpUudOA6xTj3wekwPab4Oc0DQBjtdmDHRju2eVbrTWrsb6F6x14/qhwLNYV1G5HpXj0emse3IQx6uIwqwD1GELqmM/uQDVwlo7KbE4g7SMtUhBAl1QGq9lrMOWtY5Jioi8jMiTgLIIqPKIRTbiNGs6ezqVSz5fU68KHrz5IZuznO3xDra2XD84wNEeX/7qb1FU55Q6pPYbD3dG2zDchtEUBhNUNGR6UHLrhXNuDc645h2xp56yq07YsnMm9ikjs2JQJwzKlLBMCcucsCjwWyeQhrG+7AhSBF7HWBeeR+465I5Lpl2WjuaJddiYAUW4hTeZUNshNWPWm4CvfeM9Ir1HttS4TkQcDzpQIWRXn8UTMC3P8Iv+FEecn58zHI6Yna+B9SWJZd+qTmxTJR0v64cQJ2EYNllxralcj8RxG4Y6GHLqDclcj+9sLMvJDZzpFqUfUsUDVijUaNK5gmSuT+Z6JNqldl3+Vl0R1RWxqRjUFVFdErVfg7JgYCq2sw3q8CHm+9/lxnjEDzz7DJvjQx6+/j2q2Tnzx4/4Az/x4+zt7bG1tcWDBw944403yPOc09NT4jjmU5/6FLdv32Y8HlPVNfN0xeHh2xC7uBODtz0m2jsg2ZsS/ciUIvb47B+5gTsM8SYxNnQg9iD2sKHDu6OIdyMX4yp0uY3OJ7jLG9hNQbjOqAvQuaEce4SuS7gsqI5XmLcfkJ4tiazLaLTFy3efp5ivOX10yGtff7UxIXjpJR4+fMidZ5/trtP+/j7n5+dc7+Gz+bxiPi94bvs2b731FrOq4oUXXqBaZ7z77ruXMNeDBw+6Qs7FYtGRmI7jdBp0aAjXjz76iCzLSNOUzWbDcDjk+vXr
lxoXnZyccHh42JF2dV13jYDqumY2m3H9+vUuc9JXPCRJwunpKdeuXesMKg4ODphMJiwWC773ve8xHA67Ytnt7W1OT087bFnXNXEcd9a/fYWH67qsVqsOw/Qb5wkBK4WgfR25gPI++y5Md0d4tiBfMKeA/z7DLveS6NlFUSHMutxfUhT8L+Xi8jf+xt8A6NjuV199tWtMJGmoN954o5OhCKDrtzEVQA0Nsy6+nK7rYrZ+nIXzCt6TX8DaDxmPx52kpK+/jgdD1pkiUwHGHaG8CTrYAj3EDbZxom0WaoB1xuBN0MGUk2iHE7f5+bfzmMrG2Hs+3N6gyiW2s9RbQLVClQtUvYRqBdmTxt+6XnXgW5VLqBbYcoEyKbQX45I7SHvh7ty9y8HggIeHDxt9VVHgTCachM82Raqr27z99tsEQdA1dJJ9hGFTnCLa+Nls1kVxVVURx3EXsW02m0uWef3ihX7B69HREfv7+81NMtrnN+/+QdJ6xnj10aV26jJZh8MhrueR1oqNG7KMB6jhFBMOKb2YZQ2FF2HCEdnNgASHZa1IlY+NRhReRB0MsNGY+o+EqLrEzTe4xYb6UzPsZoFqfd5NviFIE5z0PQ5GEZEt8cqEkw/f5fD9t5k9fYKtFb4fo8IJuY7Qgx1UuEXlj6jcIYRT6ugWRK/AZEoRbXEabWGDSTORszk2nWGzOaRzyObofIFbrgjsChanlIcbyjTFFGUDvMTmwh1AMEFH29hwSqUH6OAmdfDpJqDwxiTeiIUT8MGqJEwKIqckdhuQHWMY+wWRU+BHls8Fx/iuaZpUKktaFhRKUyiP1LrkqtGqZ8pjoQecarexVasbsOzUNesqbIoaW2cQRdN0R9cGZQ3K1o1QtSpxak1UaYJLwNqhylys0a0rCLihRYUGJ2hkIcq3qKDVO/sNYLOewrgKRznURuMUbrOvlUe58SgzrwHU0jKn7AFsSydXaPysaZuZcAGw92gkISEXPtniiy0N1yrVyD2kcU3JRaGpgF/5XPidWuuOsbYX3tii3RZwDT29dY+17p6lFi8ocaMSNyhx/AonNjhtl+BGDtJ+aLufznJPCklby70y9ciXIaYaoLy659JiLhhrp9W5K9U4gqCxvsJE2lqtGh22o2wbuCjjuqpw3OYAlDWo2mkKGMH1CqIgIwzS1se67wrSfB+62QVrrZJWZ90WL6rm974uKWqPtO5JQaqItGiLFtc+ReGTpR6rjUeVTKg2FeWywK3gmYO7DPUAXbnYUlGkRZOUqKEoa9aFISNgY1w21mOlAzb4ZG5I6ccYT7MTHXJw7ZBP3Q44cI/Z0SfscMbUfouxWfEXfmjVSECqjKjICMqitdcr8POyYazXJXajKBatDEQ01b5H4XkUrkuqHQrXZ+65ZO6E1NNkrmZp4cOjOUcnFQ8eJiSbmNsHLzMZ7XF21jy7y7LsZJmitxWWuixL9vdH7OwMMKbm7Ozdjk3c3Q25ceNGZzogBI04gAjIENYQGgmpyAnF7QuaNL2xlsJxMdEQd7rFCtUw//GQxHHJHA87GlGFMSZuAHfuB6zRlGEEwxH/cDgmc31qrQlaxtpv9ephXVI6Q7w8Y2Bq/CJjvF6wtVowOdKMtWXH95hoRb2Y8b3f/Coq2fAHf/L3dS5YfdZeAJHIFA4PDym+8Zts3TwgDBOUYxjs1tQ3trAvRzx5NmS17xJuw/yFHaovfg58zVagUQOfj0YhD2IPG7nY0AU9IUj+FdiU+EkJaUmZGxZZBUmA2eQEGxd1nqOeJKikRGUVJCXZbE2gfMZuhF9r6qLs5JfGGKqyxA1Dgihi9/nnuXPnDrPZnOPjYz749ruYPKfwPLgFi7QhQeezWecCJtd5tVphre1q3YQUOz8/75zbdnd3SZKEvb093n77bV599VWm02lnviEa6TRNefr0KdPplJOTk0vt58WAYbFY4Hkew+GQ+Xze1bsBnJ+fs7293RFvOzs7bDYbbt26xYMHD1gul12zJAketdYMh0OyLOvqxyRAXCwWjEajjjg8Pz8HYDqdslqtODo64u7duwyHQ87OzjpptXjCi895X6LseR6TyaSr1RNQ3gfgUsApwLrvTd5Xewgek30AHcCXTeQ+V//WV4aIq5t8Rh/Iw0Vzv99tU59URfq5z33OSnpBIhIprPB9n/V6zeuvv37JhkZOXJhhGSzxYBb5g4pu8PSzXwXloqoF4fk/JhjuUxGRVQGVHqK8cctej1rWumWsqwWUS7RZMQxqtNmwmT2myma4bBhHltirONgOUdWCRx++Qb45hnKJo23X2KYvbZFXtwT39M0fN0Z928N+VDWZTDg4OOiKPOWC7e/vc+fOHRaLBVtbW3znO9+5BI7DMLyUDnrw4EEXPW82G05PTzk5OemcbXZ2dnj//fc7sN5P30ikZkzjiy5Fkb7vs/y/fo36oHFzeeaDL5NVhsIJqMMRJhpR+zFVOKRwA6xy8KsMv0oJ6wK/SvHaTqVBlRLUOW6RkJ4eMnv8EXYzxy9TzHrO+ugxe4OAajVjvckJJgcsS4dMhah4G8IpznCX0hlggylEE5zhHsFkn8prgHfpDsGLodigsjm6XEE6Q2ULfJtgkyX5eonJEqhKqOoOXGvlEkRDrA6wzpBCxSh/DMEU5Y+x3hjrjbDuAEzRBGDFEpvPoVhCtQaytllM2TSMcQyOr9G+hw5ccD0IB/jjbabXbpFYr+lq6QRUrk+lXZS1LWttobINiKbugHTDWDc4Svcaq9hWutHY5TkNuM5dqszBdNZ7ZVfA2DWMCWwnB7E+DXPaAmvjaAxOw75WLlXmYzYOZLoBtOKcUrQFhYW6YKz7tnweFwBbWOzOIYRGa+313qdoQG7KFcaaC+s9ucUum3j8TrcP8ayuuSxbqVtnk6v66p4cRCnbOIKEZesMUuL4pvGydg1aN37W3WeLbEgKGFvLvcYZxKNMvdbT2lwA69A0toduo59ufL1VU7SodAOyVesY0jHWXPiJOwpsG0Uo042h55ctqM4ugLXfB9U9L2unZ7N3RQri6Yq89slaYN1prYuQNA/Ico+6DKgzj3xVUywrTGIgrdmcrZk9PiOfp5C7DMMRtjb4foBSLtFgSu3HrEtN4cZU0Zg8GJH7EbkXU/kBg0HKzdEZ18Ijdt1T9pwTdtUpW/aciRQs1pvOs7rRVrfyj6LAz8rOEcQo1biBtBKQi5dP6ftsDOTKIzWapHbISs06U6wTh/MFnM8VDx8VFOmQPNFdbCbZTOlsXJYl0+n0kl5UWLC33377Unv0F198kbt3714CnMLKrVYrTk9Pu5qb2WzWdT0UACFF7Ldv3+bWrVsEYUQdhqSOxxrVdFocjqiiGBMNKKOYRLvknk8VxWSuT+4FpK5H7vnU8ZA6iqnCGKs1bpbiFxluq7P2iwydrHHThLAscLOEqC5x0wQn2ZCeHOFmKbGpuLs1JShz/LIk9BqQJbVZQsJd7ZehtcaLmiJEG3nUgWJZppxnK9xRyP7dm+hh0DHUJnSwoYMJXWzoULc/Ny+3aVSWmQ4sk1aotMQpLH6lcEswm5xykWD
WBWbdeFo7uWm02EUjGzFpQ8QIgdg3apDrJrhHiC+gMxroGu/1mub0Jb2yBUHQWTsvFgs2m01nsrG1tdVd+37Dnb5UQ+rpRqMRYRh2mezT01PCMOQzn/lMZ70rGY+joyPyPO9qsgAODg6ARu4i9o/Xrl0jiiJWq1XHMkdRxHA4JAiCTju+XC67mi/pthkEATs7O8zncz766KNuTA4PDy8Vf96/f78zGBkOh1hr+fDDDzu3ub29vS5wWK/X7O7uMp1OO3Z/Op1ydnbGfD7n7bff7pr+iDOKjP96ve4C1vPz8w68C2ncd/2TfQdB0NU3ynXoE65yHfv6937N5NW53r8HZD4IDpN7WynV3f9ync/Ozn5XncsnMujHx8fdwYgBvgj9rbUcHx93oFwmqRywAHYZGCkalZOsTa//snLYcg9h/TbF5oT89AFBvWYcWYqk8bYW1loiEMdxuHbtWjfQ0tVyMBgwYMDecA+v8Hj//ffJ20gUoLYXkc5V/c/vZsP3L7LCkb9LOuPs7IwkSbqLJBGnaLNkTPsykuVyyWw263xXpemP+GLfvHmTwWDAzZs3eeutt8jzvEux9JvT+L7PZDLp/M8dx+lutKIoWHph03kVy81QMc5XVMsPWb7ziHx23BU+qnRJTM14NCQaTfHH+zjDXRLj8/bDEwodY8d7nFcuGxORqh+g3h5j/TEmnFK5A5bhFOuPAYvKFzjFCpuco4oVKtuQpWtsnqKqCjVfU5+vIDoi9gJqoyhKSAuDdYaoaIoTbVO7d6idAYk7ahp9jFaQLxp5UbmEcolSOY5ToQOIhz6ur1hultSswTukVgrrBxAMUdEIZ7jVdLz0RthgH6TpTlE0oL80YAyuattKQ8tjqhZoG9YZYMCpa3SV4pf5ReFh3jKmaXO7OaHY7tHoe9tmMX2dtfUVugXWyqkBjTY1lA51UVMXHmURUBZ+57l9wVxz4byh1YXEow+uBWCPgQN70UjG7wFFaeaSt/sWSUjFFTkITWOazF7IO+BC4mFo5SA0jHUfXNearuNj957LX5Vuui56YYkbFs3YRXXTDMgxbaOYBoVLY0hb614BY6NRrwqXKvEo5x5ZFmONRrfNe1Rg0IHt2R42jLVFmsMobKCwkWrt9mQ8a/A0tfaordMctGistcXzS6IwvVS8GPtJKwkRGUgDsGO3lX50XtYZoU6JdYKra/Labxnrxhkkq2KbVaHJyrAq0qAsVn6S1YPNstqaV5V7VlnvrDD+af1Pv/a/qVclbqUJ1QhqhatdTG2b+wyPeVrz4HSJM91ndOMemT9gg4saTfBHDtvDGfv7J7z8A6fse6fsqFO27QlTO2dkVgzrDYPqQgLSFSx2rxJ/WeIdtlZ7YSsF8XsFjJ5P6foUblPzMPO3yF2PVCsyN6CKR6wSn7OZg1E7zOeQHOedXZwwhzdv3uTevXv8wi/8Qpc2F521AGFhm5OkYjS6KGwHGA6HXVGXUqqrh7nalltcwkRrDXSA3PV9TBhDGFG0GuvM9SjykmwwJPN8VlY1GurBiNzzKYKGqVbjCR8Nx1RRTBk0AMsrMrwsw8sT/DzDy5pnjJeneFmGn6UMFueMN2v8PEMnG5x0Q9iy3Hqzpl7O8dr1ss/49e2LJa1vAeNA5Su8cYyNXNJ4wSbQ2MiFyMGEexDdgNijjNz29x4mclCRh42bn/GdxlY1LbFJCZuSYVJAUrLKKnRaYzYp7tyisrpxCsmqpnnMMu1cQUhKdGWxxmB6bKe1lkopil7xn2S3+y3b+3Vr/est17XvonbV3KC/3zRNO+Db3642dpR1/t133+2kFf39LZfLS58vLK7sR0CmgHvJosjfsizjm9/8Zvc5sm+R+wqbLaoE3/fJsqxrmjSfz7uuopvNpjuWMAyp65qzs7NO+iQWxicnJwwGA5577jnm8zknJyeXNNqu6zKbzRpbyiDg8PCwk0g9++yznWmINFN677330Fpz/fp1VqsVT5484fT0lK2tLVarVWcqIqoAMSVZLpdMp9OuiaAw7OLa0tesf1wA1WfUr14z+Zw+4JZ9yP76WPZq4ar8TQhYkWNLwND/vI+bR/3tEwF6URRdFatMMCmYXK/XXQQjDDnQ6cb7N4Ckbvo2Myp7yPTt/xXp+Pexdf5fc328IM9znj59it96R+bLnDJNcaxpCLReFe6k7d6Ypinn5+fs7Ozw/PPPc3h42BUwPn78uGPwxVtdDOkljfRJIP1/6iapoq2trU7jJcUHog3f39/n6OjoUqpEggxJX3700UeUZcnu7i47bQGHnOPt27cpioKzs7MuMpQbW7pbSncyCaTKqgYCpnde4tO/9td58uLP8LmJx+Bsziy1lO4dTr3rrLddyr0BpTeidAYYf0zljai1j1tn6GKFypaUe3OcMmdrGKHrClXkRGVFlZfkiyWaNbYCq3yUG1OpAB1MMd4I414HJ4AgAbUAd9UUwVZrNCmlUxFPApTNCVyLWc8p6yXGeUrtuqggQLkBhCMIR00BaDiC8BaEA/B8bF1TVhVlaUitxVUVyhocDA4NDlVYlDEo0wLNWmEThV0WmLJqG8W0xYepj6kcjFejohIV1KigwgZ1B+xUYLFe41BhPYX1dNOiXTdt06lqbAus69Klrr0WtAq4tg24tlw4fDhXwLXfe0XA1LYstroA39I0RoB1Xw4iwLqfVROgvOTjQXLNRRv7iot26aW66MrYvUddOIUACoMblnhhgReWTdHhsMZxG8Za6RZct++3RgoY9UUBY9HIccrMo1z5ZMcRpnLArXGiGic0qMCgXNu1iG/8rPUFa63BDoAR2H062Q6OwWgHI7S9NIlxLb6XE4WtFETY6qAvBUkb1tpNiZy2YYxIQdRFO3NX1WR1QFpHV7ovhuSFT77xKBOH1aLkeFazOS3YnCQUywxSy+IwYX28JFtUmNKgVVtL4Xik/+bf+zPAGIctXPZw2cVhS2kzDYNkdzc+fW4/PBoefP4lDvwzdp0zttUTtuystddbMahTojIh6kB1U7To5WUDqtNGX+1WNZXrtPpqn1wkIJ5H4TavXPmkymNmQlLjUjoBiReSVgErFZGaAess5u4zL3Pjxg3KsuwauUADcFStyDYXIELWhCAIOheJ+Xze+BuXjT7685//fPfsFFJoNBoxmUw6wNJfRNfrdUeOOI5DGIYdgFeOQ7CzSxFE1EFIWtYsaoP/7HOYeEDiuFRhTOEFnOQFiXbIvAA7GGIHI14bT/h2q7dWxuDlWQOi8xQvS7HLBWqzwiwX6M2aIM8YFxnzRw8ozk8xizkDWzNW4LQsNummUSi1i7sAyH6RmbW2ad5TV2x8Bz0M8MYRq2GAHgSobR9uhuihjx62/tTDEGcYgLQyH/iogY8etN97DiYpyDcFZlNgN3mv62KOWaeY4zk2KVBpw2iXq5R6lWHXBTYtyWdrzDpvuvVCx0ZfXXfF+ezq1tcGC0gSsNO3zeuTYQKyBKAK6OzGqSdN7QN4mUP9Y+oX9Mlnp2nazRnZ+lrnfjAo10vOXfYvjPlVhl7eJ1jpqoy2z9TKMfbHVbadnZ1Oo2
2MuVTvJ/sQrDYcDjuNuDjjSFaormu2trY6kKm1ZrPZ8P3vf5/1es3e3h6r1YowDDk6OiLLsk63vr+/f8lV77XXXutcAaVLepZl3L17l8lk0u3fdV3ee+89kiTBGEMcx931F1wldW7T6ZSjo6NLGaswDDtZmQQqHwe8Zdz628eB76tWiPJ+ybzIPq4Wssq8kfu1bwkp1+pfCqAfHBx0KQ5pby7aJikAlQORidyvcpVNTlA6Y8pJh7N/Ak//AfH2NkdHjY4oz/Oue6IUCUgaoh/JinempGSGwyEnJycdWD06Ouq0S2EYMhgMuiJH0QPB72TRP277uP/5OAlM/2L2CwriOGYymTCfz7uU0sftX9xqJKg4OTnh/Pyc6XSK0R7u9BYfJRH63o+TxksIPk/lDTH+BBVvYa8/y8qJmftjUkJUtEUSTrHeCKxF52sCW+AtSt7MPTxugq6wRcUmW4GxKKvxrEbbAuWssU5NrSIy41OwB2aKKqbU1Zp1meHUm6ZHigtx7OG4GUk6w/HqxqdYg6vBOA5OGGLdCBsMIZ5ANIFwCxPeAj+kbnWZWWVQdY2jmldgGycSbdtekNZ2wNq2rKxZGuzpBlOm1HljvVdlLnXmUSiLG5UQVBBWaL/R+NI2KiGwqNYdBE+Dq1DaoHDQtcWWFpPbBqRXIaWxDbg2NBKNTLUdEe0Fay3M9VXWeiTf2wvG2qeTRFw0h6EB1/0ujFVPH13QSFBWtJRxCzIvsdZcFDF2cpAWYMvt+TFyEKVrvKDCDSq8qMDxS7y4bGQgrkE7pvOzFl9sW/fkIKVD1VrulalPufbYnIZUuQsadCvLUaFBuaBc28g9+jrrFljjK2wEViwNXdt0sFSaGkWtTBvMNADb9/NGAhK2gLpnudcVL7Y+1iIHCXudFxvLvQxX1aR12JOChKRlRF42Raxl6lMXLjb3yfKQfGN48HTG8sRBpQOc3GJTy+5wn7qEqqpxtE9mHUonZGN8smDExglJnIiF0VS+IhqXjO9lHExWXA9n7Pnn7OgFU86ZmjmjetVIQKqUuPzBvx0Wjaa6AdUFftIC67Z40TGGwhNrvZa1bkF17nrkumGr58bn8DRH+/uMt27z5uExb71zRlmNuX7jByjqAask5xvf+EbXx6DfqEueYbKICtPVZ7H6z28BCuv1GqUU5+fnFEXBtWvXmrWhBUBZll1ie4MgYDAYIK4NQkgEgwEmHrI0lqWB97Z3Of/cj7KoLSYaYIYj6nhAHcaUYcTjeEAVNm3OqzDCtox15QcoU3cstZsleHlG6iiCIsdNNqjNCi9ZsXt2CqslT95+i2p+jpel3N2e8pl7d9nyHDanJ9CyuEVRsNlsOmcysV77whe+wJ07d/jmk3d468F7XVtyG7o4ByFqsIce3sIdhXjjGD0MIHLRwwBn2PyvHgaoFmSruFmT7SbHrAts0oBrs7poaW6TEpWWmIdz8mXSAOhWLiLstVnnVKsMW1/Yzwl4lPVdAGyfmXZdF6cHUnzlw8DvZJdCGvWBsTTpExtGudYSbMn/CQEmAE/mlMwF+Vs/MyBgui9Z6DPWol8ej8ddLVdf/94PDKCpx6uqisFg0M1xUQcI6SfAWwJAOUcx0pBzFB21yD3k/zpDiPazRW4j4L9/PcSSUMCgvEeKekXh0LcylvtRCjtFJy7FntDIsQSDbW9vd/fa3t4es9kMpRTHx8d89NFHOI7DcrnsLJK3t7e7Gohr1651kp47d+4wnzda/A8//LCTe2xvb3N0dMRsNusy/VtbW+R53gU9ErBba1kul9R13XXrFtZ8OBwymUw62XBZlhwdHXF0dHSpNuPqJnOsbyEp86X/7JH507eS7ruy9KUwMr8EmFtrO3l43+f/X6RB/0SAvrW1denC+77fRS/SgbNf1OH7PqPRCNd1OT4+vqTtEeZa0hUC7uVmF8cSaxtrHJkgcvJ98A8XbHVVVVy7dq3TOC2XS46Ojjg9Pe1M5aMo4uzsrBvUfiT5cRfrqrb8k7Z+dHt1MZIbdH9/H5RGx9ssnW3m8TPo3W3S4S42mFD7Yyp3iIq3WQ53SGyI8UfU/pjaG/PIH4OOeLPKcKscty4o4xX2Zo7Nc6gqrIVNHUHlYAsXage7CcHRre+yi6pd6jpH24LcTXB9w3Do4LkllT1BtVpc5bSAyXXAc3CjGIKYWVYxz20LrEek0Q7WD8FxoW46MWIMmgLH1i1jbdFKTCAEWDeFd7ZSkCrMMsOURePo0fpZV6lPVWtMUGGDqpEiBBXWr1spSOtpHdMWFJomEtAOStFIeMWpo3SoKo/K+E1jGykSNDSMcgkkraRDqwvmWkC1R6Oz9mlBPQ2g7gPrksYKUXTc0oGxbIG10a1W2l74WVsQUC3SjM7to98oRhjrSrUFl72HjBQzGtDatBrrqmWtK5yobBnrumGru0Yxfcb6cmvzKm/01WXikcyGXUGpar2xVWDQvkV1BYw0riBad77WOGCHNB0sPdpCRgtaYXAwXZYA0IYgyDt9tbQ2j/zWZk9erRxEnEGiS64gCZFO0cqS1mFXvNi0NQ/Ji4A0c8mXDkWiSHOXPAs4WyrKuUO58jGrCLeAellz8mSGqZrpsnd9lzvPfZrEeiysTxVPyPwRG+1TuA4EGX68Itifs337lF1/zq5z1nRYtN9mUi8be71y01jrdc1gGumHsNVeURCcl/hPCrS1FL5HHlwULGaOQ6ZdEqWbbn777pvWVXPrc5o73kmOdwg80aX5aHCWvrf76PypU5jVv/7qHysF8PaBsrWWKIq4ceMGruvy6NEjvvjFL7LZGNbmMV/+5t9hMvFQocVxGkZNWOnZbMb+/n7n2iSgRogTATFSCC9AYjgcNlK7tk+FPxiw9gLqG7dJ0Lzu+SwNbJRmoxySa8+RvOJSBiFFEHUOIJkXNIWMXkDhBxSuh2MMYVUQlTlDLMXus5SzM9w0QbedGKNkzeDkKXqzxskSgjKH1ZLnrh3w/LV97HJBnWzIs4zxeMy1a9eI47g7lw8//JDj42PefPNNHNflM5/7LN8Zlnztg3eZXNvhx3/m01QeZKFDZn0WRcLaZNiwKU4cbn0eZxDgjiPinTHnw5BjD2rvHvfLmmqZorMaJzeQlg24XueQNpIPlVaYpxkqWzeSj9xQr7Omy2ILqlV+wbDLtRagBhcZbQDX2k4/LOuVUgqLDyP/UtZbyLI+6yvNYAQgy/onnwP0OnpeYAj5nD5gEeDzcTpwqa3qr7lScCjMpdTGCe7oN1m8yoQKOBcg1gdPfZe4q6A+SZIuEy/nLWPXyW165F+f1c6yrDt/yRjI+UoGXX6W6yXnWhQFs9ms04CLbV8QBGw2m0vnJgWrctxlWTIej/F9v3N4Gw6HjMdjxuPxJcJSCkslGJNgWdxg0jRla2uLNE07pxXVEmsCuIfDYVejIYFolmXcuHGD+/fv89577126dmKd3dd8izRX2Pf+sffJ4cFgwKc//WmWyyVFUXBwcECe513gtb29zfb2Nu+//36n8b/aNEiAeJ9Zl+8F1/XxZ38uXQXlfRwof+8z7EIWW2u7D
MEnbZ8I0Pf391mv190kOzs7o6qqLpXRp/QHgwH7+/tMp1OePHnSRRYSPUhTmocPH3aaN1kghDHuTzCJIvuFp7L1U1vWNp6ct2/f5uDgoCvCkIgY6C6sHFN/UPoAu/+7/tcujegGjf1euIUNJ41NXzDBxtsQTrDRNvlgh+PxHrmOUdEu1p/ynXCbb+sIjMWpMsydBF2VqLpG1RXWNEykwkVVHrXR2DrE5CFoH+oUVRzhkaLrDbFfs1kcYm0K5YaqTsGpcccRpSnBVbjBkPLGz4Ef4Cav4foVerBN7kQU/pRNEHPiB2Aatppb99CqanXWjRxESSfGVlesQstWW8BoK4U9zzFFccnPus5cqjyk1tKBscTxa3RQofzWdq9tFkMExlNYz2Ic25K69QUQrRrWuqjcC8ZaAL7WLZPcSi/6khCPxgEkVI3GWiQNnr0A3G7LcgtTLc1hCgV5D1jXNF/zdv6pVsZhDdI1sZN2iLe1vKdUFwBb5CGiIxGG24J2WimIX+JGFV5Q4ARVc80c0zQl0i24tmBbQG5q3chBSn0FWPukiwGrzMWULnim7cDYurP02ssb3bqCKNUCa9UEPRMwbjOeVjfBi7WKChdxBFFOTeDnnca6KV5MesWLKWHrEBK3OuvIyS7prGPduIMAZCbspCBJGZKVAVnhk2Uu+UJTbGC+MhwvC6pZRjnPqZcl1dpSzizpuSVdlDjKcv3GDjefeZFo64DMG3Baah7ME1I3pgpDxrshox1DMFgz3Fuy5c3ZbtuWT9WSsVkzrhq2Oiq/S1T+fYIy7woWO1CdlATZhSsIcGG153vkXtth0QvIlEeGS4bPwoxJMoe0CpktFe+8d877H65YbXwen6x5dDwnL0qsrXAci9bVpc5zbdHRy5/0/JbN+9N/unsOX2X07t+/z6c//WmqquLevXvMZjMeP37MarXqNNb9DOZoNGqac5QVwY1bZMMxRRBShhErq1gYS+b6jY1ePIDRGBMNmvblgxEmHnRgO3U8SsfBMzVRVRLWJWFVEuQpbtZIQsKqaNxXFjOue6tGJrJaoJM1rJbcnIy5szUhNjVOXaHbZ7kwa7/+67/OgwcPUEp1DGZd12RU1L5CtbKOsyLh/OwRNnTxbkaMDrap7t3iJNCkKscEJRtbkL0Ig73nKfV9isjlm56DTW/ymdUXMeuc97OGha5XGV7tY5Y1xZMV5nzddVbcHkxwC5gGA+7sXqdeZZCW/LNf/hLf+c53ePnll3nxxRebx0QLVmSt7TPBfQmF4/jUyqGKfdzxhc+ztbYDLP01WYCDAHXRzApwkffaFsDDhfyi75cuIL2v7Zb39o9djlls5uQzPM/rwL38vu9NLaSdBAV9Rw55z9WgU8Cb1IRdJdv6BZ99mVCfLZX9Sp2XnJPIpeCCCb8K4iUTIIXGAn5FPtK/BvJ/Au4FtHfN9drARQLcyWTSfa4ECDIW8r3Uno1GIxaLxSX8JEHP3bt3McZwenrK3t5eJ0cR8lL049PplPF43BUE9549XW2bMYYXXniBzWbDu+++25G0VVVxenrKrVu3CMOQ119/vXPskTE8Ojrqil+Fgde6sduO4/hSsOc4Thf894PB09PT7v8Wi0WXabDWMp1OAdjd3e1kzsvlkq9+9avdtegHcP3Pu6oK6WPEPst+FY/+bsSusOZiYdmfM7/b9ol/vXfvXrezp0+fdqy0VLbLiQwGA7a3t9nb2+tSEHLzyKJQVVVXyLNarToQLg8A6brZj9Dlf2Q//ZtQBk0KMHd3d0nTlNdff50kSRiPx+zs7DCbzboLV1QVOpxip9exwRgV70A4wURbDdCOthoAHu1AtAvhDtafgD8Bbwg6gCKDMm8a3tQV1C1Qs43NRa09ah2CE2KLGpI1vlpjs4f4NqHMZtTlEj8A1zM4jsGqGqNqvMhviFalMI4DQYDxQio3gsEWdjAl97ZJvRicz0KZQZ7gkOHYDLTBb3XW2o1QrFB2BeZG12UwrBV2U2Fma0yRtFrrpq12nQbkRjegOmw6Cmq/RHsVyq/QXo11qwbgDsF6DsZ1sdrF6BhbG0xloKyxteqx1q3WWnS+tgW+wiIbpy26q8GpYehCoC+AdvdqAbjWPX21upCBiJd1aRuQndPZVzfNZbTcRRfFjH1JiGitiyvAWoou5f7sgLVIQcqmgDEo8aISxyvRnm2kII5BtwWMGIW1fca6lYO0jiBF4pMfh2zyUSsHUeioarpgtn7WIhtCtc4gugHXaJrgI7ZwoLAuWEe31DzUSlErp9V9G4IwJwwywiBrALZ/IQcJ+90XRQ7i9FnrVgqiG8CTmYapTuqWra5CssIjSx2yOeQby9misdirFjnlLKNe5NTtz8WiothAVS6o2EAwpvYN9TCgGgypBlvYwQh/4jDcdhhvlQzvlExe2rDrL9hxZmypORO7ZGRWjKo1UfUtouo320YwrV91H1ivS4Kz1hGkKBv9buB3LiC533RYzB2fQvuUOiAxIQszJrcRm9Rjk0UU9YT1JmCVaj48POetDx6xtbXdZM1obMME1MrCIs9OIS7quua98+/x/un7pGlKWihQFwu8ABBZxPsLwv+YTXTW2nFw4gGZ61E6HuerNR9cu8t3zzbkfkgVjphPhzzVU4ogpP69P8fpYMiTrW1MPKQIQnI/pA5jjOfx20WO28pAhKVmvUKtl+jNGrVc4B4fEtcVn751g4NBzBBDVBYMTM3A1tze3sKnSYcLkOzri+M47tYQLwyoXMinhmIvovZiTOiy1hXnrqX0NJmuMYFD5WXYqCL53A9C8TyFCyp0UQMfJ/bwlcImBbbTVxc4uWF3MGEnnrATjRk5IU5hKBcJydGC97//EL9SZLMPefKt77M4PCWfb9gsVx0gEoZR7BVblwb29/eZHR3B1hZb9++z3Gx4+/QU9/f+Xh4+fMjTp095+vRp12FwuVxesNm9ta8PIuXvIusQoHy1C6f8D1xmt/vgWkCcSDf6oKTPXAtTKoBGJAh9sClSVaDLsvf9pPvFkAI2+5LQqwV+/cCgH6AK2BTNd5+Fvjr/oQkOxLCi73ctAcdVT3lh/H83OWy/4E+uhexT6icEv1wNAkT+JayqqAX61wjo2OLVatUVeYrErO9nL40a5Vhv3rzJBx98gOd5lzzO5XiMadzh5vN593w6Pz/H931OT0+7cfF9n+Vy2Vk7il59tVqxtbXFdDrFcZoGk9evX6eu6665kWC+9957j/39/U7qAo31pzDtAuRlXspnSzMjmTciXxHbaWubwlAB9kEQMJvNOjlzVVU8ePCgk86t12u01p1tuNxX/YCvX28p99TV6yJzTOZs//7p14T07wcJtuS4xbf9X4pBF2eU4+NjHjx40Om35WEkQNwYw9nZGavVqm2QkHRRYZZluK7bGctLumW5XJKoAau9z+E8+i2sRM1aY/Z+hGL5BPL3uoecbEZ7DViNthqAPdoje/4VXvWvMcdl+dmfo/K2INzhQ29CqWOMOwR3gNUBtQGKHKo25VeLfleD8hrGWjlQJVBsYLMGZrD+PpTnYJPGdo+iLQKsGzbW
c8B10EGAE8TUjo8NBzijXdRoh9LZowyGWC+AuqIuUxxyXJXhqQpNk/5XKHwLqm6PqdZY40AFNgW7zDFF2bXVrrOYKp1QlA6lWzfgOihxgwp8jQ4s2k3RboZ1DMY12Eg3wNrxqJXfkLm1xtYWakNdOU0RYx02OX5xptC21VfTOMEop/neqxrw7Dvg1GidYj2L9YDAaV6e2wDrzFzY7eVcAOHcXshENrbRGkNjNafaz+tAtWnHhlaqoi502SVQ6sustSxWBsDiOK0UxG801m5YNR0Yh60cxLGdzlpQua0bxt5UGlO3lnt5A6rLxCc/C9lkQ8rcBetA30/ca7TWIqExrd2eVbphol2wUxof8NZBRDQoF63PGzeXMMgJg5QwbLXVYXrhCNI5gySXmsQ0tntZ42mtUyKdYaxqZCB1RFrHZFXY6KvLgLLwqDcedaoxKSyWLptkRFRtUc9TsvOEIQFOoaF0QTXp/CezDWsVUMZbPE0q1srFm4TEW5rhtmU8LRgeVOzGa3a9OVt6yZQFI7tqtNVlQlSlhOVHDVNdFAQtqPbyqgHXaUnQdl30ysYRJG+Bden7FH5A5QUUTkDh+JT4VGqXjIjcDCnUkNwdUdghq9rhLC0JtvcIBlPoBf0CJKqiYa6zLOu8e6XASpg+sT07Ozunqi6au/i+T5Ik7O/vdzI+ISEEhEr3ujRNGY1G3UItmUBrbZO1CSPUeIIajqmjGDsYoR5UP0+TI7r6mvR/Hv6l/5wyjBrHEMfFLfLW6WPNa8kG1drs6WTNyLEEbsZws2SarwmOH1B/e069mJEcH8KqAd+sl3i9tLCApTRNOxmhAL47z97jT7z8pyicJZUPJnCYFxvOPEh3E+pAU3kxmQ7YmJw60BSupfIUdaDar5raUygDXungFuCV4FUWp1C4hSXCgwSCSjHOFdHKISLEbHy+9ZWv89Gb76KzGrewmKSgrupO+6uVYrq1xf6NGywWC6K9PcbXrnF4fMzrr7/Ozs4Or3z602RZxpe/+S0evvYmxphL7chFthAEAfP5nOFwyHQ6ZbPZcHZ2RlEUPHnypJM9LpdLvvzlLxOGIWma8uyzz3YZY9d1OwAjC34/+9EHh32tcp/EEsAxGAy69VPmXl/G0AfhV4NBCZr6ctY+EO0TdQJorqb1++BFSLc+Q9+t7R8DruVc+rLRvjmCjIc0nhFiTzYJTOT/+mx4/3/qur7kkNInGPvBUT8Q6R/fVdmDBNcfFwj17w95Loiuvh+MAJ0Dy+npadcBvXPaaZ9Rm82GxWLR/X8URUwmk+48Pc9js9l0ROj169e5ceMGJycnPH36lCdPnlBVFbu7u12ws7e31wHuxWLB6elpl5GYz+fEccxsNuvsR2V+7Ozs8PjxY46Pjzk9Pe2utxSDSxAhRaj94lV59snc7u8/z/NOjtcPCAVbSp+Bqmq6ow6HQz766KNujkjt5OPHj7tx7xfeynzvZ3/6ILy/JvQlzXLN+24wfTcf0f7Lc2K5XF6qOfy4wuj+9okA/Vd/9Ve7SFCiFIlkpGGOtH8tiqKLWiRCjOO4K/45OTnpqmvTNMUdblP+W9+kcCM4eRP1tb/cdHV85o/C/u8FNCzfZaPchr12B+CEzSFXJdQ1pjbkVvGe5Nx9F0yJsjmuzSFZ4xRLVPUIl4TxUBN4hsPjx5R12koiVKNd9jzwPXAiCCKIxo3Wev8lcF3Q9wELRYY2Ga7KcFSBtnWjs9aqsd3DQVmNMhrb6o5tpYiTCnO+xBRNW+3GHWRClu2QQQOqwwodW1RYo50KFagG4FUz3FBjfLC+Q6UVtR9TG4+6UJgKMKZhrcuAyoTkpqV5HQt62DZ7afXSvtPKPWpwa3ArcHNwUgj85vx90/w+aBFkUkFqGoCd0VgPlo3tH6Xb/K6qQYFxggZUtExvw5TbpqLT0mqme5IQ0YEXDlTORUGjkffSyINc03ZeLFt3kGbM3KDE8aoWWLdyEHoSb7HcKxvLvSr3KDc+RepTzAKSrNFZm9oFz6KDqm0UY5pAQTWYu9FXO1itL2QeA4uZ6lYy00xbCSTqdgyUQwOkw4wozJvixaAHqP1GChJfcga5KGCMpUmMzqitbmQgLbjOqpCiCinKgLr0MZmHzV10GaOzAdmyYDNL0OsatanwS8WuP2SIpnIG/AfZn6ZUDrf1CT8cvM9GeVShQx2UuGGCFycE8YZoJyH0V2yrhAlzJmbZdVhsXECu6KqLogXVVSP/OC3wHzeAWxxBxAkkcz1yxyVVLql12FSKk1yxymC+UcxXDmdzl+V6QFqGKH/C1rW7bF+/x3i6x7htK91PF1ZVxenJaaeRFADRT0vXdc0m3+AvEoa1vgRcZBPgJaSEpGhlIe4/ZH3f7xa1LMuojWFVW05OzymDCH/7gLXS6PGU3A/aVuVR2xBGUfghddTIQsogwgwG2MEIhuPmGbRZodYrWK9gtQD4X9P47/Rf77VfF/K7H/p//cdfVusVbpbglwWb1o9ZmE1plra1tdVZtNbGcDQ7QcU+JSXR1pDh9RD77BR3dB0TOoRbQ5xRiB40TiBq4GECBxu51L7CRh7uOET5Lv+ktDiFwS0tXqnwyiGhddgUlmq1wVkZQutw3Qmpj3K8CqplilsY3MISWpexGxF7AevVRQdBuY4Nixei1O9sW59likTtgj4jd3JUrNBD3THF/cX2zTffpK5rPvywaZz33nvv8cEHH7C7u9vZNSZJ0qXl+xIDAUc3btxgOBySpikHBwfd/Dk8PKSua27evInjOB3LWdc1Ozs7bG9vs7Ozw/e+9z1ef/11XnnlFSaTySXAKWCxD1IElPZBeZ8FhguWVwCuBFMCvvvvF3Ai8/tfVMgmAKivUe//Te6/fgGnBKb9wEKCvH6AIIBU5AF93bqw0AKq+ppy+T9pjtNnksV8Qj5TjlO+9uUNcm/3LRL7gUSfyZZxjqLo0vuAS5kHuUZ9Nl80yf3rLKBX3Fb6RaEyPmEYcvv27W5MRZcuBaAyPv1Mhph9yHFKQ6TDw0OCIOh08rK/xWLR1ZnMZjPiOGZ/f5+zs7MOVGutO1cWCSwPDw9J07Q7tyRJGI1GQBPUjEaj7toHQcBoNGI0GnU+7jKfxuNxd8/JHBK7SLlujx8/xnVdBoNB14hJnslyveT7vvxKwHr/93J8/a99iUv/edHP/Hxc/aI8H/ryLfmM9Xr9LwfQxQy/T/9HUdQxPX3bJ5lU0FjH7O7uslwuiaKoq2Bfr9dMp9MGuDs7WG8Ajg/7n4L7P41N5zC4BYXbumMEkD4AswazaShkk4DTMpKOQzgeYl2XzNIAy3CEGmxRR2NsOMKGB+CFmCpnZTdkdkFU7BBkC7QpcKxBG4NCoawGPBQe1rpYW2OXM0ypWz/r1k85HVCmE4rKQXsVjl+g3QLtpGidoMwGbZoupJGT4duUYnGETc7ZnDzCzRYEyQIvXeDZBJeMyXjMk7/wlzj7w38cUHDuY59EjZQiq/BVBTYHm1HkCWYcghu1IDjDcUusWPwFLoQBoGGT46Q5pBlmnWCTFJUV2KSCpYLaBeOBCsAPIIw
[... several thousand characters of base64-encoded image/png notebook cell output omitted ...]" + }, + "metadata": {} + } + ], + "metadata": {} + }, + { + "cell_type": "code", + "execution_count": null, + "source": [], + "outputs": [], + "metadata": {} + } + ], + "metadata": { + "kernelspec": { + "name": "python3", + "display_name": "Python 3.8.8 64-bit ('svcnn': conda)" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.8" + }, + "orig_nbformat": 2, + "interpreter": { + "hash": "5b8911f875a754a9ad2a8804064d078bf6a1985972bb0389b9d67771213c8e20" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/notebooks/notebooks_utils/__init__.py b/imcui/third_party/MatchAnything/third_party/ROMA/notebooks/notebooks_utils/__init__.py new file mode 100644 index
0000000000000000000000000000000000000000..81512278dfdfa73dd0915defa732b3b0e7db6af6 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/notebooks/notebooks_utils/__init__.py @@ -0,0 +1 @@ +from .plotting import * \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/notebooks/notebooks_utils/plotting.py b/imcui/third_party/MatchAnything/third_party/ROMA/notebooks/notebooks_utils/plotting.py new file mode 100644 index 0000000000000000000000000000000000000000..fb577a8012a30b1cdbf3145dcb3210986e04b2c0 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/notebooks/notebooks_utils/plotting.py @@ -0,0 +1,331 @@ +import numpy as np +import matplotlib.pyplot as plt +import matplotlib +from matplotlib.colors import hsv_to_rgb +import pylab as pl +import matplotlib.cm as cm +import cv2 + + +def visualize_features(feat, img_h, img_w, save_path=None): + from sklearn.decomposition import PCA + pca = PCA(n_components=3, svd_solver="arpack") + img = pca.fit_transform(feat).reshape(img_h * 2, img_w, 3) + img_norm = cv2.normalize( + img, None, alpha=0, beta=255, norm_type=cv2.NORM_MINMAX, dtype=cv2.CV_8UC3 + ) + img_resized = cv2.resize( + img_norm, (img_w * 8, img_h * 2 * 8), interpolation=cv2.INTER_LINEAR + ) + img_colormap = img_resized + img1, img2 = img_colormap[: img_h * 8, :, :], img_colormap[img_h * 8 :, :, :] + img_gapped = np.hstack( + (img1, np.ones((img_h * 8, 10, 3), dtype=np.uint8) * 255, img2) + ) + if save_path is not None: + cv2.imwrite(save_path, img_gapped) + + fig, axes = plt.subplots(1, 1, dpi=200) + axes.imshow(img_gapped) + axes.get_yaxis().set_ticks([]) + axes.get_xaxis().set_ticks([]) + plt.tight_layout(pad=0.5) + return fig + + +def make_matching_figure( + img0, + img1, + mkpts0, + mkpts1, + color, + kpts0=None, + kpts1=None, + text=[], + path=None, + draw_detection=False, + draw_match_type='corres', # ['color', 'corres', None] + r_normalize_factor=0.4, + white_center=True, + vertical=False, + use_position_color=False, + draw_local_window=False, + window_size=(9, 9), + plot_size_factor=1, # Point size and line width + anchor_pts0=None, + anchor_pts1=None, +): + # draw image pair + fig, axes = ( + plt.subplots(2, 1, figsize=(10, 6), dpi=600) + if vertical + else plt.subplots(1, 2, figsize=(10, 6), dpi=600) + ) + axes[0].imshow(img0) + axes[1].imshow(img1) + for i in range(2): # clear all frames + axes[i].get_yaxis().set_ticks([]) + axes[i].get_xaxis().set_ticks([]) + for spine in axes[i].spines.values(): + spine.set_visible(False) + plt.tight_layout(pad=1) + + if use_position_color: + mean_coord = np.mean(mkpts0, axis=0) + x_center, y_center = mean_coord + # NOTE: setting r_normalize_factor to a smaller value makes the plotted figure more contrastive.
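+ # matching_coord2color() (defined further down in this file) colors each keypoint
+ # by its polar coordinates around the match centroid: the hue encodes the angle and
+ # the normalized radius selects one of ten saturation/value rings, so corresponding
+ # points in the two images share the same color.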
+ position_color = matching_coord2color( + mkpts0, + x_center, + y_center, + r_normalize_factor=r_normalize_factor, + white_center=white_center, + ) + color[:, :3] = position_color + + if draw_detection and kpts0 is not None and kpts1 is not None: + # color = 'g' + color = 'r' + axes[0].scatter(kpts0[:, 0], kpts0[:, 1], c=color, s=1 * plot_size_factor) + axes[1].scatter(kpts1[:, 0], kpts1[:, 1], c=color, s=1 * plot_size_factor) + + if draw_match_type == 'corres': + # draw matches + fig.canvas.draw() + transFigure = fig.transFigure.inverted() + fkpts0 = transFigure.transform(axes[0].transData.transform(mkpts0)) + fkpts1 = transFigure.transform(axes[1].transData.transform(mkpts1)) + fig.lines = [ + matplotlib.lines.Line2D( + (fkpts0[i, 0], fkpts1[i, 0]), + (fkpts0[i, 1], fkpts1[i, 1]), + transform=fig.transFigure, + c=color[i], + linewidth=1 * plot_size_factor, + ) + for i in range(len(mkpts0)) + ] + + axes[0].scatter(mkpts0[:, 0], mkpts0[:, 1], c=color[:, :3], s=2 * plot_size_factor) + axes[1].scatter(mkpts1[:, 0], mkpts1[:, 1], c=color[:, :3], s=2 * plot_size_factor) + elif draw_match_type == 'color': + # x_center = img0.shape[-1] / 2 + # y_center = img1.shape[-2] / 2 + + mean_coord = np.mean(mkpts0, axis=0) + x_center, y_center = mean_coord + # NOTE: setting r_normalize_factor to a smaller value makes the plotted figure more contrastive. + kpts_color = matching_coord2color( + mkpts0, + x_center, + y_center, + r_normalize_factor=r_normalize_factor, + white_center=white_center, + ) + axes[0].scatter(mkpts0[:, 0], mkpts0[:, 1], c=kpts_color, s=1 * plot_size_factor) + axes[1].scatter(mkpts1[:, 0], mkpts1[:, 1], c=kpts_color, s=1 * plot_size_factor) + + if draw_local_window: + anchor_pts0 = mkpts0 if anchor_pts0 is None else anchor_pts0 + anchor_pts1 = mkpts1 if anchor_pts1 is None else anchor_pts1 + plot_local_windows( + anchor_pts0, color=(1, 0, 0, 0.4), lw=0.2, ax_=0, window_size=window_size + ) + plot_local_windows( + anchor_pts1, color=(1, 0, 0, 0.4), lw=0.2, ax_=1, window_size=window_size + ) + + # put text + txt_color = "k" if img0[:100, :200].mean() > 200 else "w" + fig.text( + 0.01, + 0.99, + "\n".join(text), + transform=fig.axes[0].transAxes, + fontsize=15, + va="top", + ha="left", + color=txt_color, + ) + plt.tight_layout(pad=1) + + # save or return figure + if path: + plt.savefig(str(path), bbox_inches="tight", pad_inches=0) + plt.close() + else: + return fig + +def make_triple_matching_figure( + img0, + img1, + img2, + mkpts01, + mkpts12, + color01, + color12, + text=[], + path=None, + draw_match=True, + r_normalize_factor=0.4, + white_center=True, + vertical=False, + draw_local_window=False, + window_size=(9, 9), + anchor_pts0=None, + anchor_pts1=None, +): + # draw image triplet + fig, axes = ( + plt.subplots(3, 1, figsize=(10, 6), dpi=600) + if vertical + else plt.subplots(1, 3, figsize=(10, 6), dpi=600) + ) + axes[0].imshow(img0) + axes[1].imshow(img1) + axes[2].imshow(img2) + for i in range(3): # clear all frames + axes[i].get_yaxis().set_ticks([]) + axes[i].get_xaxis().set_ticks([]) + for spine in axes[i].spines.values(): + spine.set_visible(False) + plt.tight_layout(pad=1) + + if draw_match: + # draw matches for [0,1] + fig.canvas.draw() + transFigure = fig.transFigure.inverted() + fkpts0 = transFigure.transform(axes[0].transData.transform(mkpts01[0])) + fkpts1 = transFigure.transform(axes[1].transData.transform(mkpts01[1])) + fig.lines = [ + matplotlib.lines.Line2D( + (fkpts0[i, 0], fkpts1[i, 0]), + (fkpts0[i, 1], fkpts1[i, 1]), + transform=fig.transFigure, + c=color01[i],
+ linewidth=1, + ) + for i in range(len(mkpts01[0])) + ] + + axes[0].scatter(mkpts01[0][:, 0], mkpts01[0][:, 1], c=color01[:, :3], s=1) + axes[1].scatter(mkpts01[1][:, 0], mkpts01[1][:, 1], c=color01[:, :3], s=1) + + fig.canvas.draw() + # draw matches for [1,2] + fkpts1_1 = transFigure.transform(axes[1].transData.transform(mkpts12[0])) + fkpts2 = transFigure.transform(axes[2].transData.transform(mkpts12[1])) + fig.lines += [ + matplotlib.lines.Line2D( + (fkpts1_1[i, 0], fkpts2[i, 0]), + (fkpts1_1[i, 1], fkpts2[i, 1]), + transform=fig.transFigure, + c=color12[i], + linewidth=1, + ) + for i in range(len(mkpts12[0])) + ] + + axes[1].scatter(mkpts12[0][:, 0], mkpts12[0][:, 1], c=color12[:, :3], s=1) + axes[2].scatter(mkpts12[1][:, 0], mkpts12[1][:, 1], c=color12[:, :3], s=1) + + # # put txts + # txt_color = "k" if img0[:100, :200].mean() > 200 else "w" + # fig.text( + # 0.01, + # 0.99, + # "\n".join(text), + # transform=fig.axes[0].transAxes, + # fontsize=15, + # va="top", + # ha="left", + # color=txt_color, + # ) + plt.tight_layout(pad=0.1) + + # save or return figure + if path: + plt.savefig(str(path), bbox_inches="tight", pad_inches=0) + plt.close() + else: + return fig + + +def matching_coord2color(kpts, x_center, y_center, r_normalize_factor=0.4, white_center=True): + """ + Color keypoints by their position relative to (x_center, y_center): the hue encodes + the angle around the center and the normalized radius selects the saturation/value. + r_normalize_factor (maximum 1) tunes the radial normalization to the spatial spread + of the points: smaller values give a more contrastive coloring, larger values pull + the colors toward the center color (white or dark). + """ + if not white_center: + # dark center points + V, H = np.mgrid[0:1:10j, 0:1:360j] + S = np.ones_like(V) + else: + # white center points + S, H = np.mgrid[0:1:10j, 0:1:360j] + V = np.ones_like(S) + + HSV = np.dstack((H, S, V)) + RGB = hsv_to_rgb(HSV) + """ + # used to visualize hsv + pl.imshow(RGB, origin="lower", extent=[0, 360, 0, 1], aspect=150) + pl.xlabel("H") + pl.ylabel("S") + pl.title("$V_{HSV}=1$") + pl.show() + """ + kpts = np.copy(kpts) + distance = kpts - np.array([x_center, y_center])[None] + r_max = np.percentile(np.linalg.norm(distance, axis=1), 85) + # r_max = np.sqrt((x_center) ** 2 + (y_center) ** 2) + kpts[:, 0] = kpts[:, 0] - x_center # x + kpts[:, 1] = kpts[:, 1] - y_center # y + + r = np.sqrt(kpts[:, 0] ** 2 + kpts[:, 1] ** 2) + 1e-6 + r_normalized = r / (r_max * r_normalize_factor) + r_normalized[r_normalized > 1] = 1 + r_normalized = r_normalized * 9 + + cos_theta = kpts[:, 0] / r # x / r + theta = np.arccos(cos_theta) # from 0 to pi + change_angle_mask = kpts[:, 1] < 0 + theta[change_angle_mask] = 2 * np.pi - theta[change_angle_mask] + theta_degree = np.degrees(theta) + theta_degree[theta_degree == 360] = 0 # to avoid index overflow + kpts_color = RGB[r_normalized.astype(int), theta_degree.astype(int)] + return kpts_color + + +def show_image_pair(img0, img1, path=None): + fig, axes = plt.subplots(1, 2, figsize=(10, 6), dpi=200) + axes[0].imshow(img0, cmap="gray") + axes[1].imshow(img1, cmap="gray") + for i in range(2): # clear all frames + axes[i].get_yaxis().set_ticks([]) + axes[i].get_xaxis().set_ticks([]) + for spine in axes[i].spines.values(): + spine.set_visible(False) + plt.tight_layout(pad=1) + if path: + plt.savefig(str(path), bbox_inches="tight", pad_inches=0) + return fig + +def plot_local_windows(kpts, color="r", lw=1, ax_=0, window_size=(9, 9)): + ax = plt.gcf().axes + for kpt in kpts: + ax[ax_].add_patch( + matplotlib.patches.Rectangle( + ( + kpt[0] - (window_size[0] // 2) - 1, + kpt[1] - (window_size[1] // 2) - 1, + ), + window_size[0] + 1, + window_size[1] + 1, + lw=lw, + color=color, +
fill=False, + ) + ) + diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/requirements.txt b/imcui/third_party/MatchAnything/third_party/ROMA/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..f0dbab3d4cb35a5f00e3dbc8e3f8b00a3e578428 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/requirements.txt @@ -0,0 +1,13 @@ +torch +einops +torchvision +opencv-python +kornia +albumentations +loguru +tqdm +matplotlib +h5py +wandb +timm +#xformers # Optional, used for memory-efficient attention \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/__init__.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a7c96481e0a808b68c7b3054a3e34fa0b5c45ab9 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/__init__.py @@ -0,0 +1,8 @@ +import os +from .models import roma_outdoor, roma_indoor + +DEBUG_MODE = False +RANK = int(os.environ.get('RANK', default = 0)) +GLOBAL_STEP = 0 +STEP_SIZE = 1 +LOCAL_RANK = -1 \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/benchmarks/__init__.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/benchmarks/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..de7b841a5a6ab2ba91297a181a79dfaa91c9e104 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/benchmarks/__init__.py @@ -0,0 +1,4 @@ +from .hpatches_sequences_homog_benchmark import HpatchesHomogBenchmark +from .scannet_benchmark import ScanNetBenchmark +from .megadepth_pose_estimation_benchmark import MegaDepthPoseEstimationBenchmark +from .megadepth_dense_benchmark import MegadepthDenseBenchmark diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/benchmarks/hpatches_sequences_homog_benchmark.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/benchmarks/hpatches_sequences_homog_benchmark.py new file mode 100644 index 0000000000000000000000000000000000000000..2154a471c73d9e883c3ba8ed1b90d708f4950a63 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/benchmarks/hpatches_sequences_homog_benchmark.py @@ -0,0 +1,113 @@ +from PIL import Image +import numpy as np + +import os + +from tqdm import tqdm +from roma.utils import pose_auc +import cv2 + + +class HpatchesHomogBenchmark: + """Hpatches grid goes from [0,n-1] instead of [0.5,n-0.5]""" + + def __init__(self, dataset_path) -> None: + seqs_dir = "hpatches-sequences-release" + self.seqs_path = os.path.join(dataset_path, seqs_dir) + self.seq_names = sorted(os.listdir(self.seqs_path)) + # Ignored sequences are the same as in LoFTR.
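+ # Skipping the same sequences keeps the homography AUC numbers directly
+ # comparable with LoFTR's HPatches evaluation protocol.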
+ self.ignore_seqs = set( + [ + "i_contruction", + "i_crownnight", + "i_dc", + "i_pencils", + "i_whitebuilding", + "v_artisans", + "v_astronautis", + "v_talent", + ] + ) + + def convert_coordinates(self, im_A_coords, im_A_to_im_B, wq, hq, wsup, hsup): + offset = 0.5 # Hpatches assumes that the center of the top-left pixel is at [0,0] (I think) + im_A_coords = ( + np.stack( + ( + wq * (im_A_coords[..., 0] + 1) / 2, + hq * (im_A_coords[..., 1] + 1) / 2, + ), + axis=-1, + ) + - offset + ) + im_A_to_im_B = ( + np.stack( + ( + wsup * (im_A_to_im_B[..., 0] + 1) / 2, + hsup * (im_A_to_im_B[..., 1] + 1) / 2, + ), + axis=-1, + ) + - offset + ) + return im_A_coords, im_A_to_im_B + + def benchmark(self, model, model_name = None): + n_matches = [] + homog_dists = [] + for seq_idx, seq_name in tqdm( + enumerate(self.seq_names), total=len(self.seq_names) + ): + im_A_path = os.path.join(self.seqs_path, seq_name, "1.ppm") + im_A = Image.open(im_A_path) + w1, h1 = im_A.size + for im_idx in range(2, 7): + im_B_path = os.path.join(self.seqs_path, seq_name, f"{im_idx}.ppm") + im_B = Image.open(im_B_path) + w2, h2 = im_B.size + H = np.loadtxt( + os.path.join(self.seqs_path, seq_name, "H_1_" + str(im_idx)) + ) + dense_matches, dense_certainty = model.match( + im_A_path, im_B_path + ) + good_matches, _ = model.sample(dense_matches, dense_certainty, 5000) + pos_a, pos_b = self.convert_coordinates( + good_matches[:, :2], good_matches[:, 2:], w1, h1, w2, h2 + ) + try: + H_pred, inliers = cv2.findHomography( + pos_a, + pos_b, + method = cv2.RANSAC, + confidence = 0.99999, + ransacReprojThreshold = 3 * min(w2, h2) / 480, + ) + except: + H_pred = None + if H_pred is None: + H_pred = np.zeros((3, 3)) + H_pred[2, 2] = 1.0 + corners = np.array( + [[0, 0, 1], [0, h1 - 1, 1], [w1 - 1, 0, 1], [w1 - 1, h1 - 1, 1]] + ) + real_warped_corners = np.dot(corners, np.transpose(H)) + real_warped_corners = ( + real_warped_corners[:, :2] / real_warped_corners[:, 2:] + ) + warped_corners = np.dot(corners, np.transpose(H_pred)) + warped_corners = warped_corners[:, :2] / warped_corners[:, 2:] + mean_dist = np.mean( + np.linalg.norm(real_warped_corners - warped_corners, axis=1) + ) / (min(w2, h2) / 480.0) + homog_dists.append(mean_dist) + + n_matches = np.array(n_matches) + thresholds = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] + auc = pose_auc(np.array(homog_dists), thresholds) + return { + "hpatches_homog_auc_3": auc[2], + "hpatches_homog_auc_5": auc[4], + "hpatches_homog_auc_10": auc[9], + } diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/benchmarks/megadepth_dense_benchmark.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/benchmarks/megadepth_dense_benchmark.py new file mode 100644 index 0000000000000000000000000000000000000000..0600d354b1d0dfa7f8e2b0f8882a4cc08fafeed9 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/benchmarks/megadepth_dense_benchmark.py @@ -0,0 +1,106 @@ +import torch +import numpy as np +import tqdm +from roma.datasets import MegadepthBuilder +from roma.utils import warp_kpts +from torch.utils.data import ConcatDataset +import roma + +class MegadepthDenseBenchmark: + def __init__(self, data_root="data/megadepth", h = 384, w = 512, num_samples = 2000) -> None: + mega = MegadepthBuilder(data_root=data_root) + self.dataset = ConcatDataset( + mega.build_scenes(split="test_loftr", ht=h, wt=w) + ) # fixed resolution of 384,512 + self.num_samples = num_samples + + def geometric_dist(self, depth1, depth2, T_1to2, K1, K2, dense_matches): + b, h1, w1, d = dense_matches.shape + 
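+ # geometric_dist() warps the grid of predicted matches from image A into image B
+ # with the ground-truth depths and relative pose (warp_kpts), converts both the
+ # warped and the predicted coordinates from [-1, 1] to pixel units, and reports
+ # the end-point error together with PCK at 1, 3 and 5 px over covisible pixels.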
with torch.no_grad(): + x1 = dense_matches[..., :2].reshape(b, h1 * w1, 2) + mask, x2 = warp_kpts( + x1.double(), + depth1.double(), + depth2.double(), + T_1to2.double(), + K1.double(), + K2.double(), + ) + x2 = torch.stack( + (w1 * (x2[..., 0] + 1) / 2, h1 * (x2[..., 1] + 1) / 2), dim=-1 + ) + prob = mask.float().reshape(b, h1, w1) + x2_hat = dense_matches[..., 2:] + x2_hat = torch.stack( + (w1 * (x2_hat[..., 0] + 1) / 2, h1 * (x2_hat[..., 1] + 1) / 2), dim=-1 + ) + gd = (x2_hat - x2.reshape(b, h1, w1, 2)).norm(dim=-1) + gd = gd[prob == 1] + pck_1 = (gd < 1.0).float().mean() + pck_3 = (gd < 3.0).float().mean() + pck_5 = (gd < 5.0).float().mean() + return gd, pck_1, pck_3, pck_5, prob + + def benchmark(self, model, batch_size=8): + model.train(False) + with torch.no_grad(): + gd_tot = 0.0 + pck_1_tot = 0.0 + pck_3_tot = 0.0 + pck_5_tot = 0.0 + sampler = torch.utils.data.WeightedRandomSampler( + torch.ones(len(self.dataset)), replacement=False, num_samples=self.num_samples + ) + B = batch_size + dataloader = torch.utils.data.DataLoader( + self.dataset, batch_size=B, num_workers=batch_size, sampler=sampler + ) + for idx, data in tqdm.tqdm(enumerate(dataloader), disable = roma.RANK > 0): + im_A, im_B, depth1, depth2, T_1to2, K1, K2 = ( + data["im_A"], + data["im_B"], + data["im_A_depth"].cuda(), + data["im_B_depth"].cuda(), + data["T_1to2"].cuda(), + data["K1"].cuda(), + data["K2"].cuda(), + ) + matches, certainty = model.match(im_A, im_B, batched=True) + gd, pck_1, pck_3, pck_5, prob = self.geometric_dist( + depth1, depth2, T_1to2, K1, K2, matches + ) + if roma.DEBUG_MODE: + from roma.utils.utils import tensor_to_pil + import torch.nn.functional as F + path = "vis" + H, W = model.get_output_resolution() + white_im = torch.ones((B,1,H,W),device="cuda") + im_B_transfer_rgb = F.grid_sample( + im_B.cuda(), matches[:,:,:W, 2:], mode="bilinear", align_corners=False + ) + warp_im = im_B_transfer_rgb + c_b = certainty[:,None]#(certainty*0.9 + 0.1*torch.ones_like(certainty))[:,None] + vis_im = c_b * warp_im + (1 - c_b) * white_im + for b in range(B): + import os + os.makedirs(f"{path}/{model.name}/{idx}_{b}_{H}_{W}",exist_ok=True) + tensor_to_pil(vis_im[b], unnormalize=True).save( + f"{path}/{model.name}/{idx}_{b}_{H}_{W}/warp.jpg") + tensor_to_pil(im_A[b].cuda(), unnormalize=True).save( + f"{path}/{model.name}/{idx}_{b}_{H}_{W}/im_A.jpg") + tensor_to_pil(im_B[b].cuda(), unnormalize=True).save( + f"{path}/{model.name}/{idx}_{b}_{H}_{W}/im_B.jpg") + + + gd_tot, pck_1_tot, pck_3_tot, pck_5_tot = ( + gd_tot + gd.mean(), + pck_1_tot + pck_1, + pck_3_tot + pck_3, + pck_5_tot + pck_5, + ) + return { + "epe": gd_tot.item() / len(dataloader), + "mega_pck_1": pck_1_tot.item() / len(dataloader), + "mega_pck_3": pck_3_tot.item() / len(dataloader), + "mega_pck_5": pck_5_tot.item() / len(dataloader), + } diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/benchmarks/megadepth_pose_estimation_benchmark.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/benchmarks/megadepth_pose_estimation_benchmark.py new file mode 100644 index 0000000000000000000000000000000000000000..8007fe8ecad09c33401450ad6b7af1f3dad043d2 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/benchmarks/megadepth_pose_estimation_benchmark.py @@ -0,0 +1,140 @@ +import numpy as np +import torch +from roma.utils import * +from PIL import Image +from tqdm import tqdm +import torch.nn.functional as F +import roma +import kornia.geometry.epipolar as kepi + +class MegaDepthPoseEstimationBenchmark: + def 
__init__(self, data_root="data/megadepth", scene_names = None) -> None: + if scene_names is None: + self.scene_names = [ + "0015_0.1_0.3.npz", + "0015_0.3_0.5.npz", + "0022_0.1_0.3.npz", + "0022_0.3_0.5.npz", + "0022_0.5_0.7.npz", + ] + else: + self.scene_names = scene_names + self.scenes = [ + np.load(f"{data_root}/{scene}", allow_pickle=True) + for scene in self.scene_names + ] + self.data_root = data_root + + def benchmark(self, model, model_name = None, resolution = None, scale_intrinsics = True, calibrated = True): + H,W = model.get_output_resolution() + with torch.no_grad(): + data_root = self.data_root + tot_e_t, tot_e_R, tot_e_pose = [], [], [] + thresholds = [5, 10, 20] + for scene_ind in range(len(self.scenes)): + import os + scene_name = os.path.splitext(self.scene_names[scene_ind])[0] + scene = self.scenes[scene_ind] + pairs = scene["pair_infos"] + intrinsics = scene["intrinsics"] + poses = scene["poses"] + im_paths = scene["image_paths"] + pair_inds = range(len(pairs)) + for pairind in tqdm(pair_inds): + idx1, idx2 = pairs[pairind][0] + K1 = intrinsics[idx1].copy() + T1 = poses[idx1].copy() + R1, t1 = T1[:3, :3], T1[:3, 3] + K2 = intrinsics[idx2].copy() + T2 = poses[idx2].copy() + R2, t2 = T2[:3, :3], T2[:3, 3] + R, t = compute_relative_pose(R1, t1, R2, t2) + T1_to_2 = np.concatenate((R,t[:,None]), axis=-1) + im_A_path = f"{data_root}/{im_paths[idx1]}" + im_B_path = f"{data_root}/{im_paths[idx2]}" + dense_matches, dense_certainty = model.match( + im_A_path, im_B_path, K1.copy(), K2.copy(), T1_to_2.copy() + ) + sparse_matches,_ = model.sample( + dense_matches, dense_certainty, 5000 + ) + + im_A = Image.open(im_A_path) + w1, h1 = im_A.size + im_B = Image.open(im_B_path) + w2, h2 = im_B.size + + if scale_intrinsics: + scale1 = 1200 / max(w1, h1) + scale2 = 1200 / max(w2, h2) + w1, h1 = scale1 * w1, scale1 * h1 + w2, h2 = scale2 * w2, scale2 * h2 + K1, K2 = K1.copy(), K2.copy() + K1[:2] = K1[:2] * scale1 + K2[:2] = K2[:2] * scale2 + + kpts1 = sparse_matches[:, :2] + kpts1 = ( + np.stack( + ( + w1 * (kpts1[:, 0] + 1) / 2, + h1 * (kpts1[:, 1] + 1) / 2, + ), + axis=-1, + ) + ) + kpts2 = sparse_matches[:, 2:] + kpts2 = ( + np.stack( + ( + w2 * (kpts2[:, 0] + 1) / 2, + h2 * (kpts2[:, 1] + 1) / 2, + ), + axis=-1, + ) + ) + + for _ in range(5): + shuffling = np.random.permutation(np.arange(len(kpts1))) + kpts1 = kpts1[shuffling] + kpts2 = kpts2[shuffling] + try: + threshold = 0.5 + if calibrated: + norm_threshold = threshold / (np.mean(np.abs(K1[:2, :2])) + np.mean(np.abs(K2[:2, :2]))) + R_est, t_est, mask = estimate_pose( + kpts1, + kpts2, + K1, + K2, + norm_threshold, + conf=0.99999, + ) + T1_to_2_est = np.concatenate((R_est, t_est), axis=-1) # + e_t, e_R = compute_pose_error(T1_to_2_est, R, t) + e_pose = max(e_t, e_R) + except Exception as e: + print(repr(e)) + e_t, e_R = 90, 90 + e_pose = max(e_t, e_R) + tot_e_t.append(e_t) + tot_e_R.append(e_R) + tot_e_pose.append(e_pose) + tot_e_pose = np.array(tot_e_pose) + auc = pose_auc(tot_e_pose, thresholds) + acc_5 = (tot_e_pose < 5).mean() + acc_10 = (tot_e_pose < 10).mean() + acc_15 = (tot_e_pose < 15).mean() + acc_20 = (tot_e_pose < 20).mean() + map_5 = acc_5 + map_10 = np.mean([acc_5, acc_10]) + map_20 = np.mean([acc_5, acc_10, acc_15, acc_20]) + print(f"{model_name} auc: {auc}") + return { + "auc_5": auc[0], + "auc_10": auc[1], + "auc_20": auc[2], + "map_5": map_5, + "map_10": map_10, + "map_20": map_20, + } diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/benchmarks/scannet_benchmark.py 
b/imcui/third_party/MatchAnything/third_party/ROMA/roma/benchmarks/scannet_benchmark.py new file mode 100644 index 0000000000000000000000000000000000000000..853af0d0ebef4dfefe2632eb49e4156ea791ee76 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/benchmarks/scannet_benchmark.py @@ -0,0 +1,143 @@ +import os.path as osp +import numpy as np +import torch +from roma.utils import * +from PIL import Image +from tqdm import tqdm + + +class ScanNetBenchmark: + def __init__(self, data_root="data/scannet") -> None: + self.data_root = data_root + + def benchmark(self, model, model_name = None): + model.train(False) + with torch.no_grad(): + data_root = self.data_root + tmp = np.load(osp.join(data_root, "test.npz")) + pairs, rel_pose = tmp["name"], tmp["rel_pose"] + tot_e_t, tot_e_R, tot_e_pose = [], [], [] + pair_inds = np.random.choice( + range(len(pairs)), size=len(pairs), replace=False + ) + for pairind in tqdm(pair_inds, smoothing=0.9): + scene = pairs[pairind] + scene_name = f"scene0{scene[0]}_00" + im_A_path = osp.join( + self.data_root, + "scans_test", + scene_name, + "color", + f"{scene[2]}.jpg", + ) + im_A = Image.open(im_A_path) + im_B_path = osp.join( + self.data_root, + "scans_test", + scene_name, + "color", + f"{scene[3]}.jpg", + ) + im_B = Image.open(im_B_path) + T_gt = rel_pose[pairind].reshape(3, 4) + R, t = T_gt[:3, :3], T_gt[:3, 3] + K = np.stack( + [ + np.array([float(i) for i in r.split()]) + for r in open( + osp.join( + self.data_root, + "scans_test", + scene_name, + "intrinsic", + "intrinsic_color.txt", + ), + "r", + ) + .read() + .split("\n") + if r + ] + ) + w1, h1 = im_A.size + w2, h2 = im_B.size + K1 = K.copy() + K2 = K.copy() + dense_matches, dense_certainty = model.match(im_A_path, im_B_path) + sparse_matches, sparse_certainty = model.sample( + dense_matches, dense_certainty, 5000 + ) + scale1 = 480 / min(w1, h1) + scale2 = 480 / min(w2, h2) + w1, h1 = scale1 * w1, scale1 * h1 + w2, h2 = scale2 * w2, scale2 * h2 + K1 = K1 * scale1 + K2 = K2 * scale2 + + offset = 0.5 + kpts1 = sparse_matches[:, :2] + kpts1 = ( + np.stack( + ( + w1 * (kpts1[:, 0] + 1) / 2 - offset, + h1 * (kpts1[:, 1] + 1) / 2 - offset, + ), + axis=-1, + ) + ) + kpts2 = sparse_matches[:, 2:] + kpts2 = ( + np.stack( + ( + w2 * (kpts2[:, 0] + 1) / 2 - offset, + h2 * (kpts2[:, 1] + 1) / 2 - offset, + ), + axis=-1, + ) + ) + for _ in range(5): + shuffling = np.random.permutation(np.arange(len(kpts1))) + kpts1 = kpts1[shuffling] + kpts2 = kpts2[shuffling] + try: + norm_threshold = 0.5 / ( + np.mean(np.abs(K1[:2, :2])) + np.mean(np.abs(K2[:2, :2]))) + R_est, t_est, mask = estimate_pose( + kpts1, + kpts2, + K1, + K2, + norm_threshold, + conf=0.99999, + ) + T1_to_2_est = np.concatenate((R_est, t_est), axis=-1) + e_t, e_R = compute_pose_error(T1_to_2_est, R, t) + e_pose = max(e_t, e_R) + except Exception as e: + print(repr(e)) + e_t, e_R = 90, 90 + e_pose = max(e_t, e_R) + tot_e_t.append(e_t) + tot_e_R.append(e_R) + tot_e_pose.append(e_pose) + tot_e_pose = np.array(tot_e_pose) + thresholds = [5, 10, 20] + auc = pose_auc(tot_e_pose, thresholds) + acc_5 = (tot_e_pose < 5).mean() + acc_10 = (tot_e_pose < 10).mean() + acc_15 = (tot_e_pose < 15).mean() + acc_20 = (tot_e_pose < 20).mean() + map_5 = acc_5 + map_10 = np.mean([acc_5, acc_10]) + map_20 = np.mean([acc_5, acc_10, acc_15, acc_20]) + return { + "auc_5": auc[0], + "auc_10": auc[1], + "auc_20": auc[2], + "map_5": map_5, + "map_10": map_10, + "map_20": map_20, + } diff
--git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/checkpointing/__init__.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/checkpointing/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..22f5afe727aa6f6e8fffa9ecf5be69cbff686577 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/checkpointing/__init__.py @@ -0,0 +1 @@ +from .checkpoint import CheckPoint diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/checkpointing/checkpoint.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/checkpointing/checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..8995efeb54f4d558127ea63423fa958c64e9088f --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/checkpointing/checkpoint.py @@ -0,0 +1,60 @@ +import os +import torch +from torch.nn.parallel.data_parallel import DataParallel +from torch.nn.parallel.distributed import DistributedDataParallel +from loguru import logger +import gc + +import roma + +class CheckPoint: + def __init__(self, dir=None, name="tmp"): + self.name = name + self.dir = dir + os.makedirs(self.dir, exist_ok=True) + + def save( + self, + model, + optimizer, + lr_scheduler, + n, + ): + if roma.RANK == 0: + assert model is not None + if isinstance(model, (DataParallel, DistributedDataParallel)): + model = model.module + states = { + "model": model.state_dict(), + "n": n, + "optimizer": optimizer.state_dict(), + "lr_scheduler": lr_scheduler.state_dict(), + } + torch.save(states, self.dir + self.name + f"_latest.pth") + logger.info(f"Saved states {list(states.keys())}, at step {n}") + + def load( + self, + model, + optimizer, + lr_scheduler, + n, + ): + if os.path.exists(self.dir + self.name + f"_latest.pth") and roma.RANK == 0: + states = torch.load(self.dir + self.name + f"_latest.pth") + if "model" in states: + model.load_state_dict(states["model"]) + if "n" in states: + n = states["n"] if states["n"] else n + if "optimizer" in states: + try: + optimizer.load_state_dict(states["optimizer"]) + except Exception as e: + print(f"Failed to load states for optimizer, with error {e}") + if "lr_scheduler" in states: + lr_scheduler.load_state_dict(states["lr_scheduler"]) + print(f"Loaded states {list(states.keys())}, at step {n}") + del states + gc.collect() + torch.cuda.empty_cache() + return model, optimizer, lr_scheduler, n \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/datasets/__init__.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/datasets/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b60c709926a4a7bd019b73eac10879063a996c90 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/datasets/__init__.py @@ -0,0 +1,2 @@ +from .megadepth import MegadepthBuilder +from .scannet import ScanNetBuilder \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/datasets/megadepth.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/datasets/megadepth.py new file mode 100644 index 0000000000000000000000000000000000000000..5deee5ac30c439a9f300c0ad2271f141931020c0 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/datasets/megadepth.py @@ -0,0 +1,230 @@ +import os +from PIL import Image +import h5py +import numpy as np +import torch +import torchvision.transforms.functional as tvf +import kornia.augmentation as K +from roma.utils import get_depth_tuple_transform_ops, 
get_tuple_transform_ops +import roma +from roma.utils import * +import math + +class MegadepthScene: + def __init__( + self, + data_root, + scene_info, + ht=384, + wt=512, + min_overlap=0.0, + max_overlap=1.0, + shake_t=0, + rot_prob=0.0, + normalize=True, + max_num_pairs = 100_000, + scene_name = None, + use_horizontal_flip_aug = False, + use_single_horizontal_flip_aug = False, + colorjiggle_params = None, + random_eraser = None, + use_randaug = False, + randaug_params = None, + randomize_size = False, + ) -> None: + self.data_root = data_root + self.scene_name = os.path.splitext(scene_name)[0]+f"_{min_overlap}_{max_overlap}" + self.image_paths = scene_info["image_paths"] + self.depth_paths = scene_info["depth_paths"] + self.intrinsics = scene_info["intrinsics"] + self.poses = scene_info["poses"] + self.pairs = scene_info["pairs"] + self.overlaps = scene_info["overlaps"] + threshold = (self.overlaps > min_overlap) & (self.overlaps < max_overlap) + self.pairs = self.pairs[threshold] + self.overlaps = self.overlaps[threshold] + if len(self.pairs) > max_num_pairs: + pairinds = np.random.choice( + np.arange(0, len(self.pairs)), max_num_pairs, replace=False + ) + self.pairs = self.pairs[pairinds] + self.overlaps = self.overlaps[pairinds] + if randomize_size: + area = ht * wt + s = int(16 * (math.sqrt(area)//16)) + sizes = ((ht,wt), (s,s), (wt,ht)) + choice = roma.RANK % 3 + ht, wt = sizes[choice] + # counts, bins = np.histogram(self.overlaps,20) + # print(counts) + self.im_transform_ops = get_tuple_transform_ops( + resize=(ht, wt), normalize=normalize, colorjiggle_params = colorjiggle_params, + ) + self.depth_transform_ops = get_depth_tuple_transform_ops( + resize=(ht, wt) + ) + self.wt, self.ht = wt, ht + self.shake_t = shake_t + self.random_eraser = random_eraser + if use_horizontal_flip_aug and use_single_horizontal_flip_aug: + raise ValueError("Can't both flip both images and only flip one") + self.use_horizontal_flip_aug = use_horizontal_flip_aug + self.use_single_horizontal_flip_aug = use_single_horizontal_flip_aug + self.use_randaug = use_randaug + + def load_im(self, im_path): + im = Image.open(im_path) + return im + + def horizontal_flip(self, im_A, im_B, depth_A, depth_B, K_A, K_B): + im_A = im_A.flip(-1) + im_B = im_B.flip(-1) + depth_A, depth_B = depth_A.flip(-1), depth_B.flip(-1) + flip_mat = torch.tensor([[-1, 0, self.wt],[0,1,0],[0,0,1.]]).to(K_A.device) + K_A = flip_mat@K_A + K_B = flip_mat@K_B + + return im_A, im_B, depth_A, depth_B, K_A, K_B + + def load_depth(self, depth_ref, crop=None): + depth = np.array(h5py.File(depth_ref, "r")["depth"]) + return torch.from_numpy(depth) + + def __len__(self): + return len(self.pairs) + + def scale_intrinsic(self, K, wi, hi): + sx, sy = self.wt / wi, self.ht / hi + sK = torch.tensor([[sx, 0, 0], [0, sy, 0], [0, 0, 1]]) + return sK @ K + + def rand_shake(self, *things): + t = np.random.choice(range(-self.shake_t, self.shake_t + 1), size=2) + return [ + tvf.affine(thing, angle=0.0, translate=list(t), scale=1.0, shear=[0.0, 0.0]) + for thing in things + ], t + + def __getitem__(self, pair_idx): + # read intrinsics of original size + idx1, idx2 = self.pairs[pair_idx] + K1 = torch.tensor(self.intrinsics[idx1].copy(), dtype=torch.float).reshape(3, 3) + K2 = torch.tensor(self.intrinsics[idx2].copy(), dtype=torch.float).reshape(3, 3) + + # read and compute relative poses + T1 = self.poses[idx1] + T2 = self.poses[idx2] + T_1to2 = torch.tensor(np.matmul(T2, np.linalg.inv(T1)), dtype=torch.float)[ + :4, :4 + ] # (4, 4) + + # Load positive pair data 
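`scale_intrinsic` above folds an image resize from (wi, hi) to (wt, ht) into the calibration matrix by left-multiplying a diagonal scale. A tiny worked check with illustrative numbers:

```python
import torch

K = torch.tensor([[1000.0, 0.0, 320.0],
                  [0.0, 1000.0, 240.0],
                  [0.0, 0.0, 1.0]])
wi, hi, wt, ht = 640, 480, 512, 384        # original and target sizes (illustrative)
sx, sy = wt / wi, ht / hi
sK = torch.tensor([[sx, 0.0, 0.0], [0.0, sy, 0.0], [0.0, 0.0, 1.0]])
K_scaled = sK @ K
# focal lengths and principal point scale with the resize factors
assert torch.allclose(K_scaled[0], torch.tensor([800.0, 0.0, 256.0]))
assert torch.allclose(K_scaled[1], torch.tensor([0.0, 750.0, 180.0]))
```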
+ im_A, im_B = self.image_paths[idx1], self.image_paths[idx2] + depth1, depth2 = self.depth_paths[idx1], self.depth_paths[idx2] + im_A_ref = os.path.join(self.data_root, im_A) + im_B_ref = os.path.join(self.data_root, im_B) + depth_A_ref = os.path.join(self.data_root, depth1) + depth_B_ref = os.path.join(self.data_root, depth2) + im_A = self.load_im(im_A_ref) + im_B = self.load_im(im_B_ref) + K1 = self.scale_intrinsic(K1, im_A.width, im_A.height) + K2 = self.scale_intrinsic(K2, im_B.width, im_B.height) + + if self.use_randaug: + im_A, im_B = self.rand_augment(im_A, im_B) + + depth_A = self.load_depth(depth_A_ref) + depth_B = self.load_depth(depth_B_ref) + # Process images + im_A, im_B = self.im_transform_ops((im_A, im_B)) + depth_A, depth_B = self.depth_transform_ops( + (depth_A[None, None], depth_B[None, None]) + ) + + [im_A, im_B, depth_A, depth_B], t = self.rand_shake(im_A, im_B, depth_A, depth_B) + K1[:2, 2] += t + K2[:2, 2] += t + + im_A, im_B = im_A[None], im_B[None] + if self.random_eraser is not None: + im_A, depth_A = self.random_eraser(im_A, depth_A) + im_B, depth_B = self.random_eraser(im_B, depth_B) + + if self.use_horizontal_flip_aug: + if np.random.rand() > 0.5: + im_A, im_B, depth_A, depth_B, K1, K2 = self.horizontal_flip(im_A, im_B, depth_A, depth_B, K1, K2) + if self.use_single_horizontal_flip_aug: + if np.random.rand() > 0.5: + im_B, depth_B, K2 = self.single_horizontal_flip(im_B, depth_B, K2) + + if roma.DEBUG_MODE: + tensor_to_pil(im_A[0], unnormalize=True).save( + f"vis/im_A.jpg") + tensor_to_pil(im_B[0], unnormalize=True).save( + f"vis/im_B.jpg") + + data_dict = { + "im_A": im_A[0], + "im_A_identifier": self.image_paths[idx1].split("/")[-1].split(".jpg")[0], + "im_B": im_B[0], + "im_B_identifier": self.image_paths[idx2].split("/")[-1].split(".jpg")[0], + "im_A_depth": depth_A[0, 0], + "im_B_depth": depth_B[0, 0], + "K1": K1, + "K2": K2, + "T_1to2": T_1to2, + "im_A_path": im_A_ref, + "im_B_path": im_B_ref, + + } + return data_dict + + +class MegadepthBuilder: + def __init__(self, data_root="data/megadepth", loftr_ignore=True, imc21_ignore = True) -> None: + self.data_root = data_root + self.scene_info_root = os.path.join(data_root, "prep_scene_info") + self.all_scenes = os.listdir(self.scene_info_root) + self.test_scenes = ["0017.npy", "0004.npy", "0048.npy", "0013.npy"] + # LoFTR did the D2-net preprocessing differently than we did and got more ignore scenes, can optionially ignore those + self.loftr_ignore_scenes = set(['0121.npy', '0133.npy', '0168.npy', '0178.npy', '0229.npy', '0349.npy', '0412.npy', '0430.npy', '0443.npy', '1001.npy', '5014.npy', '5015.npy', '5016.npy']) + self.imc21_scenes = set(['0008.npy', '0019.npy', '0021.npy', '0024.npy', '0025.npy', '0032.npy', '0063.npy', '1589.npy']) + self.test_scenes_loftr = ["0015.npy", "0022.npy"] + self.loftr_ignore = loftr_ignore + self.imc21_ignore = imc21_ignore + + def build_scenes(self, split="train", min_overlap=0.0, scene_names = None, **kwargs): + if split == "train": + scene_names = set(self.all_scenes) - set(self.test_scenes) + elif split == "train_loftr": + scene_names = set(self.all_scenes) - set(self.test_scenes_loftr) + elif split == "test": + scene_names = self.test_scenes + elif split == "test_loftr": + scene_names = self.test_scenes_loftr + elif split == "custom": + scene_names = scene_names + else: + raise ValueError(f"Split {split} not available") + scenes = [] + for scene_name in scene_names: + if self.loftr_ignore and scene_name in self.loftr_ignore_scenes: + continue + if self.imc21_ignore and 
scene_name in self.imc21_scenes: + continue + scene_info = np.load( + os.path.join(self.scene_info_root, scene_name), allow_pickle=True + ).item() + scenes.append( + MegadepthScene( + self.data_root, scene_info, min_overlap=min_overlap,scene_name = scene_name, **kwargs + ) + ) + return scenes + + def weight_scenes(self, concat_dataset, alpha=0.5): + ns = [] + for d in concat_dataset.datasets: + ns.append(len(d)) + ws = torch.cat([torch.ones(n) / n**alpha for n in ns]) + return ws diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/datasets/scannet.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/datasets/scannet.py new file mode 100644 index 0000000000000000000000000000000000000000..704ea57259afdfbbca627ad143bee97a0a79d41c --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/datasets/scannet.py @@ -0,0 +1,160 @@ +import os +import random +from PIL import Image +import cv2 +import h5py +import numpy as np +import torch +from torch.utils.data import ( + Dataset, + DataLoader, + ConcatDataset) + +import torchvision.transforms.functional as tvf +import kornia.augmentation as K +import os.path as osp +import matplotlib.pyplot as plt +import roma +from roma.utils import get_depth_tuple_transform_ops, get_tuple_transform_ops +from roma.utils.transforms import GeometricSequential +from tqdm import tqdm + +class ScanNetScene: + def __init__(self, data_root, scene_info, ht = 384, wt = 512, min_overlap=0., shake_t = 0, rot_prob=0.,use_horizontal_flip_aug = False, +) -> None: + self.scene_root = osp.join(data_root,"scans","scans_train") + self.data_names = scene_info['name'] + self.overlaps = scene_info['score'] + # Only sample 10s + valid = (self.data_names[:,-2:] % 10).sum(axis=-1) == 0 + self.overlaps = self.overlaps[valid] + self.data_names = self.data_names[valid] + if len(self.data_names) > 10000: + pairinds = np.random.choice(np.arange(0,len(self.data_names)),10000,replace=False) + self.data_names = self.data_names[pairinds] + self.overlaps = self.overlaps[pairinds] + self.im_transform_ops = get_tuple_transform_ops(resize=(ht, wt), normalize=True) + self.depth_transform_ops = get_depth_tuple_transform_ops(resize=(ht, wt), normalize=False) + self.wt, self.ht = wt, ht + self.shake_t = shake_t + self.H_generator = GeometricSequential(K.RandomAffine(degrees=90, p=rot_prob)) + self.use_horizontal_flip_aug = use_horizontal_flip_aug + + def load_im(self, im_B, crop=None): + im = Image.open(im_B) + return im + + def load_depth(self, depth_ref, crop=None): + depth = cv2.imread(str(depth_ref), cv2.IMREAD_UNCHANGED) + depth = depth / 1000 + depth = torch.from_numpy(depth).float() # (h, w) + return depth + + def __len__(self): + return len(self.data_names) + + def scale_intrinsic(self, K, wi, hi): + sx, sy = self.wt / wi, self.ht / hi + sK = torch.tensor([[sx, 0, 0], + [0, sy, 0], + [0, 0, 1]]) + return sK@K + + def horizontal_flip(self, im_A, im_B, depth_A, depth_B, K_A, K_B): + im_A = im_A.flip(-1) + im_B = im_B.flip(-1) + depth_A, depth_B = depth_A.flip(-1), depth_B.flip(-1) + flip_mat = torch.tensor([[-1, 0, self.wt],[0,1,0],[0,0,1.]]).to(K_A.device) + K_A = flip_mat@K_A + K_B = flip_mat@K_B + + return im_A, im_B, depth_A, depth_B, K_A, K_B + def read_scannet_pose(self,path): + """ Read ScanNet's Camera2World pose and transform it to World2Camera. 
+ + Returns: + pose_w2c (np.ndarray): (4, 4) + """ + cam2world = np.loadtxt(path, delimiter=' ') + world2cam = np.linalg.inv(cam2world) + return world2cam + + + def read_scannet_intrinsic(self,path): + """ Read ScanNet's intrinsic matrix and return the 3x3 matrix. + """ + intrinsic = np.loadtxt(path, delimiter=' ') + return torch.tensor(intrinsic[:-1, :-1], dtype = torch.float) + + def __getitem__(self, pair_idx): + # read intrinsics of original size + data_name = self.data_names[pair_idx] + scene_name, scene_sub_name, stem_name_1, stem_name_2 = data_name + scene_name = f'scene{scene_name:04d}_{scene_sub_name:02d}' + + # read the intrinsic of depthmap + K1 = K2 = self.read_scannet_intrinsic(osp.join(self.scene_root, + scene_name, + 'intrinsic', 'intrinsic_color.txt'))#the depth K is not the same, but doesnt really matter + # read and compute relative poses + T1 = self.read_scannet_pose(osp.join(self.scene_root, + scene_name, + 'pose', f'{stem_name_1}.txt')) + T2 = self.read_scannet_pose(osp.join(self.scene_root, + scene_name, + 'pose', f'{stem_name_2}.txt')) + T_1to2 = torch.tensor(np.matmul(T2, np.linalg.inv(T1)), dtype=torch.float)[:4, :4] # (4, 4) + + # Load positive pair data + im_A_ref = os.path.join(self.scene_root, scene_name, 'color', f'{stem_name_1}.jpg') + im_B_ref = os.path.join(self.scene_root, scene_name, 'color', f'{stem_name_2}.jpg') + depth_A_ref = os.path.join(self.scene_root, scene_name, 'depth', f'{stem_name_1}.png') + depth_B_ref = os.path.join(self.scene_root, scene_name, 'depth', f'{stem_name_2}.png') + + im_A = self.load_im(im_A_ref) + im_B = self.load_im(im_B_ref) + depth_A = self.load_depth(depth_A_ref) + depth_B = self.load_depth(depth_B_ref) + + # Recompute camera intrinsic matrix due to the resize + K1 = self.scale_intrinsic(K1, im_A.width, im_A.height) + K2 = self.scale_intrinsic(K2, im_B.width, im_B.height) + # Process images + im_A, im_B = self.im_transform_ops((im_A, im_B)) + depth_A, depth_B = self.depth_transform_ops((depth_A[None,None], depth_B[None,None])) + if self.use_horizontal_flip_aug: + if np.random.rand() > 0.5: + im_A, im_B, depth_A, depth_B, K1, K2 = self.horizontal_flip(im_A, im_B, depth_A, depth_B, K1, K2) + + data_dict = {'im_A': im_A, + 'im_B': im_B, + 'im_A_depth': depth_A[0,0], + 'im_B_depth': depth_B[0,0], + 'K1': K1, + 'K2': K2, + 'T_1to2':T_1to2, + } + return data_dict + + +class ScanNetBuilder: + def __init__(self, data_root = 'data/scannet') -> None: + self.data_root = data_root + self.scene_info_root = os.path.join(data_root,'scannet_indices') + self.all_scenes = os.listdir(self.scene_info_root) + + def build_scenes(self, split = 'train', min_overlap=0., **kwargs): + # Note: split doesn't matter here as we always use same scannet_train scenes + scene_names = self.all_scenes + scenes = [] + for scene_name in tqdm(scene_names, disable = roma.RANK > 0): + scene_info = np.load(os.path.join(self.scene_info_root,scene_name), allow_pickle=True) + scenes.append(ScanNetScene(self.data_root, scene_info, min_overlap=min_overlap, **kwargs)) + return scenes + + def weight_scenes(self, concat_dataset, alpha=.5): + ns = [] + for d in concat_dataset.datasets: + ns.append(len(d)) + ws = torch.cat([torch.ones(n)/n**alpha for n in ns]) + return ws diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/losses/__init__.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/losses/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2e08abacfc0f83d7de0f2ddc0583766a80bf53cf --- /dev/null +++ 
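`weight_scenes` (defined here and in `MegadepthBuilder`) emits one weight per sample, 1/n^alpha for a scene with n pairs. A plausible wiring into a `WeightedRandomSampler` follows; the paths and hyperparameters are illustrative and the actual training script may differ:

```python
import torch
from torch.utils.data import ConcatDataset, DataLoader, WeightedRandomSampler
from roma.datasets import ScanNetBuilder

builder = ScanNetBuilder(data_root="data/scannet")
scenes = builder.build_scenes(split="train", ht=384, wt=512)
dataset = ConcatDataset(scenes)
# alpha < 1 damps the head of the scene-size distribution so small scenes still get sampled
weights = builder.weight_scenes(dataset, alpha=0.75)
sampler = WeightedRandomSampler(weights, num_samples=len(weights), replacement=True)
loader = DataLoader(dataset, batch_size=8, sampler=sampler, num_workers=4)
```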
b/imcui/third_party/MatchAnything/third_party/ROMA/roma/losses/__init__.py @@ -0,0 +1 @@ +from .robust_loss import RobustLosses \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/losses/robust_loss.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/losses/robust_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..7375a15c27775ac06718f471a99cf186c7a3dba1 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/losses/robust_loss.py @@ -0,0 +1,157 @@ +from einops.einops import rearrange +import torch +import torch.nn as nn +import torch.nn.functional as F +from roma.utils.utils import get_gt_warp +import wandb # required: the loss methods below call wandb.log unconditionally +import roma +import math + +class RobustLosses(nn.Module): + def __init__( + self, + robust=False, + center_coords=False, + scale_normalize=False, + ce_weight=0.01, + local_loss=True, + local_dist=4.0, + local_largest_scale=8, + smooth_mask = False, + depth_interpolation_mode = "bilinear", + mask_depth_loss = False, + relative_depth_error_threshold = 0.05, + alpha = 1., + c = 1e-3, + ): + super().__init__() + self.robust = robust # measured in pixels + self.center_coords = center_coords + self.scale_normalize = scale_normalize + self.ce_weight = ce_weight + self.local_loss = local_loss + self.local_dist = local_dist # configs pass a per-scale dict; indexed by scale in forward() + self.local_largest_scale = local_largest_scale + self.smooth_mask = smooth_mask + self.depth_interpolation_mode = depth_interpolation_mode + self.mask_depth_loss = mask_depth_loss + self.relative_depth_error_threshold = relative_depth_error_threshold + self.avg_overlap = dict() + self.alpha = alpha + self.c = c + + def gm_cls_loss(self, x2, prob, scale_gm_cls, gm_certainty, scale): + with torch.no_grad(): + B, C, H, W = scale_gm_cls.shape + device = x2.device + cls_res = round(math.sqrt(C)) + G = torch.meshgrid(*[torch.linspace(-1+1/cls_res, 1 - 1/cls_res, steps = cls_res,device = device) for _ in range(2)]) + G = torch.stack((G[1], G[0]), dim = -1).reshape(C,2) + GT = (G[None,:,None,None,:]-x2[:,None]).norm(dim=-1).min(dim=1).indices + cls_loss = F.cross_entropy(scale_gm_cls, GT, reduction = 'none')[prob > 0.99] + certainty_loss = F.binary_cross_entropy_with_logits(gm_certainty[:,0], prob) + if not torch.any(cls_loss): + cls_loss = (certainty_loss * 0.0) # Prevent issues where prob is 0 everywhere; certainty_loss must be computed before this fallback + losses = { + f"gm_certainty_loss_{scale}": certainty_loss.mean(), + f"gm_cls_loss_{scale}": cls_loss.mean(), + } + wandb.log(losses, step = roma.GLOBAL_STEP) + return losses + + def delta_cls_loss(self, x2, prob, flow_pre_delta, delta_cls, certainty, scale, offset_scale): + with torch.no_grad(): + B, C, H, W = delta_cls.shape + device = x2.device + cls_res = round(math.sqrt(C)) + G = torch.meshgrid(*[torch.linspace(-1+1/cls_res, 1 - 1/cls_res, steps = cls_res,device = device) for _ in range(2)]) + G = torch.stack((G[1], G[0]), dim = -1).reshape(C,2) * offset_scale + GT = (G[None,:,None,None,:] + flow_pre_delta[:,None] - x2[:,None]).norm(dim=-1).min(dim=1).indices + cls_loss = F.cross_entropy(delta_cls, GT, reduction = 'none')[prob > 0.99] + certainty_loss = F.binary_cross_entropy_with_logits(certainty[:,0], prob) + if not torch.any(cls_loss): + cls_loss = (certainty_loss * 0.0) # Prevent issues where prob is 0 everywhere; certainty_loss must be computed before this fallback + losses = { + f"delta_certainty_loss_{scale}": certainty_loss.mean(), + f"delta_cls_loss_{scale}": cls_loss.mean(), + } + wandb.log(losses, step = roma.GLOBAL_STEP) + return losses + + def regression_loss(self, x2, prob, flow, certainty, scale, eps=1e-8, mode = 
"delta"): + epe = (flow.permute(0,2,3,1) - x2).norm(dim=-1) + if scale == 1: + pck_05 = (epe[prob > 0.99] < 0.5 * (2/512)).float().mean() + wandb.log({"train_pck_05": pck_05}, step = roma.GLOBAL_STEP) + + ce_loss = F.binary_cross_entropy_with_logits(certainty[:, 0], prob) + a = self.alpha + cs = self.c * scale + x = epe[prob > 0.99] + reg_loss = cs**a * ((x/(cs))**2 + 1**2)**(a/2) + if not torch.any(reg_loss): + reg_loss = (ce_loss * 0.0) # Prevent issues where prob is 0 everywhere + losses = { + f"{mode}_certainty_loss_{scale}": ce_loss.mean(), + f"{mode}_regression_loss_{scale}": reg_loss.mean(), + } + wandb.log(losses, step = roma.GLOBAL_STEP) + return losses + + def forward(self, corresps, batch): + scales = list(corresps.keys()) + tot_loss = 0.0 + # scale_weights due to differences in scale for regression gradients and classification gradients + scale_weights = {1:1, 2:1, 4:1, 8:1, 16:1} + for scale in scales: + scale_corresps = corresps[scale] + scale_certainty, flow_pre_delta, delta_cls, offset_scale, scale_gm_cls, scale_gm_certainty, flow, scale_gm_flow = ( + scale_corresps["certainty"], + scale_corresps["flow_pre_delta"], + scale_corresps.get("delta_cls"), + scale_corresps.get("offset_scale"), + scale_corresps.get("gm_cls"), + scale_corresps.get("gm_certainty"), + scale_corresps["flow"], + scale_corresps.get("gm_flow"), + + ) + flow_pre_delta = rearrange(flow_pre_delta, "b d h w -> b h w d") + b, h, w, d = flow_pre_delta.shape + gt_warp, gt_prob = get_gt_warp( + batch["im_A_depth"], + batch["im_B_depth"], + batch["T_1to2"], + batch["K1"], + batch["K2"], + H=h, + W=w, + ) + x2 = gt_warp.float() + prob = gt_prob + + if self.local_largest_scale >= scale: + prob = prob * ( + F.interpolate(prev_epe[:, None], size=(h, w), mode="nearest-exact")[:, 0] + < (2 / 512) * (self.local_dist[scale] * scale)) + + if scale_gm_cls is not None: + gm_cls_losses = self.gm_cls_loss(x2, prob, scale_gm_cls, scale_gm_certainty, scale) + gm_loss = self.ce_weight * gm_cls_losses[f"gm_certainty_loss_{scale}"] + gm_cls_losses[f"gm_cls_loss_{scale}"] + tot_loss = tot_loss + scale_weights[scale] * gm_loss + elif scale_gm_flow is not None: + gm_flow_losses = self.regression_loss(x2, prob, scale_gm_flow, scale_gm_certainty, scale, mode = "gm") + gm_loss = self.ce_weight * gm_flow_losses[f"gm_certainty_loss_{scale}"] + gm_flow_losses[f"gm_regression_loss_{scale}"] + tot_loss = tot_loss + scale_weights[scale] * gm_loss + + if delta_cls is not None: + delta_cls_losses = self.delta_cls_loss(x2, prob, flow_pre_delta, delta_cls, scale_certainty, scale, offset_scale) + delta_cls_loss = self.ce_weight * delta_cls_losses[f"delta_certainty_loss_{scale}"] + delta_cls_losses[f"delta_cls_loss_{scale}"] + tot_loss = tot_loss + scale_weights[scale] * delta_cls_loss + else: + delta_regression_losses = self.regression_loss(x2, prob, flow, scale_certainty, scale) + reg_loss = self.ce_weight * delta_regression_losses[f"delta_certainty_loss_{scale}"] + delta_regression_losses[f"delta_regression_loss_{scale}"] + tot_loss = tot_loss + scale_weights[scale] * reg_loss + prev_epe = (flow.permute(0,2,3,1) - x2).norm(dim=-1).detach() + return tot_loss diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/matchanything_roma_model.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/matchanything_roma_model.py new file mode 100644 index 0000000000000000000000000000000000000000..86175c85db551063c6b73445fb59042d86b4e7d7 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/matchanything_roma_model.py @@ 
-0,0 +1,104 @@ +import torch +import torch.nn as nn +from PIL import Image +import numpy as np +from pathlib import Path +import sys +sys.path.append(str(Path(__file__).parent.parent.resolve())) + +from ..experiments.roma_outdoor import get_model + +class MatchAnything_Model(nn.Module): + def __init__(self, config, test_mode=False) -> None: + super().__init__() + self.config = config + self.test_mode = test_mode + self.resize_by_stretch = config['resize_by_stretch'] + self.norm_image = config['normalize_img'] + model_config = self.config['model'] + if not test_mode : + self.model = get_model(pretrained_backbone=True, amp=model_config['amp'], coarse_backbone_type=model_config['coarse_backbone'], coarse_feat_dim=model_config['coarse_feat_dim'], medium_feat_dim=model_config['medium_feat_dim'], coarse_patch_size=model_config['coarse_patch_size']) # Train mode + else: + self.model = get_model(pretrained_backbone=True, amp=model_config['amp'], coarse_backbone_type=model_config['coarse_backbone'], coarse_feat_dim=model_config['coarse_feat_dim'], medium_feat_dim=model_config['medium_feat_dim'], coarse_patch_size=model_config['coarse_patch_size'], coarse_resolution=self.config['test_time']['coarse_res'], symmetric=self.config['test_time']['symmetric'], upsample_preds=self.config['test_time']['upsample'], attenuate_cert=self.config['test_time']['attenutate_cert']) # Test mode + self.model.upsample_res = self.config['test_time']['upsample_res'] + self.model.sample_mode = self.config['sample']['method'] + self.model.sample_thresh = self.config['sample']['thresh'] + + def forward(self, data): + if not self.test_mode: + return self.forward_train_framework(data) + else: + return self.forward_inference(data) + + def forward_train_framework(self, data): + # Get already resize & padded images by dataloader + img0, img1 = data['image0'], data['image1'] # B * C * H * W + corresps = self.model.forward({"im_A": img0, "im_B": img1}, batched=True) + + data.update({"corresps":corresps}) # for supervision + + warp, certainity = self.model.self_train_time_match(data, corresps) # batched and padded + + m_bids = [] + mkpts0_f = [] + mkpts1_f = [] + m_conf = [] + for b_id in range(warp.shape[0]): + if self.resize_by_stretch: + H_A, W_A = data["origin_img_size0"][b_id][0], data["origin_img_size0"][b_id][1] + H_B, W_B = data["origin_img_size1"][b_id][0], data["origin_img_size1"][b_id][1] + else: + # By padding: + H_A, W_A = data["origin_img_size0"][b_id].max(), data["origin_img_size0"][b_id].max() + H_B, W_B = data["origin_img_size1"][b_id].max(), data["origin_img_size1"][b_id].max() + # # Sample matches for estimation + matches, certainity_ = self.model.sample(warp[b_id], certainity[b_id], num=self.config['sample']['n_sample']) + kpts0, kpts1 = self.model.to_pixel_coordinates(matches, H_A, W_A, H_B, W_B) + m_bids.append(torch.ones((kpts0.shape[0],), device=matches.device, dtype=torch.long) * b_id) + mkpts0_f.append(kpts0) + mkpts1_f.append(kpts1) + m_conf.append(certainity_) + data.update({'m_bids': torch.cat(m_bids), "mkpts0_f": torch.cat(mkpts0_f), "mkpts1_f": torch.cat(mkpts1_f), "mconf": torch.cat(m_conf)}) + + def forward_inference(self, data): + # Assume Loaded image in original image shape + if 'image0_rgb_origin' in data: + img0, img1 = data['image0_rgb_origin'][0], data['image1_rgb_origin'][0] + elif 'image0_rgb' in data: + img0, img1 = data['image0_rgb'][0], data['image1_rgb'][0] + else: + raise NotImplementedError + warp, dense_certainity = self.model.self_inference_time_match(img0, img1, 
resize_by_stretch=self.resize_by_stretch, norm_img=self.norm_image) + + if self.resize_by_stretch: + H_A, W_A = img0.shape[-2], img0.shape[-1] + H_B, W_B = img1.shape[-2], img1.shape[-1] + else: + A_max_edge = max(img0.shape[-2:]) + H_A, W_A = A_max_edge, A_max_edge + B_max_edge = max(img1.shape[-2:]) + H_B, W_B = B_max_edge, B_max_edge + + # Sample matches for estimation + matches, certainity = self.model.sample(warp, dense_certainity, num=self.config['sample']['n_sample']) + kpts0, kpts1 = self.model.to_pixel_coordinates(matches, H_A, W_A, H_B, W_B) + + mask = certainity > self.config['match_thresh'] + # Mask borders: + mask *= (kpts0[:, 0] <= img0.shape[-1]-1) * (kpts0[:, 1] <= img0.shape[-2]-1) * (kpts1[:, 0] <= img1.shape[-1]-1) * (kpts1[:, 1] <= img1.shape[-2]-1) + data.update({'m_bids': torch.zeros_like(kpts0[:, 0])[mask], "mkpts0_f": kpts0[mask], "mkpts1_f": kpts1[mask], "mconf": certainity[mask]}) + + # Warp query points: + if 'query_points' in data: + detector_kpts0 = data['query_points'].to(torch.float32) # B * N * 2 + within_mask = (detector_kpts0[..., 0] >= 0) & (detector_kpts0[..., 0] <= (W_A - 1)) & (detector_kpts0[..., 1] >= 0) & (detector_kpts0[..., 1] <= (H_A - 1)) + internal_detector_kpts0 = detector_kpts0[within_mask] + warped_detector_kpts0, cert_A_to_B = self.model.warp_keypoints(internal_detector_kpts0, warp, dense_certainity, H_A, W_A, H_B, W_B) + data.update({"query_points_warpped": warped_detector_kpts0}) + return data + + def load_state_dict(self, state_dict, *args, **kwargs): + for k in list(state_dict.keys()): + if k.startswith('matcher.'): + state_dict[k.replace('matcher.', '', 1)] = state_dict.pop(k) + return super().load_state_dict(state_dict, *args, **kwargs) \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/__init__.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5f20461e2f3a1722e558cefab94c5164be8842c3 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/__init__.py @@ -0,0 +1 @@ +from .model_zoo import roma_outdoor, roma_indoor \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/blocks.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/blocks.py new file mode 100644 index 0000000000000000000000000000000000000000..18133524f0ae265b0bd8d062d7c9eeaa63858a9b --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/blocks.py @@ -0,0 +1,241 @@ +# Copyright (C) 2022-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). 
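The `load_state_dict` override at the end of `matchanything_roma_model.py` accepts checkpoints whose keys carry a `matcher.` prefix, i.e. weights exported from a wrapper that held the network as `self.matcher`. The renaming it performs, shown standalone with a toy state dict:

```python
import torch

state_dict = {"matcher.model.decoder.weight": torch.zeros(2, 2)}
for k in list(state_dict.keys()):
    if k.startswith("matcher."):
        # strip only the first occurrence, exactly as the override does
        state_dict[k.replace("matcher.", "", 1)] = state_dict.pop(k)
assert "model.decoder.weight" in state_dict
```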
+ + +# -------------------------------------------------------- +# Main encoder/decoder blocks +# -------------------------------------------------------- +# References: +# timm +# https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/vision_transformer.py +# https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/layers/helpers.py +# https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/layers/drop.py +# https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/layers/mlp.py +# https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/layers/patch_embed.py + + +import torch +import torch.nn as nn + +from itertools import repeat +import collections.abc + + +def _ntuple(n): + def parse(x): + if isinstance(x, collections.abc.Iterable) and not isinstance(x, str): + return x + return tuple(repeat(x, n)) + return parse +to_2tuple = _ntuple(2) + +def drop_path(x, drop_prob: float = 0., training: bool = False, scale_by_keep: bool = True): + """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks). + """ + if drop_prob == 0. or not training: + return x + keep_prob = 1 - drop_prob + shape = (x.shape[0],) + (1,) * (x.ndim - 1) # work with diff dim tensors, not just 2D ConvNets + random_tensor = x.new_empty(shape).bernoulli_(keep_prob) + if keep_prob > 0.0 and scale_by_keep: + random_tensor.div_(keep_prob) + return x * random_tensor + +class DropPath(nn.Module): + """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks). + """ + def __init__(self, drop_prob: float = 0., scale_by_keep: bool = True): + super(DropPath, self).__init__() + self.drop_prob = drop_prob + self.scale_by_keep = scale_by_keep + + def forward(self, x): + return drop_path(x, self.drop_prob, self.training, self.scale_by_keep) + + def extra_repr(self): + return f'drop_prob={round(self.drop_prob,3):0.3f}' + +class Mlp(nn.Module): + """ MLP as used in Vision Transformer, MLP-Mixer and related networks""" + def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, bias=True, drop=0.): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + bias = to_2tuple(bias) + drop_probs = to_2tuple(drop) + + self.fc1 = nn.Linear(in_features, hidden_features, bias=bias[0]) + self.act = act_layer() + self.drop1 = nn.Dropout(drop_probs[0]) + self.fc2 = nn.Linear(hidden_features, out_features, bias=bias[1]) + self.drop2 = nn.Dropout(drop_probs[1]) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop1(x) + x = self.fc2(x) + x = self.drop2(x) + return x + +class Attention(nn.Module): + + def __init__(self, dim, rope=None, num_heads=8, qkv_bias=False, attn_drop=0., proj_drop=0.): + super().__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = head_dim ** -0.5 + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + self.rope = rope + + def forward(self, x, xpos): + B, N, C = x.shape + + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).transpose(1,3) + q, k, v = [qkv[:,:,i] for i in range(3)] + # q,k,v = qkv.unbind(2) # make torchscript happy (cannot use tensor as tuple) + + if self.rope is not None: + q = self.rope(q, xpos) + k = self.rope(k, xpos) + + attn = (q @ k.transpose(-2, -1)) * self.scale + attn = 
attn.softmax(dim=-1) + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + +class Block(nn.Module): + + def __init__(self, dim, num_heads, mlp_ratio=4., qkv_bias=False, drop=0., attn_drop=0., + drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm, rope=None): + super().__init__() + self.norm1 = norm_layer(dim) + self.attn = Attention(dim, rope=rope, num_heads=num_heads, qkv_bias=qkv_bias, attn_drop=attn_drop, proj_drop=drop) + # NOTE: drop path for stochastic depth, we shall see if this is better than dropout here + self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity() + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop) + + def forward(self, x, xpos): + x = x + self.drop_path(self.attn(self.norm1(x), xpos)) + x = x + self.drop_path(self.mlp(self.norm2(x))) + return x + +class CrossAttention(nn.Module): + + def __init__(self, dim, rope=None, num_heads=8, qkv_bias=False, attn_drop=0., proj_drop=0.): + super().__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = head_dim ** -0.5 + + self.projq = nn.Linear(dim, dim, bias=qkv_bias) + self.projk = nn.Linear(dim, dim, bias=qkv_bias) + self.projv = nn.Linear(dim, dim, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + + self.rope = rope + + def forward(self, query, key, value, qpos, kpos): + B, Nq, C = query.shape + Nk = key.shape[1] + Nv = value.shape[1] + + q = self.projq(query).reshape(B,Nq,self.num_heads, C// self.num_heads).permute(0, 2, 1, 3) + k = self.projk(key).reshape(B,Nk,self.num_heads, C// self.num_heads).permute(0, 2, 1, 3) + v = self.projv(value).reshape(B,Nv,self.num_heads, C// self.num_heads).permute(0, 2, 1, 3) + + if self.rope is not None: + q = self.rope(q, qpos) + k = self.rope(k, kpos) + + attn = (q @ k.transpose(-2, -1)) * self.scale + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B, Nq, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + +class DecoderBlock(nn.Module): + + def __init__(self, dim, num_heads, mlp_ratio=4., qkv_bias=False, drop=0., attn_drop=0., + drop_path=0., act_layer=nn.GELU, norm_layer=nn.LayerNorm, norm_mem=True, rope=None): + super().__init__() + self.norm1 = norm_layer(dim) + self.attn = Attention(dim, rope=rope, num_heads=num_heads, qkv_bias=qkv_bias, attn_drop=attn_drop, proj_drop=drop) + self.cross_attn = CrossAttention(dim, rope=rope, num_heads=num_heads, qkv_bias=qkv_bias, attn_drop=attn_drop, proj_drop=drop) + self.drop_path = DropPath(drop_path) if drop_path > 0. 
else nn.Identity() + self.norm2 = norm_layer(dim) + self.norm3 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop) + self.norm_y = norm_layer(dim) if norm_mem else nn.Identity() + + def forward(self, x, y, xpos, ypos): + x = x + self.drop_path(self.attn(self.norm1(x), xpos)) + y_ = self.norm_y(y) + x = x + self.drop_path(self.cross_attn(self.norm2(x), y_, y_, xpos, ypos)) + x = x + self.drop_path(self.mlp(self.norm3(x))) + return x, y + + +# patch embedding +class PositionGetter(object): + """ return positions of patches """ + + def __init__(self): + self.cache_positions = {} + + def __call__(self, b, h, w, device): + if not (h,w) in self.cache_positions: + x = torch.arange(w, device=device) + y = torch.arange(h, device=device) + self.cache_positions[h,w] = torch.cartesian_prod(y, x) # (h, w, 2) + pos = self.cache_positions[h,w].view(1, h*w, 2).expand(b, -1, 2).clone() + return pos + +class PatchEmbed(nn.Module): + """ just adding _init_weights + position getter compared to timm.models.layers.patch_embed.PatchEmbed""" + + def __init__(self, img_size=224, patch_size=16, in_chans=3, embed_dim=768, norm_layer=None, flatten=True): + super().__init__() + img_size = to_2tuple(img_size) + patch_size = to_2tuple(patch_size) + self.img_size = img_size + self.patch_size = patch_size + self.grid_size = (img_size[0] // patch_size[0], img_size[1] // patch_size[1]) + self.num_patches = self.grid_size[0] * self.grid_size[1] + self.flatten = flatten + + self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size) + self.norm = norm_layer(embed_dim) if norm_layer else nn.Identity() + + self.position_getter = PositionGetter() + + def forward(self, x): + B, C, H, W = x.shape + torch._assert(H == self.img_size[0], f"Input image height ({H}) doesn't match model ({self.img_size[0]}).") + torch._assert(W == self.img_size[1], f"Input image width ({W}) doesn't match model ({self.img_size[1]}).") + x = self.proj(x) + pos = self.position_getter(B, x.size(2), x.size(3), x.device) + if self.flatten: + x = x.flatten(2).transpose(1, 2) # BCHW -> BNC + x = self.norm(x) + return x, pos + + def _init_weights(self): + w = self.proj.weight.data + torch.nn.init.xavier_uniform_(w.view([w.shape[0], -1])) + diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/criterion.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/criterion.py new file mode 100644 index 0000000000000000000000000000000000000000..11696c40865344490f23796ea45e8fbd5e654731 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/criterion.py @@ -0,0 +1,37 @@ +# Copyright (C) 2022-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). 
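`PatchEmbed` above maps a B x 3 x H x W image to N = (H/p)(W/p) tokens plus integer (y, x) patch positions from `PositionGetter`. A shape sanity check with illustrative sizes:

```python
import torch
from roma.models.croco.blocks import PatchEmbed

embed = PatchEmbed(img_size=224, patch_size=16, in_chans=3, embed_dim=768)
x, pos = embed(torch.randn(2, 3, 224, 224))
assert x.shape == (2, 14 * 14, 768)   # N = (224/16)**2 = 196 tokens
assert pos.shape == (2, 14 * 14, 2)   # per-token (y, x) grid coordinates
```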
+# +# -------------------------------------------------------- +# Criterion to train CroCo +# -------------------------------------------------------- +# References: +# MAE: https://github.com/facebookresearch/mae +# -------------------------------------------------------- + +import torch + +class MaskedMSE(torch.nn.Module): + + def __init__(self, norm_pix_loss=False, masked=True): + """ + norm_pix_loss: normalize each patch by their pixel mean and variance + masked: compute loss over the masked patches only + """ + super().__init__() + self.norm_pix_loss = norm_pix_loss + self.masked = masked + + def forward(self, pred, mask, target): + + if self.norm_pix_loss: + mean = target.mean(dim=-1, keepdim=True) + var = target.var(dim=-1, keepdim=True) + target = (target - mean) / (var + 1.e-6)**.5 + + loss = (pred - target) ** 2 + loss = loss.mean(dim=-1) # [N, L], mean loss per patch + if self.masked: + loss = (loss * mask).sum() / mask.sum() # mean loss on masked patches + else: + loss = loss.mean() # mean loss + return loss diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/croco.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/croco.py new file mode 100644 index 0000000000000000000000000000000000000000..6a53985b6ffdc51f125cc51c0533629399776f5d --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/croco.py @@ -0,0 +1,253 @@ +# Copyright (C) 2022-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). + + +# -------------------------------------------------------- +# CroCo model during pretraining +# -------------------------------------------------------- + + + +import torch +import torch.nn as nn +# torch.backends.cuda.matmul.allow_tf32 = True # for gpu >= Ampere and pytorch >= 1.12 +from functools import partial + +from .blocks import Block, DecoderBlock, PatchEmbed +from .pos_embed import get_2d_sincos_pos_embed, RoPE2D +from .masking import RandomMask + + +class CroCoNet(nn.Module): + + def __init__(self, + img_size=224, # input image size + patch_size=16, # patch_size + mask_ratio=0.9, # ratios of masked tokens + enc_embed_dim=768, # encoder feature dimension + enc_depth=12, # encoder depth + enc_num_heads=12, # encoder number of heads in the transformer block + dec_embed_dim=512, # decoder feature dimension + dec_depth=8, # decoder depth + dec_num_heads=16, # decoder number of heads in the transformer block + mlp_ratio=4, + norm_layer=partial(nn.LayerNorm, eps=1e-6), + norm_im2_in_dec=True, # whether to apply normalization of the 'memory' = (second image) in the decoder + pos_embed='cosine', # positional embedding (either cosine or RoPE100) + ): + + super(CroCoNet, self).__init__() + + # patch embeddings (with initialization done as in MAE) + self._set_patch_embed(img_size, patch_size, enc_embed_dim) + + # mask generations + self._set_mask_generator(self.patch_embed.num_patches, mask_ratio) + + self.pos_embed = pos_embed + if pos_embed=='cosine': + # positional embedding of the encoder + enc_pos_embed = get_2d_sincos_pos_embed(enc_embed_dim, int(self.patch_embed.num_patches**.5), n_cls_token=0) + self.register_buffer('enc_pos_embed', torch.from_numpy(enc_pos_embed).float()) + # positional embedding of the decoder + dec_pos_embed = get_2d_sincos_pos_embed(dec_embed_dim, int(self.patch_embed.num_patches**.5), n_cls_token=0) + self.register_buffer('dec_pos_embed', torch.from_numpy(dec_pos_embed).float()) + # pos embedding in each block + self.rope = None 
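`MaskedMSE` above averages the per-patch squared error over masked patches only, MAE-style. A toy invocation (shapes are illustrative):

```python
import torch
from roma.models.croco.criterion import MaskedMSE

criterion = MaskedMSE(norm_pix_loss=True, masked=True)
B, L, D = 2, 196, 768                    # batch, patches, pixels per patch
pred = torch.randn(B, L, D)
target = torch.randn(B, L, D)
mask = (torch.rand(B, L) > 0.1).float()  # 1 = masked patch (contributes to the loss)
loss = criterion(pred, mask, target)
```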
# nothing for cosine + elif pos_embed.startswith('RoPE'): # eg RoPE100 + self.enc_pos_embed = None # nothing to add in the encoder with RoPE + self.dec_pos_embed = None # nothing to add in the decoder with RoPE + if RoPE2D is None: raise ImportError("Cannot find cuRoPE2D, please install it following the README instructions") + freq = float(pos_embed[len('RoPE'):]) + self.rope = RoPE2D(freq=freq) + else: + raise NotImplementedError('Unknown pos_embed '+pos_embed) + + # transformer for the encoder + self.enc_depth = enc_depth + self.enc_embed_dim = enc_embed_dim + self.enc_blocks = nn.ModuleList([ + Block(enc_embed_dim, enc_num_heads, mlp_ratio, qkv_bias=True, norm_layer=norm_layer, rope=self.rope) + for i in range(enc_depth)]) + self.enc_norm = norm_layer(enc_embed_dim) + + # masked tokens + self._set_mask_token(dec_embed_dim) + + # decoder + self._set_decoder(enc_embed_dim, dec_embed_dim, dec_num_heads, dec_depth, mlp_ratio, norm_layer, norm_im2_in_dec) + + # prediction head + self._set_prediction_head(dec_embed_dim, patch_size) + + # initializer weights + self.initialize_weights() + + @property + def device(self): + return self.patch_embed.device + + def _set_patch_embed(self, img_size=224, patch_size=16, enc_embed_dim=768): + self.patch_embed = PatchEmbed(img_size, patch_size, 3, enc_embed_dim) + + def _set_mask_generator(self, num_patches, mask_ratio): + self.mask_generator = RandomMask(num_patches, mask_ratio) + + def _set_mask_token(self, dec_embed_dim): + self.mask_token = nn.Parameter(torch.zeros(1, 1, dec_embed_dim)) + + def _set_decoder(self, enc_embed_dim, dec_embed_dim, dec_num_heads, dec_depth, mlp_ratio, norm_layer, norm_im2_in_dec): + self.dec_depth = dec_depth + self.dec_embed_dim = dec_embed_dim + # transfer from encoder to decoder + self.decoder_embed = nn.Linear(enc_embed_dim, dec_embed_dim, bias=True) + # transformer for the decoder + self.dec_blocks = nn.ModuleList([ + DecoderBlock(dec_embed_dim, dec_num_heads, mlp_ratio=mlp_ratio, qkv_bias=True, norm_layer=norm_layer, norm_mem=norm_im2_in_dec, rope=self.rope) + for i in range(dec_depth)]) + # final norm layer + self.dec_norm = norm_layer(dec_embed_dim) + + def _set_prediction_head(self, dec_embed_dim, patch_size): + self.prediction_head = nn.Linear(dec_embed_dim, patch_size**2 * 3, bias=True) + + + def initialize_weights(self): + # patch embed + self.patch_embed._init_weights() + # mask tokens + if self.mask_token is not None: torch.nn.init.normal_(self.mask_token, std=.02) + # linears and layer norms + self.apply(self._init_weights) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + # we use xavier_uniform following official JAX ViT: + torch.nn.init.xavier_uniform_(m.weight) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.LayerNorm): + nn.init.constant_(m.bias, 0) + nn.init.constant_(m.weight, 1.0) + + def _encode_image(self, image, do_mask=False, return_all_blocks=False): + """ + image has B x 3 x img_size x img_size + do_mask: whether to perform masking or not + return_all_blocks: if True, return the features at the end of every block + instead of just the features from the last block (eg for some prediction heads) + """ + # embed the image into patches (x has size B x Npatches x C) + # and get position if each return patch (pos has size B x Npatches x 2) + x, pos = self.patch_embed(image) + # add positional embedding without cls token + if self.enc_pos_embed is not None: + x = x + self.enc_pos_embed[None,...] 
+ # apply masking + B,N,C = x.size() + if do_mask: + masks = self.mask_generator(x) + x = x[~masks].view(B, -1, C) + posvis = pos[~masks].view(B, -1, 2) + else: + B,N,C = x.size() + masks = torch.zeros((B,N), dtype=bool) + posvis = pos + # now apply the transformer encoder and normalization + if return_all_blocks: + out = [] + for blk in self.enc_blocks: + x = blk(x, posvis) + out.append(x) + out[-1] = self.enc_norm(out[-1]) + return out, pos, masks + else: + for blk in self.enc_blocks: + x = blk(x, posvis) + x = self.enc_norm(x) + return x, pos, masks + + def _decoder(self, feat1, pos1, masks1, feat2, pos2, return_all_blocks=False): + """ + return_all_blocks: if True, return the features at the end of every block + instead of just the features from the last block (eg for some prediction heads) + + masks1 can be None => assume image1 fully visible + """ + # encoder to decoder layer + visf1 = self.decoder_embed(feat1) + f2 = self.decoder_embed(feat2) + # append masked tokens to the sequence + B,Nenc,C = visf1.size() + if masks1 is None: # downstreams + f1_ = visf1 + else: # pretraining + Ntotal = masks1.size(1) + f1_ = self.mask_token.repeat(B, Ntotal, 1).to(dtype=visf1.dtype) + f1_[~masks1] = visf1.view(B * Nenc, C) + # add positional embedding + if self.dec_pos_embed is not None: + f1_ = f1_ + self.dec_pos_embed + f2 = f2 + self.dec_pos_embed + # apply Transformer blocks + out = f1_ + out2 = f2 + if return_all_blocks: + _out, out = out, [] + for blk in self.dec_blocks: + _out, out2 = blk(_out, out2, pos1, pos2) + out.append(_out) + out[-1] = self.dec_norm(out[-1]) + else: + for blk in self.dec_blocks: + out, out2 = blk(out, out2, pos1, pos2) + out = self.dec_norm(out) + return out + + def patchify(self, imgs): + """ + imgs: (B, 3, H, W) + x: (B, L, patch_size**2 *3) + """ + p = self.patch_embed.patch_size[0] + assert imgs.shape[2] == imgs.shape[3] and imgs.shape[2] % p == 0 + + h = w = imgs.shape[2] // p + x = imgs.reshape(shape=(imgs.shape[0], 3, h, p, w, p)) + x = torch.einsum('nchpwq->nhwpqc', x) + x = x.reshape(shape=(imgs.shape[0], h * w, p**2 * 3)) + + return x + + def unpatchify(self, x, channels=3): + """ + x: (N, L, patch_size**2 *channels) + imgs: (N, 3, H, W) + """ + patch_size = self.patch_embed.patch_size[0] + h = w = int(x.shape[1]**.5) + assert h * w == x.shape[1] + x = x.reshape(shape=(x.shape[0], h, w, patch_size, patch_size, channels)) + x = torch.einsum('nhwpqc->nchpwq', x) + imgs = x.reshape(shape=(x.shape[0], channels, h * patch_size, h * patch_size)) + return imgs + + def forward(self, img1, img2): + """ + img1: tensor of size B x 3 x img_size x img_size + img2: tensor of size B x 3 x img_size x img_size + + out will be B x N x (3*patch_size*patch_size) + masks are also returned as B x N just in case + """ + # encoder of the masked first image + feat1, pos1, mask1 = self._encode_image(img1, do_mask=True) + # encoder of the second image + feat2, pos2, _ = self._encode_image(img2, do_mask=False) + # decoder + decfeat = self._decoder(feat1, pos1, mask1, feat2, pos2) + # prediction head + out = self.prediction_head(decfeat) + # get target + target = self.patchify(img1) + return out, mask1, target diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/croco_downstream.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/croco_downstream.py new file mode 100644 index 0000000000000000000000000000000000000000..159dfff4d2c1461bc235e21441b57ce1e2088f76 --- /dev/null +++ 
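`patchify` and `unpatchify` above are exact inverses for square inputs whose side is a multiple of the patch size; a compact restatement and round-trip check:

```python
import torch

def patchify(imgs, p):
    B, C, H, W = imgs.shape
    h, w = H // p, W // p
    x = imgs.reshape(B, C, h, p, w, p)
    x = torch.einsum('nchpwq->nhwpqc', x)   # group pixels by patch
    return x.reshape(B, h * w, p * p * C)

def unpatchify(x, p, channels=3):
    h = w = int(x.shape[1] ** 0.5)
    x = x.reshape(x.shape[0], h, w, p, p, channels)
    x = torch.einsum('nhwpqc->nchpwq', x)   # scatter patches back onto the grid
    return x.reshape(x.shape[0], channels, h * p, w * p)

imgs = torch.randn(2, 3, 224, 224)
assert torch.allclose(unpatchify(patchify(imgs, 16), 16), imgs)
```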
b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/croco_downstream.py @@ -0,0 +1,122 @@ +# Copyright (C) 2022-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). + +# -------------------------------------------------------- +# CroCo model for downstream tasks +# -------------------------------------------------------- + +import torch + +from .croco import CroCoNet + + +def croco_args_from_ckpt(ckpt): + if 'croco_kwargs' in ckpt: # CroCo v2 released models + return ckpt['croco_kwargs'] + elif 'args' in ckpt and hasattr(ckpt['args'], 'model'): # pretrained using the official code release + s = ckpt['args'].model # eg "CroCoNet(enc_embed_dim=1024, enc_num_heads=16, enc_depth=24)" + assert s.startswith('CroCoNet(') + return eval('dict'+s[len('CroCoNet'):]) # transform it into the string of a dictionary and evaluate it + else: # CroCo v1 released models + return dict() + +class CroCoDownstreamMonocularEncoder(CroCoNet): + + def __init__(self, + head, + **kwargs): + """ Build network for monocular downstream task, only using the encoder. + It takes an extra argument head, that is called with the features + and a dictionary img_info containing 'width' and 'height' keys + The head is setup with the croconet arguments in this init function + NOTE: It works by *calling super().__init__() but with redefined setters + + """ + super(CroCoDownstreamMonocularEncoder, self).__init__(**kwargs) + head.setup(self) + self.head = head + + def _set_mask_generator(self, *args, **kwargs): + """ No mask generator """ + return + + def _set_mask_token(self, *args, **kwargs): + """ No mask token """ + self.mask_token = None + return + + def _set_decoder(self, *args, **kwargs): + """ No decoder """ + return + + def _set_prediction_head(self, *args, **kwargs): + """ No 'prediction head' for downstream tasks.""" + return + + def forward(self, img): + """ + img if of size batch_size x 3 x h x w + """ + B, C, H, W = img.size() + img_info = {'height': H, 'width': W} + need_all_layers = hasattr(self.head, 'return_all_blocks') and self.head.return_all_blocks + out, _, _ = self._encode_image(img, do_mask=False, return_all_blocks=need_all_layers) + return self.head(out, img_info) + + +class CroCoDownstreamBinocular(CroCoNet): + + def __init__(self, + head, + **kwargs): + """ Build network for binocular downstream task + It takes an extra argument head, that is called with the features + and a dictionary img_info containing 'width' and 'height' keys + The head is setup with the croconet arguments in this init function + """ + super(CroCoDownstreamBinocular, self).__init__(**kwargs) + head.setup(self) + self.head = head + + def _set_mask_generator(self, *args, **kwargs): + """ No mask generator """ + return + + def _set_mask_token(self, *args, **kwargs): + """ No mask token """ + self.mask_token = None + return + + def _set_prediction_head(self, *args, **kwargs): + """ No prediction head for downstream tasks, define your own head """ + return + + def encode_image_pairs(self, img1, img2, return_all_blocks=False): + """ run encoder for a pair of images + it is actually ~5% faster to concatenate the images along the batch dimension + than to encode them separately + """ + ## the two commented lines below is the naive version with separate encoding + #out, pos, _ = self._encode_image(img1, do_mask=False, return_all_blocks=return_all_blocks) + #out2, pos2, _ = self._encode_image(img2, do_mask=False, return_all_blocks=False) + ## and now the faster version + 
out, pos, _ = self._encode_image( torch.cat( (img1,img2), dim=0), do_mask=False, return_all_blocks=return_all_blocks ) + if return_all_blocks: + out,out2 = list(map(list, zip(*[o.chunk(2, dim=0) for o in out]))) + out2 = out2[-1] + else: + out,out2 = out.chunk(2, dim=0) + pos,pos2 = pos.chunk(2, dim=0) + return out, out2, pos, pos2 + + def forward(self, img1, img2): + B, C, H, W = img1.size() + img_info = {'height': H, 'width': W} + return_all_blocks = hasattr(self.head, 'return_all_blocks') and self.head.return_all_blocks + out, out2, pos, pos2 = self.encode_image_pairs(img1, img2, return_all_blocks=return_all_blocks) + if return_all_blocks: + decout = self._decoder(out[-1], pos, None, out2, pos2, return_all_blocks=return_all_blocks) + decout = out+decout + else: + decout = self._decoder(out, pos, None, out2, pos2, return_all_blocks=return_all_blocks) + return self.head(decout, img_info) \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/curope/__init__.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/curope/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..25e3d48a162760260826080f6366838e83e26878 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/curope/__init__.py @@ -0,0 +1,4 @@ +# Copyright (C) 2022-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). + +from .curope2d import cuRoPE2D diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/curope/curope.cpp b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/curope/curope.cpp new file mode 100644 index 0000000000000000000000000000000000000000..8fe9058e05aa1bf3f37b0d970edc7312bc68455b --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/curope/curope.cpp @@ -0,0 +1,69 @@ +/* + Copyright (C) 2022-present Naver Corporation. All rights reserved. + Licensed under CC BY-NC-SA 4.0 (non-commercial use only). 
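`encode_image_pairs` above runs both images through the encoder as one concatenated batch (the comment cites roughly 5% savings over two separate passes) and splits the outputs afterwards. The chunking pattern in isolation, with a stand-in encoder:

```python
import torch

def encode_pair_batched(encoder, img1, img2):
    # One forward over the concatenated batch, then split back per image.
    feats = encoder(torch.cat((img1, img2), dim=0))
    f1, f2 = feats.chunk(2, dim=0)
    return f1, f2

enc = torch.nn.Conv2d(3, 8, kernel_size=16, stride=16)  # stand-in encoder
a, b = torch.randn(4, 3, 224, 224), torch.randn(4, 3, 224, 224)
f1, f2 = encode_pair_batched(enc, a, b)
assert torch.allclose(f1, enc(a), atol=1e-6)
```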
+*/ + +#include + +// forward declaration +void rope_2d_cuda( torch::Tensor tokens, const torch::Tensor pos, const float base, const float fwd ); + +void rope_2d_cpu( torch::Tensor tokens, const torch::Tensor positions, const float base, const float fwd ) +{ + const int B = tokens.size(0); + const int N = tokens.size(1); + const int H = tokens.size(2); + const int D = tokens.size(3) / 4; + + auto tok = tokens.accessor(); + auto pos = positions.accessor(); + + for (int b = 0; b < B; b++) { + for (int x = 0; x < 2; x++) { // y and then x (2d) + for (int n = 0; n < N; n++) { + + // grab the token position + const int p = pos[b][n][x]; + + for (int h = 0; h < H; h++) { + for (int d = 0; d < D; d++) { + // grab the two values + float u = tok[b][n][h][d+0+x*2*D]; + float v = tok[b][n][h][d+D+x*2*D]; + + // grab the cos,sin + const float inv_freq = fwd * p / powf(base, d/float(D)); + float c = cosf(inv_freq); + float s = sinf(inv_freq); + + // write the result + tok[b][n][h][d+0+x*2*D] = u*c - v*s; + tok[b][n][h][d+D+x*2*D] = v*c + u*s; + } + } + } + } + } +} + +void rope_2d( torch::Tensor tokens, // B,N,H,D + const torch::Tensor positions, // B,N,2 + const float base, + const float fwd ) +{ + TORCH_CHECK(tokens.dim() == 4, "tokens must have 4 dimensions"); + TORCH_CHECK(positions.dim() == 3, "positions must have 3 dimensions"); + TORCH_CHECK(tokens.size(0) == positions.size(0), "batch size differs between tokens & positions"); + TORCH_CHECK(tokens.size(1) == positions.size(1), "seq_length differs between tokens & positions"); + TORCH_CHECK(positions.size(2) == 2, "positions.shape[2] must be equal to 2"); + TORCH_CHECK(tokens.is_cuda() == positions.is_cuda(), "tokens and positions are not on the same device" ); + + if (tokens.is_cuda()) + rope_2d_cuda( tokens, positions, base, fwd ); + else + rope_2d_cpu( tokens, positions, base, fwd ); +} + +PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) { + m.def("rope_2d", &rope_2d, "RoPE 2d forward/backward"); +} diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/curope/curope2d.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/curope/curope2d.py new file mode 100644 index 0000000000000000000000000000000000000000..a49c12f8c529e9a889b5ac20c5767158f238e17d --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/curope/curope2d.py @@ -0,0 +1,40 @@ +# Copyright (C) 2022-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). + +import torch + +try: + import curope as _kernels # run `python setup.py install` +except ModuleNotFoundError: + from . 
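The CPU fallback in curope.cpp rotates each (u, v) feature pair by theta = fwd * p / base^(d/D), separately for the y and x halves of the head dimension. A reference restatement of that rotation for a single axis, which also shows why the backward pass can reuse the same kernel with a negated F0 (rotation by -theta undoes rotation by theta):

```python
import torch

def rope_rotate_half(u, v, p, base=100.0, fwd=1.0):
    # theta_d = fwd * p / base**(d/D); (u, v) -> (u*cos - v*sin, v*cos + u*sin)
    D = u.shape[-1]
    d = torch.arange(D, dtype=u.dtype)
    theta = fwd * p / base ** (d / D)
    c, s = torch.cos(theta), torch.sin(theta)
    return u * c - v * s, v * c + u * s

u, v = torch.randn(64), torch.randn(64)
u2, v2 = rope_rotate_half(u, v, p=7)
u3, v3 = rope_rotate_half(u2, v2, p=7, fwd=-1.0)  # inverse rotation
assert torch.allclose(u3, u, atol=1e-5)
assert torch.allclose(v3, v, atol=1e-5)
```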
import curope as _kernels # run `python setup.py build_ext --inplace` + + +class cuRoPE2D_func (torch.autograd.Function): + + @staticmethod + def forward(ctx, tokens, positions, base, F0=1): + ctx.save_for_backward(positions) + ctx.saved_base = base + ctx.saved_F0 = F0 + # tokens = tokens.clone() # uncomment this if inplace doesn't work + _kernels.rope_2d( tokens, positions, base, F0 ) + ctx.mark_dirty(tokens) + return tokens + + @staticmethod + def backward(ctx, grad_res): + positions, base, F0 = ctx.saved_tensors[0], ctx.saved_base, ctx.saved_F0 + _kernels.rope_2d( grad_res, positions, base, -F0 ) + ctx.mark_dirty(grad_res) + return grad_res, None, None, None + + +class cuRoPE2D(torch.nn.Module): + def __init__(self, freq=100.0, F0=1.0): + super().__init__() + self.base = freq + self.F0 = F0 + + def forward(self, tokens, positions): + cuRoPE2D_func.apply( tokens.transpose(1,2), positions, self.base, self.F0 ) + return tokens \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/curope/kernels.cu b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/curope/kernels.cu new file mode 100644 index 0000000000000000000000000000000000000000..7156cd1bb935cb1f0be45e58add53f9c21505c20 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/curope/kernels.cu @@ -0,0 +1,108 @@ +/* + Copyright (C) 2022-present Naver Corporation. All rights reserved. + Licensed under CC BY-NC-SA 4.0 (non-commercial use only). +*/ + +#include +#include +#include +#include + +#define CHECK_CUDA(tensor) {\ + TORCH_CHECK((tensor).is_cuda(), #tensor " is not in cuda memory"); \ + TORCH_CHECK((tensor).is_contiguous(), #tensor " is not contiguous"); } +void CHECK_KERNEL() {auto error = cudaGetLastError(); TORCH_CHECK( error == cudaSuccess, cudaGetErrorString(error));} + + +template < typename scalar_t > +__global__ void rope_2d_cuda_kernel( + //scalar_t* __restrict__ tokens, + torch::PackedTensorAccessor32 tokens, + const int64_t* __restrict__ pos, + const float base, + const float fwd ) + // const int N, const int H, const int D ) +{ + // tokens shape = (B, N, H, D) + const int N = tokens.size(1); + const int H = tokens.size(2); + const int D = tokens.size(3); + + // each block update a single token, for all heads + // each thread takes care of a single output + extern __shared__ float shared[]; + float* shared_inv_freq = shared + D; + + const int b = blockIdx.x / N; + const int n = blockIdx.x % N; + + const int Q = D / 4; + // one token = [0..Q : Q..2Q : 2Q..3Q : 3Q..D] + // u_Y v_Y u_X v_X + + // shared memory: first, compute inv_freq + if (threadIdx.x < Q) + shared_inv_freq[threadIdx.x] = fwd / powf(base, threadIdx.x/float(Q)); + __syncthreads(); + + // start of X or Y part + const int X = threadIdx.x < D/2 ? 
diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/curope/setup.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/curope/setup.py
new file mode 100644
index 0000000000000000000000000000000000000000..230632ed05e309200e8f93a3a852072333975009
--- /dev/null
+++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/curope/setup.py
@@ -0,0 +1,34 @@
+# Copyright (C) 2022-present Naver Corporation. All rights reserved.
+# Licensed under CC BY-NC-SA 4.0 (non-commercial use only).
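Per the comments in curope2d.py, the extension is built with `python setup.py install` (or `python setup.py build_ext --inplace` for an in-tree build). Once built, the raw binding can be called directly; the shapes and dtypes below follow the TORCH_CHECKs in curope.cpp (illustrative values only):

    import torch
    import curope

    tokens = torch.randn(2, 196, 8, 64, device='cuda')            # (B, N, H, D), D % 4 == 0, contiguous
    positions = torch.randint(0, 14, (2, 196, 2), device='cuda')  # (B, N, 2) int64 (y, x)
    curope.rope_2d(tokens, positions, 100.0, 1.0)                 # rotates tokens in place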
+ +from setuptools import setup +from torch import cuda +from torch.utils.cpp_extension import BuildExtension, CUDAExtension + +# compile for all possible CUDA architectures +all_cuda_archs = cuda.get_gencode_flags().replace('compute=','arch=').split() +# alternatively, you can list cuda archs that you want, eg: +# all_cuda_archs = [ + # '-gencode', 'arch=compute_70,code=sm_70', + # '-gencode', 'arch=compute_75,code=sm_75', + # '-gencode', 'arch=compute_80,code=sm_80', + # '-gencode', 'arch=compute_86,code=sm_86' +# ] + +setup( + name = 'curope', + ext_modules = [ + CUDAExtension( + name='curope', + sources=[ + "curope.cpp", + "kernels.cu", + ], + extra_compile_args = dict( + nvcc=['-O3','--ptxas-options=-v',"--use_fast_math"]+all_cuda_archs, + cxx=['-O3']) + ) + ], + cmdclass = { + 'build_ext': BuildExtension + }) diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/dpt_block.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/dpt_block.py new file mode 100644 index 0000000000000000000000000000000000000000..d4ddfb74e2769ceca88720d4c730e00afd71c763 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/dpt_block.py @@ -0,0 +1,450 @@ +# Copyright (C) 2022-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). + +# -------------------------------------------------------- +# DPT head for ViTs +# -------------------------------------------------------- +# References: +# https://github.com/isl-org/DPT +# https://github.com/EPFL-VILAB/MultiMAE/blob/main/multimae/output_adapters.py + +import torch +import torch.nn as nn +import torch.nn.functional as F +from einops import rearrange, repeat +from typing import Union, Tuple, Iterable, List, Optional, Dict + +def pair(t): + return t if isinstance(t, tuple) else (t, t) + +def make_scratch(in_shape, out_shape, groups=1, expand=False): + scratch = nn.Module() + + out_shape1 = out_shape + out_shape2 = out_shape + out_shape3 = out_shape + out_shape4 = out_shape + if expand == True: + out_shape1 = out_shape + out_shape2 = out_shape * 2 + out_shape3 = out_shape * 4 + out_shape4 = out_shape * 8 + + scratch.layer1_rn = nn.Conv2d( + in_shape[0], + out_shape1, + kernel_size=3, + stride=1, + padding=1, + bias=False, + groups=groups, + ) + scratch.layer2_rn = nn.Conv2d( + in_shape[1], + out_shape2, + kernel_size=3, + stride=1, + padding=1, + bias=False, + groups=groups, + ) + scratch.layer3_rn = nn.Conv2d( + in_shape[2], + out_shape3, + kernel_size=3, + stride=1, + padding=1, + bias=False, + groups=groups, + ) + scratch.layer4_rn = nn.Conv2d( + in_shape[3], + out_shape4, + kernel_size=3, + stride=1, + padding=1, + bias=False, + groups=groups, + ) + + scratch.layer_rn = nn.ModuleList([ + scratch.layer1_rn, + scratch.layer2_rn, + scratch.layer3_rn, + scratch.layer4_rn, + ]) + + return scratch + +class ResidualConvUnit_custom(nn.Module): + """Residual convolution module.""" + + def __init__(self, features, activation, bn): + """Init. 
+ Args: + features (int): number of features + """ + super().__init__() + + self.bn = bn + + self.groups = 1 + + self.conv1 = nn.Conv2d( + features, + features, + kernel_size=3, + stride=1, + padding=1, + bias=not self.bn, + groups=self.groups, + ) + + self.conv2 = nn.Conv2d( + features, + features, + kernel_size=3, + stride=1, + padding=1, + bias=not self.bn, + groups=self.groups, + ) + + if self.bn == True: + self.bn1 = nn.BatchNorm2d(features) + self.bn2 = nn.BatchNorm2d(features) + + self.activation = activation + + self.skip_add = nn.quantized.FloatFunctional() + + def forward(self, x): + """Forward pass. + Args: + x (tensor): input + Returns: + tensor: output + """ + + out = self.activation(x) + out = self.conv1(out) + if self.bn == True: + out = self.bn1(out) + + out = self.activation(out) + out = self.conv2(out) + if self.bn == True: + out = self.bn2(out) + + if self.groups > 1: + out = self.conv_merge(out) + + return self.skip_add.add(out, x) + +class FeatureFusionBlock_custom(nn.Module): + """Feature fusion block.""" + + def __init__( + self, + features, + activation, + deconv=False, + bn=False, + expand=False, + align_corners=True, + width_ratio=1, + ): + """Init. + Args: + features (int): number of features + """ + super(FeatureFusionBlock_custom, self).__init__() + self.width_ratio = width_ratio + + self.deconv = deconv + self.align_corners = align_corners + + self.groups = 1 + + self.expand = expand + out_features = features + if self.expand == True: + out_features = features // 2 + + self.out_conv = nn.Conv2d( + features, + out_features, + kernel_size=1, + stride=1, + padding=0, + bias=True, + groups=1, + ) + + self.resConfUnit1 = ResidualConvUnit_custom(features, activation, bn) + self.resConfUnit2 = ResidualConvUnit_custom(features, activation, bn) + + self.skip_add = nn.quantized.FloatFunctional() + + def forward(self, *xs): + """Forward pass. + Returns: + tensor: output + """ + output = xs[0] + + if len(xs) == 2: + res = self.resConfUnit1(xs[1]) + if self.width_ratio != 1: + res = F.interpolate(res, size=(output.shape[2], output.shape[3]), mode='bilinear') + + output = self.skip_add.add(output, res) + # output += res + + output = self.resConfUnit2(output) + + if self.width_ratio != 1: + # and output.shape[3] < self.width_ratio * output.shape[2] + #size=(image.shape[]) + if (output.shape[3] / output.shape[2]) < (2 / 3) * self.width_ratio: + shape = 3 * output.shape[3] + else: + shape = int(self.width_ratio * 2 * output.shape[2]) + output = F.interpolate(output, size=(2* output.shape[2], shape), mode='bilinear') + else: + output = nn.functional.interpolate(output, scale_factor=2, + mode="bilinear", align_corners=self.align_corners) + output = self.out_conv(output) + return output + +def make_fusion_block(features, use_bn, width_ratio=1): + return FeatureFusionBlock_custom( + features, + nn.ReLU(False), + deconv=False, + bn=use_bn, + expand=False, + align_corners=True, + width_ratio=width_ratio, + ) + +class Interpolate(nn.Module): + """Interpolation module.""" + + def __init__(self, scale_factor, mode, align_corners=False): + """Init. + Args: + scale_factor (float): scaling + mode (str): interpolation mode + """ + super(Interpolate, self).__init__() + + self.interp = nn.functional.interpolate + self.scale_factor = scale_factor + self.mode = mode + self.align_corners = align_corners + + def forward(self, x): + """Forward pass. 
+        Args:
+            x (tensor): input
+        Returns:
+            tensor: interpolated data
+        """
+
+        x = self.interp(
+            x,
+            scale_factor=self.scale_factor,
+            mode=self.mode,
+            align_corners=self.align_corners,
+        )
+
+        return x
+
+class DPTOutputAdapter(nn.Module):
+    """DPT output adapter.
+
+    :param num_channels: Number of output channels
+    :param stride_level: Stride level compared to the full-sized image.
+        E.g. 4 for 1/4th the size of the image.
+    :param patch_size: Int or tuple of the patch size over the full image size.
+        Patch size for smaller inputs will be computed accordingly.
+    :param hooks: Index of intermediate layers
+    :param layer_dims: Dimension of intermediate layers
+    :param feature_dim: Feature dimension
+    :param last_dim: out_channels/in_channels for the last two Conv2d when head_type == regression
+    :param use_bn: If set to True, activates batch norm
+    :param dim_tokens_enc: Dimension of tokens coming from encoder
+    """
+
+    def __init__(self,
+                 num_channels: int = 1,
+                 stride_level: int = 1,
+                 patch_size: Union[int, Tuple[int, int]] = 16,
+                 main_tasks: Iterable[str] = ('rgb',),
+                 hooks: List[int] = [2, 5, 8, 11],
+                 layer_dims: List[int] = [96, 192, 384, 768],
+                 feature_dim: int = 256,
+                 last_dim: int = 32,
+                 use_bn: bool = False,
+                 dim_tokens_enc: Optional[int] = None,
+                 head_type: str = 'regression',
+                 output_width_ratio=1,
+                 **kwargs):
+        super().__init__()
+        self.num_channels = num_channels
+        self.stride_level = stride_level
+        self.patch_size = pair(patch_size)
+        self.main_tasks = main_tasks
+        self.hooks = hooks
+        self.layer_dims = layer_dims
+        self.feature_dim = feature_dim
+        self.dim_tokens_enc = dim_tokens_enc * len(self.main_tasks) if dim_tokens_enc is not None else None
+        self.head_type = head_type
+
+        # Actual patch height and width, taking into account stride of input
+        self.P_H = max(1, self.patch_size[0] // stride_level)
+        self.P_W = max(1, self.patch_size[1] // stride_level)
+
+        self.scratch = make_scratch(layer_dims, feature_dim, groups=1, expand=False)
+
+        self.scratch.refinenet1 = make_fusion_block(feature_dim, use_bn, output_width_ratio)
+        self.scratch.refinenet2 = make_fusion_block(feature_dim, use_bn, output_width_ratio)
+        self.scratch.refinenet3 = make_fusion_block(feature_dim, use_bn, output_width_ratio)
+        self.scratch.refinenet4 = make_fusion_block(feature_dim, use_bn, output_width_ratio)
+
+        if self.head_type == 'regression':
+            # The "DPTDepthModel" head
+            self.head = nn.Sequential(
+                nn.Conv2d(feature_dim, feature_dim // 2, kernel_size=3, stride=1, padding=1),
+                Interpolate(scale_factor=2, mode="bilinear", align_corners=True),
+                nn.Conv2d(feature_dim // 2, last_dim, kernel_size=3, stride=1, padding=1),
+                nn.ReLU(True),
+                nn.Conv2d(last_dim, self.num_channels, kernel_size=1, stride=1, padding=0)
+            )
+        elif self.head_type == 'semseg':
+            # The "DPTSegmentationModel" head
+            self.head = nn.Sequential(
+                nn.Conv2d(feature_dim, feature_dim, kernel_size=3, padding=1, bias=False),
+                nn.BatchNorm2d(feature_dim) if use_bn else nn.Identity(),
+                nn.ReLU(True),
+                nn.Dropout(0.1, False),
+                nn.Conv2d(feature_dim, self.num_channels, kernel_size=1),
+                Interpolate(scale_factor=2, mode="bilinear", align_corners=True),
+            )
+        else:
+            raise ValueError('DPT head_type must be "regression" or "semseg".')
+
+        if self.dim_tokens_enc is not None:
+            self.init(dim_tokens_enc=dim_tokens_enc)
+
+    def init(self, dim_tokens_enc=768):
+        """
+        Initialize parts of decoder that are dependent on dimension of encoder tokens.
+        Should be called when setting up MultiMAE.
+ + :param dim_tokens_enc: Dimension of tokens coming from encoder + """ + #print(dim_tokens_enc) + + # Set up activation postprocessing layers + if isinstance(dim_tokens_enc, int): + dim_tokens_enc = 4 * [dim_tokens_enc] + + self.dim_tokens_enc = [dt * len(self.main_tasks) for dt in dim_tokens_enc] + + self.act_1_postprocess = nn.Sequential( + nn.Conv2d( + in_channels=self.dim_tokens_enc[0], + out_channels=self.layer_dims[0], + kernel_size=1, stride=1, padding=0, + ), + nn.ConvTranspose2d( + in_channels=self.layer_dims[0], + out_channels=self.layer_dims[0], + kernel_size=4, stride=4, padding=0, + bias=True, dilation=1, groups=1, + ) + ) + + self.act_2_postprocess = nn.Sequential( + nn.Conv2d( + in_channels=self.dim_tokens_enc[1], + out_channels=self.layer_dims[1], + kernel_size=1, stride=1, padding=0, + ), + nn.ConvTranspose2d( + in_channels=self.layer_dims[1], + out_channels=self.layer_dims[1], + kernel_size=2, stride=2, padding=0, + bias=True, dilation=1, groups=1, + ) + ) + + self.act_3_postprocess = nn.Sequential( + nn.Conv2d( + in_channels=self.dim_tokens_enc[2], + out_channels=self.layer_dims[2], + kernel_size=1, stride=1, padding=0, + ) + ) + + self.act_4_postprocess = nn.Sequential( + nn.Conv2d( + in_channels=self.dim_tokens_enc[3], + out_channels=self.layer_dims[3], + kernel_size=1, stride=1, padding=0, + ), + nn.Conv2d( + in_channels=self.layer_dims[3], + out_channels=self.layer_dims[3], + kernel_size=3, stride=2, padding=1, + ) + ) + + self.act_postprocess = nn.ModuleList([ + self.act_1_postprocess, + self.act_2_postprocess, + self.act_3_postprocess, + self.act_4_postprocess + ]) + + def adapt_tokens(self, encoder_tokens): + # Adapt tokens + x = [] + x.append(encoder_tokens[:, :]) + x = torch.cat(x, dim=-1) + return x + + def forward(self, encoder_tokens: List[torch.Tensor], image_size): + #input_info: Dict): + assert self.dim_tokens_enc is not None, 'Need to call init(dim_tokens_enc) function first' + H, W = image_size + + # Number of patches in height and width + N_H = H // (self.stride_level * self.P_H) + N_W = W // (self.stride_level * self.P_W) + + # Hook decoder onto 4 layers from specified ViT layers + layers = [encoder_tokens[hook] for hook in self.hooks] + + # Extract only task-relevant tokens and ignore global tokens. + layers = [self.adapt_tokens(l) for l in layers] + + # Reshape tokens to spatial representation + layers = [rearrange(l, 'b (nh nw) c -> b c nh nw', nh=N_H, nw=N_W) for l in layers] + + layers = [self.act_postprocess[idx](l) for idx, l in enumerate(layers)] + # Project layers to chosen feature dim + layers = [self.scratch.layer_rn[idx](l) for idx, l in enumerate(layers)] + + # Fuse layers using refinement stages + path_4 = self.scratch.refinenet4(layers[3]) + path_3 = self.scratch.refinenet3(path_4, layers[2]) + path_2 = self.scratch.refinenet2(path_3, layers[1]) + path_1 = self.scratch.refinenet1(path_2, layers[0]) + + # Output head + out = self.head(path_1) + + return out diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/head_downstream.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/head_downstream.py new file mode 100644 index 0000000000000000000000000000000000000000..bd40c91ba244d6c3522c6efd4ed4d724b7bdc650 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/head_downstream.py @@ -0,0 +1,58 @@ +# Copyright (C) 2022-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). 
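Shape walk-through for DPTOutputAdapter.forward above: each hooked token map (B, N, C) is reshaped into a spatial map before the act_{1..4}_postprocess stages bring the four maps to strides 4, 8, 16 and 32 for coarse-to-fine fusion. A small sketch with illustrative sizes (einops as imported in dpt_block.py):

    import torch
    from einops import rearrange

    H, W, patch = 224, 224, 16
    N_H, N_W = H // patch, W // patch            # 14 x 14 patches
    tokens = torch.randn(2, N_H * N_W, 768)      # one hooked layer: (B, N, C)
    fmap = rearrange(tokens, 'b (nh nw) c -> b c nh nw', nh=N_H, nw=N_W)
    print(fmap.shape)                            # torch.Size([2, 768, 14, 14])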
+
+# --------------------------------------------------------
+# Heads for downstream tasks
+# --------------------------------------------------------
+
+"""
+A head is a module where the __init__ defines only the head hyperparameters.
+A method setup(croconet) takes a CroCoNet and sets all layers according to the head and croconet attributes.
+The forward takes the features as well as a dictionary img_info containing the keys 'width' and 'height'
+"""
+
+import torch
+import torch.nn as nn
+from .dpt_block import DPTOutputAdapter
+
+
+class PixelwiseTaskWithDPT(nn.Module):
+    """ DPT module for CroCo.
+    by default, hooks_idx will be equal to:
+    * for encoder-only: 4 equally spread layers
+    * for encoder+decoder: last encoder + 3 equally spread layers of the decoder
+    """
+
+    def __init__(self, *, hooks_idx=None, layer_dims=[96,192,384,768],
+                 output_width_ratio=1, num_channels=1, postprocess=None, **kwargs):
+        super(PixelwiseTaskWithDPT, self).__init__()
+        self.return_all_blocks = True # backbone needs to return all layers
+        self.postprocess = postprocess
+        self.output_width_ratio = output_width_ratio
+        self.num_channels = num_channels
+        self.hooks_idx = hooks_idx
+        self.layer_dims = layer_dims
+
+    def setup(self, croconet):
+        dpt_args = {'output_width_ratio': self.output_width_ratio, 'num_channels': self.num_channels}
+        if self.hooks_idx is None:
+            if hasattr(croconet, 'dec_blocks'): # encoder + decoder
+                step = {8: 3, 12: 4, 24: 8}[croconet.dec_depth]
+                hooks_idx = [croconet.dec_depth+croconet.enc_depth-1-i*step for i in range(3,-1,-1)]
+            else: # encoder only
+                step = croconet.enc_depth//4
+                hooks_idx = [croconet.enc_depth-1-i*step for i in range(3,-1,-1)]
+            self.hooks_idx = hooks_idx
+            print(f'  PixelwiseTaskWithDPT: automatically setting hook_idxs={self.hooks_idx}')
+        dpt_args['hooks'] = self.hooks_idx
+        dpt_args['layer_dims'] = self.layer_dims
+        self.dpt = DPTOutputAdapter(**dpt_args)
+        dim_tokens = [croconet.enc_embed_dim if hook < croconet.enc_depth else croconet.dec_embed_dim for hook in self.hooks_idx]
+        self.dpt.init(dim_tokens_enc=dim_tokens)
+
+    def forward(self, x, img_info):
+        out = self.dpt(x, image_size=(img_info[0], img_info[1]))
+        if self.postprocess: out = self.postprocess(out)
+        return out
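A quick check of the automatic hook selection in setup() above, for an encoder-only backbone: with enc_depth = 12 the formula reproduces the default hooks [2, 5, 8, 11] used by DPTOutputAdapter in dpt_block.py.

    enc_depth = 12
    step = enc_depth // 4                                       # 3
    hooks_idx = [enc_depth - 1 - i * step for i in range(3, -1, -1)]
    print(hooks_idx)                                            # [2, 5, 8, 11]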
diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/pos_embed.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/pos_embed.py
new file mode 100644
--- /dev/null
+++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/croco/pos_embed.py
+# Copyright (C) 2022-present Naver Corporation. All rights reserved.
+# Licensed under CC BY-NC-SA 4.0 (non-commercial use only).
+
+# --------------------------------------------------------
+# Position embedding utils
+# --------------------------------------------------------
+
+import numpy as np
+
+import torch
+
+# --------------------------------------------------------
+# 2D sine-cosine position embedding
+# References:
+# MAE: https://github.com/facebookresearch/mae/blob/main/util/pos_embed.py
+# --------------------------------------------------------
+def get_2d_sincos_pos_embed(embed_dim, grid_size, n_cls_token=0):
+    """
+    grid_size: int of the grid height and width
+    return:
+    pos_embed: [grid_size*grid_size, embed_dim] or [n_cls_token+grid_size*grid_size, embed_dim] (w/ or w/o cls_token)
+    """
+    grid_h = np.arange(grid_size, dtype=np.float32)
+    grid_w = np.arange(grid_size, dtype=np.float32)
+    grid = np.meshgrid(grid_w, grid_h)  # here w goes first
+    grid = np.stack(grid, axis=0)
+
+    grid = grid.reshape([2, 1, grid_size, grid_size])
+    pos_embed = get_2d_sincos_pos_embed_from_grid(embed_dim, grid)
+    if n_cls_token > 0:
+        pos_embed = np.concatenate([np.zeros([n_cls_token, embed_dim]), pos_embed], axis=0)
+    return pos_embed
+
+
+def get_2d_sincos_pos_embed_from_grid(embed_dim, grid):
+    assert embed_dim % 2 == 0
+
+    # use half of dimensions to encode grid_h
+    emb_h = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[0])  # (H*W, D/2)
+    emb_w = get_1d_sincos_pos_embed_from_grid(embed_dim // 2, grid[1])  # (H*W, D/2)
+
+    emb = np.concatenate([emb_h, emb_w], axis=1)  # (H*W, D)
+    return emb
+
+
+def get_1d_sincos_pos_embed_from_grid(embed_dim, pos):
+    """
+    embed_dim: output dimension for each position
+    pos: a list of positions to be encoded: size (M,)
+    out: (M, D)
+    """
+    assert embed_dim % 2 == 0
+    omega = np.arange(embed_dim // 2, dtype=float)
+    omega /= embed_dim / 2.
+    omega = 1. / 10000**omega  # (D/2,)
+
+    pos = pos.reshape(-1)  # (M,)
+    out = np.einsum('m,d->md', pos, omega)  # (M, D/2), outer product
+
+    emb_sin = np.sin(out)  # (M, D/2)
+    emb_cos = np.cos(out)  # (M, D/2)
+
+    emb = np.concatenate([emb_sin, emb_cos], axis=1)  # (M, D)
+    return emb
+
+
+# --------------------------------------------------------
+# Interpolate position embeddings for high-resolution
+# References:
+# MAE: https://github.com/facebookresearch/mae/blob/main/util/pos_embed.py
+# DeiT: https://github.com/facebookresearch/deit
+# --------------------------------------------------------
+def interpolate_pos_embed(model, checkpoint_model):
+    if 'pos_embed' in checkpoint_model:
+        pos_embed_checkpoint = checkpoint_model['pos_embed']
+        embedding_size = pos_embed_checkpoint.shape[-1]
+        num_patches = model.patch_embed.num_patches
+        num_extra_tokens = model.pos_embed.shape[-2] - num_patches
+        # height (== width) for the checkpoint position embedding
+        orig_size = int((pos_embed_checkpoint.shape[-2] - num_extra_tokens) ** 0.5)
+        # height (== width) for the new position embedding
+        new_size = int(num_patches ** 0.5)
+        # class_token and dist_token are kept unchanged
+        if orig_size != new_size:
+            print("Position interpolate from %dx%d to %dx%d" % (orig_size, orig_size, new_size, new_size))
+            extra_tokens = pos_embed_checkpoint[:, :num_extra_tokens]
+            # only the position tokens are interpolated
+            pos_tokens = pos_embed_checkpoint[:, num_extra_tokens:]
+            pos_tokens = pos_tokens.reshape(-1, orig_size, orig_size, embedding_size).permute(0, 3, 1, 2)
+            pos_tokens = torch.nn.functional.interpolate(
+                pos_tokens, size=(new_size, new_size), mode='bicubic', align_corners=False)
+            pos_tokens = pos_tokens.permute(0, 2, 3, 1).flatten(1, 2)
+            new_pos_embed = torch.cat((extra_tokens, pos_tokens), dim=1)
+            checkpoint_model['pos_embed'] = new_pos_embed
+
+
+#----------------------------------------------------------
+# RoPE2D: RoPE implementation in 2D
+#----------------------------------------------------------
+
+try:
+    from models.curope import cuRoPE2D
+    RoPE2D = cuRoPE2D
+except ImportError:
+    print('Warning, cannot find cuda-compiled version of RoPE2D, using a slow pytorch version instead')
+
+    class RoPE2D(torch.nn.Module):
+
+        def __init__(self, freq=100.0, F0=1.0):
+            super().__init__()
+            self.base = freq
+            self.F0 = F0
+            self.cache = {}
+
+        def get_cos_sin(self, D, seq_len, device, dtype):
+            if (D,seq_len,device,dtype) not in self.cache:
+                inv_freq = 1.0 / (self.base ** (torch.arange(0, D, 2).float().to(device) / D))
+                t = torch.arange(seq_len, device=device, dtype=inv_freq.dtype)
+                freqs = torch.einsum("i,j->ij", t, inv_freq).to(dtype)
+                freqs = torch.cat((freqs, freqs), dim=-1)
+                cos = freqs.cos()  # (Seq, Dim)
+                sin = freqs.sin()
+                self.cache[D,seq_len,device,dtype] = (cos,sin)
+            return self.cache[D,seq_len,device,dtype]
+
+        @staticmethod
+        def rotate_half(x):
+            x1, x2 = x[..., : x.shape[-1] // 2], x[..., x.shape[-1] // 2 :]
+            return torch.cat((-x2, x1), dim=-1)
+
+        def apply_rope1d(self, tokens, pos1d, cos, sin):
+            assert pos1d.ndim==2
+            cos = torch.nn.functional.embedding(pos1d, cos)[:, None, :, :]
+            sin = torch.nn.functional.embedding(pos1d, sin)[:, None, :, :]
+            return (tokens * cos) + (self.rotate_half(tokens) * sin)
+
+        def forward(self, tokens, positions):
+            """
+            input:
+                * tokens: batch_size x nheads x ntokens x dim
+                * positions: batch_size x ntokens x 2 (y and x position of each token)
+            output:
+                * tokens after applying RoPE2D (batch_size x nheads x ntokens x dim)
+            """
+            assert
tokens.size(3)%2==0, "number of dimensions should be a multiple of two" + D = tokens.size(3) // 2 + assert positions.ndim==3 and positions.shape[-1] == 2 # Batch, Seq, 2 + cos, sin = self.get_cos_sin(D, int(positions.max())+1, tokens.device, tokens.dtype) + # split features into two along the feature dimension, and apply rope1d on each half + y, x = tokens.chunk(2, dim=-1) + y = self.apply_rope1d(y, positions[:,:,0], cos, sin) + x = self.apply_rope1d(x, positions[:,:,1], cos, sin) + tokens = torch.cat((y, x), dim=-1) + return tokens \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/__init__.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a32692113d830ddc4af4e6ed608f222fbe062e6e --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/__init__.py @@ -0,0 +1,2 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/cloud_opt/__init__.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/cloud_opt/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..fc597c702861154bbe7a08f23b089474e926bb35 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/cloud_opt/__init__.py @@ -0,0 +1,29 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). +# +# -------------------------------------------------------- +# global alignment optimization wrapper function +# -------------------------------------------------------- +from enum import Enum + +from .optimizer import PointCloudOptimizer +from .pair_viewer import PairViewer + + +class GlobalAlignerMode(Enum): + PointCloudOptimizer = "PointCloudOptimizer" + PairViewer = "PairViewer" + + +def global_aligner(dust3r_output, device, mode=GlobalAlignerMode.PointCloudOptimizer, **optim_kw): + # extract all inputs + view1, view2, pred1, pred2 = [dust3r_output[k] for k in 'view1 view2 pred1 pred2'.split()] + # build the optimizer + if mode == GlobalAlignerMode.PointCloudOptimizer: + net = PointCloudOptimizer(view1, view2, pred1, pred2, **optim_kw).to(device) + elif mode == GlobalAlignerMode.PairViewer: + net = PairViewer(view1, view2, pred1, pred2, **optim_kw).to(device) + else: + raise NotImplementedError(f'Unknown mode {mode}') + + return net diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/cloud_opt/base_opt.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/cloud_opt/base_opt.py new file mode 100644 index 0000000000000000000000000000000000000000..0869abd75b19ad441e5c32a27f34973edf1f9795 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/cloud_opt/base_opt.py @@ -0,0 +1,375 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). 
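For orientation, the global_aligner factory defined in cloud_opt/__init__.py above is typically driven as follows; `output` is assumed to be a dust3r-style inference dict with keys 'view1', 'view2', 'pred1', 'pred2' (a sketch, not part of the patch):

    import torch
    from dust3r.cloud_opt import global_aligner, GlobalAlignerMode

    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    scene = global_aligner(output, device=device, mode=GlobalAlignerMode.PointCloudOptimizer)
    scene.compute_global_alignment(init='mst', niter=300, schedule='cosine', lr=0.01)
    poses = scene.get_im_poses()   # (n_imgs, 4, 4) cam-to-world matrices
    pts3d = scene.get_pts3d()      # list of per-image (H, W, 3) point maps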
+#
+# --------------------------------------------------------
+# Base class for the global alignment procedure
+# --------------------------------------------------------
+from copy import deepcopy
+
+import numpy as np
+import torch
+import torch.nn as nn
+import roma
+import tqdm
+
+from dust3r.utils.geometry import inv, geotrf
+from dust3r.utils.device import to_numpy
+from dust3r.utils.image import rgb
+from dust3r.viz import SceneViz, segment_sky, auto_cam_size
+from dust3r.optim_factory import adjust_learning_rate_by_lr
+
+from dust3r.cloud_opt.commons import (edge_str, ALL_DISTS, NoGradParamDict, get_imshapes, signed_expm1, signed_log1p,
+                                      cosine_schedule, linear_schedule, get_conf_trf)
+import dust3r.cloud_opt.init_im_poses as init_fun
+
+
+class BasePCOptimizer (nn.Module):
+    """ Optimize a global scene, given a list of pairwise observations.
+    Graph node: images
+    Graph edges: observations = (pred1, pred2)
+    """
+
+    def __init__(self, *args, **kwargs):
+        if len(args) == 1 and len(kwargs) == 0:
+            other = deepcopy(args[0])
+            attrs = '''edges is_symmetrized dist n_imgs pred_i pred_j imshapes
+                        min_conf_thr conf_thr conf_i conf_j im_conf
+                        base_scale norm_pw_scale POSE_DIM pw_poses
+                        pw_adaptors has_im_poses rand_pose imgs'''.split()
+            self.__dict__.update({k: other[k] for k in attrs})
+        else:
+            self._init_from_views(*args, **kwargs)
+
+    def _init_from_views(self, view1, view2, pred1, pred2,
+                         dist='l1',
+                         conf='log',
+                         min_conf_thr=3,
+                         base_scale=0.5,
+                         allow_pw_adaptors=False,
+                         pw_break=20,
+                         rand_pose=torch.randn,
+                         iterationsCount=None):
+        super().__init__()
+        if not isinstance(view1['idx'], list):
+            view1['idx'] = view1['idx'].tolist()
+        if not isinstance(view2['idx'], list):
+            view2['idx'] = view2['idx'].tolist()
+        self.edges = [(int(i), int(j)) for i, j in zip(view1['idx'], view2['idx'])]
+        self.is_symmetrized = set(self.edges) == {(j, i) for i, j in self.edges}
+        self.dist = ALL_DISTS[dist]
+
+        self.n_imgs = self._check_edges()
+
+        # input data
+        pred1_pts = pred1['pts3d']
+        pred2_pts = pred2['pts3d_in_other_view']
+        self.pred_i = NoGradParamDict({ij: pred1_pts[n] for n, ij in enumerate(self.str_edges)})
+        self.pred_j = NoGradParamDict({ij: pred2_pts[n] for n, ij in enumerate(self.str_edges)})
+        self.imshapes = get_imshapes(self.edges, pred1_pts, pred2_pts)
+
+        # work in log-scale with conf
+        pred1_conf = pred1['conf']
+        pred2_conf = pred2['conf']
+        self.min_conf_thr = min_conf_thr
+        self.conf_trf = get_conf_trf(conf)
+
+        self.conf_i = NoGradParamDict({ij: pred1_conf[n] for n, ij in enumerate(self.str_edges)})
+        self.conf_j = NoGradParamDict({ij: pred2_conf[n] for n, ij in enumerate(self.str_edges)})
+        self.im_conf = self._compute_img_conf(pred1_conf, pred2_conf)
+
+        # pairwise pose parameters
+        self.base_scale = base_scale
+        self.norm_pw_scale = True
+        self.pw_break = pw_break
+        self.POSE_DIM = 7
+        self.pw_poses = nn.Parameter(rand_pose((self.n_edges, 1+self.POSE_DIM)))  # pairwise poses
+        self.pw_adaptors = nn.Parameter(torch.zeros((self.n_edges, 2)))  # slight xy/z adaptation
+        self.pw_adaptors.requires_grad_(allow_pw_adaptors)
+        self.has_im_poses = False
+        self.rand_pose = rand_pose
+
+        # possibly store images for show_pointcloud
+        self.imgs = None
+        if 'img' in view1 and 'img' in view2:
+            imgs = [torch.zeros((3,)+hw) for hw in self.imshapes]
+            for v in range(len(self.edges)):
+                idx = view1['idx'][v]
+                imgs[idx] = view1['img'][v]
+                idx = view2['idx'][v]
+                imgs[idx] = view2['img'][v]
+            self.imgs = rgb(imgs)
+
+    @property
+    def
n_edges(self): + return len(self.edges) + + @property + def str_edges(self): + return [edge_str(i, j) for i, j in self.edges] + + @property + def imsizes(self): + return [(w, h) for h, w in self.imshapes] + + @property + def device(self): + return next(iter(self.parameters())).device + + def state_dict(self, trainable=True): + all_params = super().state_dict() + return {k: v for k, v in all_params.items() if k.startswith(('_', 'pred_i.', 'pred_j.', 'conf_i.', 'conf_j.')) != trainable} + + def load_state_dict(self, data): + return super().load_state_dict(self.state_dict(trainable=False) | data) + + def _check_edges(self): + indices = sorted({i for edge in self.edges for i in edge}) + assert indices == list(range(len(indices))), 'bad pair indices: missing values ' + return len(indices) + + @torch.no_grad() + def _compute_img_conf(self, pred1_conf, pred2_conf): + im_conf = nn.ParameterList([torch.zeros(hw, device=self.device) for hw in self.imshapes]) + for e, (i, j) in enumerate(self.edges): + im_conf[i] = torch.maximum(im_conf[i], pred1_conf[e]) + im_conf[j] = torch.maximum(im_conf[j], pred2_conf[e]) + return im_conf + + def get_adaptors(self): + adapt = self.pw_adaptors + adapt = torch.cat((adapt[:, 0:1], adapt), dim=-1) # (scale_xy, scale_xy, scale_z) + if self.norm_pw_scale: # normalize so that the product == 1 + adapt = adapt - adapt.mean(dim=1, keepdim=True) + return (adapt / self.pw_break).exp() + + def _get_poses(self, poses): + # normalize rotation + Q = poses[:, :4] + T = signed_expm1(poses[:, 4:7]) + RT = roma.RigidUnitQuat(Q, T).normalize().to_homogeneous() + return RT + + def _set_pose(self, poses, idx, R, T=None, scale=None, force=False): + # all poses == cam-to-world + pose = poses[idx] + if not (pose.requires_grad or force): + return pose + + if R.shape == (4, 4): + assert T is None + T = R[:3, 3] + R = R[:3, :3] + + if R is not None: + pose.data[0:4] = roma.rotmat_to_unitquat(R) + if T is not None: + pose.data[4:7] = signed_log1p(T / (scale or 1)) # translation is function of scale + + if scale is not None: + assert poses.shape[-1] in (8, 13) + pose.data[-1] = np.log(float(scale)) + return pose + + def get_pw_norm_scale_factor(self): + if self.norm_pw_scale: + # normalize scales so that things cannot go south + # we want that exp(scale) ~= self.base_scale + return (np.log(self.base_scale) - self.pw_poses[:, -1].mean()).exp() + else: + return 1 # don't norm scale for known poses + + def get_pw_scale(self): + scale = self.pw_poses[:, -1].exp() # (n_edges,) + scale = scale * self.get_pw_norm_scale_factor() + return scale + + def get_pw_poses(self): # cam to world + RT = self._get_poses(self.pw_poses) + scaled_RT = RT.clone() + scaled_RT[:, :3] *= self.get_pw_scale().view(-1, 1, 1) # scale the rotation AND translation + return scaled_RT + + def get_masks(self): + return [(conf > self.min_conf_thr) for conf in self.im_conf] + + def depth_to_pts3d(self): + raise NotImplementedError() + + def get_pts3d(self, raw=False): + res = self.depth_to_pts3d() + if not raw: + res = [dm[:h*w].view(h, w, 3) for dm, (h, w) in zip(res, self.imshapes)] + return res + + def _set_focal(self, idx, focal, force=False): + raise NotImplementedError() + + def get_focals(self): + raise NotImplementedError() + + def get_known_focal_mask(self): + raise NotImplementedError() + + def get_principal_points(self): + raise NotImplementedError() + + def get_conf(self, mode=None): + trf = self.conf_trf if mode is None else get_conf_trf(mode) + return [trf(c) for c in self.im_conf] + + def get_im_poses(self): + 
raise NotImplementedError() + + def _set_depthmap(self, idx, depth, force=False): + raise NotImplementedError() + + def get_depthmaps(self, raw=False): + raise NotImplementedError() + + @torch.no_grad() + def clean_pointcloud(self, tol=0.001, max_bad_conf=0): + """ Method: + 1) express all 3d points in each camera coordinate frame + 2) if they're in front of a depthmap --> then lower their confidence + """ + assert 0 <= tol < 1 + cams = inv(self.get_im_poses()) + K = self.get_intrinsics() + depthmaps = self.get_depthmaps() + res = deepcopy(self) + + for i, pts3d in enumerate(self.depth_to_pts3d()): + for j in range(self.n_imgs): + if i == j: + continue + + # project 3dpts in other view + Hi, Wi = self.imshapes[i] + Hj, Wj = self.imshapes[j] + proj = geotrf(cams[j], pts3d[:Hi*Wi]).reshape(Hi, Wi, 3) + proj_depth = proj[:, :, 2] + u, v = geotrf(K[j], proj, norm=1, ncol=2).round().long().unbind(-1) + + # check which points are actually in the visible cone + msk_i = (proj_depth > 0) & (0 <= u) & (u < Wj) & (0 <= v) & (v < Hj) + msk_j = v[msk_i], u[msk_i] + + # find bad points = those in front but less confident + bad_points = (proj_depth[msk_i] < (1-tol) * depthmaps[j][msk_j] + ) & (res.im_conf[i][msk_i] < res.im_conf[j][msk_j]) + + bad_msk_i = msk_i.clone() + bad_msk_i[msk_i] = bad_points + res.im_conf[i][bad_msk_i] = res.im_conf[i][bad_msk_i].clip_(max=max_bad_conf) + + return res + + def forward(self, ret_details=False): + pw_poses = self.get_pw_poses() # cam-to-world + pw_adapt = self.get_adaptors() + proj_pts3d = self.get_pts3d() + # pre-compute pixel weights + weight_i = {i_j: self.conf_trf(c) for i_j, c in self.conf_i.items()} + weight_j = {i_j: self.conf_trf(c) for i_j, c in self.conf_j.items()} + + loss = 0 + if ret_details: + details = -torch.ones((self.n_imgs, self.n_imgs)) + + for e, (i, j) in enumerate(self.edges): + i_j = edge_str(i, j) + # distance in image i and j + aligned_pred_i = geotrf(pw_poses[e], pw_adapt[e] * self.pred_i[i_j]) + aligned_pred_j = geotrf(pw_poses[e], pw_adapt[e] * self.pred_j[i_j]) + li = self.dist(proj_pts3d[i], aligned_pred_i, weight=weight_i[i_j]).mean() + lj = self.dist(proj_pts3d[j], aligned_pred_j, weight=weight_j[i_j]).mean() + loss = loss + li + lj + + if ret_details: + details[i, j] = li + lj + loss /= self.n_edges # average over all pairs + + if ret_details: + return loss, details + return loss + + def compute_global_alignment(self, init=None, niter_PnP=10, **kw): + if init is None: + pass + elif init == 'msp' or init == 'mst': + init_fun.init_minimum_spanning_tree(self, niter_PnP=niter_PnP) + elif init == 'known_poses': + init_fun.init_from_known_poses(self, min_conf_thr=self.min_conf_thr, niter_PnP=niter_PnP) + else: + raise ValueError(f'bad value for {init=}') + + global_alignment_loop(self, **kw) + + @torch.no_grad() + def mask_sky(self): + res = deepcopy(self) + for i in range(self.n_imgs): + sky = segment_sky(self.imgs[i]) + res.im_conf[i][sky] = 0 + return res + + def show(self, show_pw_cams=False, show_pw_pts3d=False, cam_size=None, **kw): + viz = SceneViz() + if self.imgs is None: + colors = np.random.randint(0, 256, size=(self.n_imgs, 3)) + colors = list(map(tuple, colors.tolist())) + for n in range(self.n_imgs): + viz.add_pointcloud(self.get_pts3d()[n], colors[n], self.get_masks()[n]) + else: + viz.add_pointcloud(self.get_pts3d(), self.imgs, self.get_masks()) + colors = np.random.randint(256, size=(self.n_imgs, 3)) + + # camera poses + im_poses = to_numpy(self.get_im_poses()) + if cam_size is None: + cam_size = auto_cam_size(im_poses) 
+ viz.add_cameras(im_poses, self.get_focals(), colors=colors, + images=self.imgs, imsizes=self.imsizes, cam_size=cam_size) + if show_pw_cams: + pw_poses = self.get_pw_poses() + viz.add_cameras(pw_poses, color=(192, 0, 192), cam_size=cam_size) + + if show_pw_pts3d: + pts = [geotrf(pw_poses[e], self.pred_i[edge_str(i, j)]) for e, (i, j) in enumerate(self.edges)] + viz.add_pointcloud(pts, (128, 0, 128)) + + viz.show(**kw) + return viz + + +def global_alignment_loop(net, lr=0.01, niter=300, schedule='cosine', lr_min=1e-6, verbose=False): + params = [p for p in net.parameters() if p.requires_grad] + if not params: + return net + + if verbose: + print([name for name, value in net.named_parameters() if value.requires_grad]) + + lr_base = lr + optimizer = torch.optim.Adam(params, lr=lr, betas=(0.9, 0.9)) + + with tqdm.tqdm(total=niter) as bar: + while bar.n < bar.total: + t = bar.n / bar.total + + if schedule == 'cosine': + lr = cosine_schedule(t, lr_base, lr_min) + elif schedule == 'linear': + lr = linear_schedule(t, lr_base, lr_min) + else: + raise ValueError(f'bad lr {schedule=}') + adjust_learning_rate_by_lr(optimizer, lr) + + optimizer.zero_grad() + loss = net() + loss.backward() + optimizer.step() + loss = float(loss) + bar.set_postfix_str(f'{lr=:g} loss={loss:g}') + bar.update() diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/cloud_opt/commons.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/cloud_opt/commons.py new file mode 100644 index 0000000000000000000000000000000000000000..3be9f855a69ea18c82dcc8e5769e0149a59649bd --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/cloud_opt/commons.py @@ -0,0 +1,90 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). 
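global_alignment_loop above anneals the learning rate with cosine_schedule / linear_schedule, defined at the bottom of this file. Restated self-contained, the cosine variant starts at lr_start, passes the midpoint at t = 0.5 and lands on lr_end:

    import numpy as np

    def cosine_schedule(t, lr_start, lr_end):        # as defined below
        return lr_end + (lr_start - lr_end) * (1 + np.cos(t * np.pi)) / 2

    for t in (0.0, 0.5, 1.0):
        print(t, cosine_schedule(t, 0.01, 1e-6))     # 0.01, ~0.005, 1e-6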
+# +# -------------------------------------------------------- +# utility functions for global alignment +# -------------------------------------------------------- +import torch +import torch.nn as nn +import numpy as np + + +def edge_str(i, j): + return f'{i}_{j}' + + +def i_j_ij(ij): + return edge_str(*ij), ij + + +def edge_conf(conf_i, conf_j, edge): + return float(conf_i[edge].mean() * conf_j[edge].mean()) + + +def compute_edge_scores(edges, conf_i, conf_j): + return {(i, j): edge_conf(conf_i, conf_j, e) for e, (i, j) in edges} + + +def NoGradParamDict(x): + assert isinstance(x, dict) + return nn.ParameterDict(x).requires_grad_(False) + + +def get_imshapes(edges, pred_i, pred_j): + n_imgs = max(max(e) for e in edges) + 1 + imshapes = [None] * n_imgs + for e, (i, j) in enumerate(edges): + shape_i = tuple(pred_i[e].shape[0:2]) + shape_j = tuple(pred_j[e].shape[0:2]) + if imshapes[i]: + assert imshapes[i] == shape_i, f'incorrect shape for image {i}' + if imshapes[j]: + assert imshapes[j] == shape_j, f'incorrect shape for image {j}' + imshapes[i] = shape_i + imshapes[j] = shape_j + return imshapes + + +def get_conf_trf(mode): + if mode == 'log': + def conf_trf(x): return x.log() + elif mode == 'sqrt': + def conf_trf(x): return x.sqrt() + elif mode == 'm1': + def conf_trf(x): return x-1 + elif mode in ('id', 'none'): + def conf_trf(x): return x + else: + raise ValueError(f'bad mode for {mode=}') + return conf_trf + + +def l2_dist(a, b, weight): + return ((a - b).square().sum(dim=-1) * weight) + + +def l1_dist(a, b, weight): + return ((a - b).norm(dim=-1) * weight) + + +ALL_DISTS = dict(l1=l1_dist, l2=l2_dist) + + +def signed_log1p(x): + sign = torch.sign(x) + return sign * torch.log1p(torch.abs(x)) + + +def signed_expm1(x): + sign = torch.sign(x) + return sign * torch.expm1(torch.abs(x)) + + +def cosine_schedule(t, lr_start, lr_end): + assert 0 <= t <= 1 + return lr_end + (lr_start - lr_end) * (1+np.cos(t * np.pi))/2 + + +def linear_schedule(t, lr_start, lr_end): + assert 0 <= t <= 1 + return lr_start + (lr_end - lr_start) * t diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/cloud_opt/init_im_poses.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/cloud_opt/init_im_poses.py new file mode 100644 index 0000000000000000000000000000000000000000..6ed6116be3b81ed5b483fa87dfb013e1e5f1d29a --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/cloud_opt/init_im_poses.py @@ -0,0 +1,312 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). 
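signed_log1p and signed_expm1 in commons.py above are exact inverses; base_opt.py uses them to store translations in a signed log space (see _set_pose / _get_poses). A minimal round-trip check:

    import torch

    x = torch.tensor([-5.0, -0.1, 0.0, 0.1, 5.0])
    y = torch.sign(x) * torch.log1p(torch.abs(x))        # signed_log1p
    x_back = torch.sign(y) * torch.expm1(torch.abs(y))   # signed_expm1
    print(torch.allclose(x, x_back))                     # True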
+#
+# --------------------------------------------------------
+# Initialization functions for global alignment
+# --------------------------------------------------------
+from functools import cache
+
+import numpy as np
+import scipy.sparse as sp
+import torch
+import cv2
+import roma
+from tqdm import tqdm
+
+from dust3r.utils.geometry import geotrf, inv, get_med_dist_between_poses
+from dust3r.post_process import estimate_focal_knowing_depth
+from dust3r.viz import to_numpy
+
+from dust3r.cloud_opt.commons import edge_str, i_j_ij, compute_edge_scores
+
+
+@torch.no_grad()
+def init_from_known_poses(self, niter_PnP=10, min_conf_thr=3):
+    device = self.device
+
+    # indices of known poses
+    nkp, known_poses_msk, known_poses = get_known_poses(self)
+    assert nkp == self.n_imgs, 'not all poses are known'
+
+    # get all focals
+    nkf, _, im_focals = get_known_focals(self)
+    assert nkf == self.n_imgs
+    im_pp = self.get_principal_points()
+
+    best_depthmaps = {}
+    # init all pairwise poses
+    for e, (i, j) in enumerate(tqdm(self.edges)):
+        i_j = edge_str(i, j)
+
+        # find relative pose for this pair
+        P1 = torch.eye(4, device=device)
+        msk = self.conf_i[i_j] > min(min_conf_thr, self.conf_i[i_j].min() - 0.1)
+        _, P2 = fast_pnp(self.pred_j[i_j], float(im_focals[i].mean()),
+                         pp=im_pp[i], msk=msk, device=device, niter_PnP=niter_PnP)
+
+        # align the two predicted cameras with the two gt cameras
+        s, R, T = align_multiple_poses(torch.stack((P1, P2)), known_poses[[i, j]])
+        # normally we have known_poses[i] ~= sRT_to_4x4(s,R,T,device) @ P1
+        # and geotrf(sRT_to_4x4(1,R,T,device), s*P2[:3,3])
+        self._set_pose(self.pw_poses, e, R, T, scale=s)
+
+        # remember if this is a good depthmap
+        score = float(self.conf_i[i_j].mean())
+        if score > best_depthmaps.get(i, (0,))[0]:
+            best_depthmaps[i] = score, i_j, s
+
+    # init all image poses
+    for n in range(self.n_imgs):
+        assert known_poses_msk[n]
+        _, i_j, scale = best_depthmaps[n]
+        depth = self.pred_i[i_j][:, :, 2]
+        self._set_depthmap(n, depth * scale)
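Both the pairwise and the global alignments in this file reduce to roma.rigid_points_registration (the `roma` imported here is the rotation/geometry library, not the ROMA matcher this directory vendors). Under the convention used by rigid_points_registration / sRT_to_4x4 further down, targets satisfy y ≈ s·R·x + T; a toy sanity check (sketch):

    import torch, roma

    pts_src = torch.randn(100, 3)
    R_gt = roma.random_rotmat()
    s_gt, t_gt = 2.0, torch.tensor([0.1, -0.3, 0.5])
    pts_dst = s_gt * pts_src @ R_gt.T + t_gt
    R, T, s = roma.rigid_points_registration(pts_src, pts_dst, compute_scaling=True)
    print(torch.allclose(s * pts_src @ R.T + T, pts_dst, atol=1e-4))  # True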
+ """ + device = self.device + pts3d, _, im_focals, im_poses = minimum_spanning_tree(self.imshapes, self.edges, + self.pred_i, self.pred_j, self.conf_i, self.conf_j, self.im_conf, self.min_conf_thr, + device, has_im_poses=self.has_im_poses, **kw) + + return init_from_pts3d(self, pts3d, im_focals, im_poses) + + +def init_from_pts3d(self, pts3d, im_focals, im_poses): + # init poses + nkp, known_poses_msk, known_poses = get_known_poses(self) + if nkp == 1: + raise NotImplementedError("Would be simpler to just align everything afterwards on the single known pose") + elif nkp > 1: + # global rigid SE3 alignment + s, R, T = align_multiple_poses(im_poses[known_poses_msk], known_poses[known_poses_msk]) + trf = sRT_to_4x4(s, R, T, device=known_poses.device) + + # rotate everything + im_poses = trf @ im_poses + im_poses[:, :3, :3] /= s # undo scaling on the rotation part + for img_pts3d in pts3d: + img_pts3d[:] = geotrf(trf, img_pts3d) + + # set all pairwise poses + for e, (i, j) in enumerate(self.edges): + i_j = edge_str(i, j) + # compute transform that goes from cam to world + s, R, T = rigid_points_registration(self.pred_i[i_j], pts3d[i], conf=self.conf_i[i_j]) + self._set_pose(self.pw_poses, e, R, T, scale=s) + + # take into account the scale normalization + s_factor = self.get_pw_norm_scale_factor() + im_poses[:, :3, 3] *= s_factor # apply downscaling factor + for img_pts3d in pts3d: + img_pts3d *= s_factor + + # init all image poses + if self.has_im_poses: + for i in range(self.n_imgs): + cam2world = im_poses[i] + depth = geotrf(inv(cam2world), pts3d[i])[..., 2] + self._set_depthmap(i, depth) + self._set_pose(self.im_poses, i, cam2world) + if im_focals[i] is not None: + self._set_focal(i, im_focals[i]) + + print(' init loss =', float(self())) + + +def minimum_spanning_tree(imshapes, edges, pred_i, pred_j, conf_i, conf_j, im_conf, min_conf_thr, + device, has_im_poses=True, niter_PnP=10): + n_imgs = len(imshapes) + sparse_graph = -dict_to_sparse_graph(compute_edge_scores(map(i_j_ij, edges), conf_i, conf_j)) + msp = sp.csgraph.minimum_spanning_tree(sparse_graph).tocoo() + + # temp variable to store 3d points + pts3d = [None] * len(imshapes) + + todo = sorted(zip(-msp.data, msp.row, msp.col)) # sorted edges + im_poses = [None] * n_imgs + im_focals = [None] * n_imgs + + # init with strongest edge + score, i, j = todo.pop() + print(f' init edge ({i}*,{j}*) {score=}') + i_j = edge_str(i, j) + pts3d[i] = pred_i[i_j].clone() + pts3d[j] = pred_j[i_j].clone() + done = {i, j} + if has_im_poses: + im_poses[i] = torch.eye(4, device=device) + im_focals[i] = estimate_focal(pred_i[i_j]) + + # set intial pointcloud based on pairwise graph + msp_edges = [(i, j)] + while todo: + # each time, predict the next one + score, i, j = todo.pop() + + if im_focals[i] is None: + im_focals[i] = estimate_focal(pred_i[i_j]) + + if i in done: + print(f' init edge ({i},{j}*) {score=}') + assert j not in done + # align pred[i] with pts3d[i], and then set j accordingly + i_j = edge_str(i, j) + s, R, T = rigid_points_registration(pred_i[i_j], pts3d[i], conf=conf_i[i_j]) + trf = sRT_to_4x4(s, R, T, device) + pts3d[j] = geotrf(trf, pred_j[i_j]) + done.add(j) + msp_edges.append((i, j)) + + if has_im_poses and im_poses[i] is None: + im_poses[i] = sRT_to_4x4(1, R, T, device) + + elif j in done: + print(f' init edge ({i}*,{j}) {score=}') + assert i not in done + i_j = edge_str(i, j) + s, R, T = rigid_points_registration(pred_j[i_j], pts3d[j], conf=conf_j[i_j]) + trf = sRT_to_4x4(s, R, T, device) + pts3d[i] = geotrf(trf, pred_i[i_j]) + 
+
+
+def minimum_spanning_tree(imshapes, edges, pred_i, pred_j, conf_i, conf_j, im_conf, min_conf_thr,
+                          device, has_im_poses=True, niter_PnP=10):
+    n_imgs = len(imshapes)
+    sparse_graph = -dict_to_sparse_graph(compute_edge_scores(map(i_j_ij, edges), conf_i, conf_j))
+    msp = sp.csgraph.minimum_spanning_tree(sparse_graph).tocoo()
+
+    # temp variable to store 3d points
+    pts3d = [None] * len(imshapes)
+
+    todo = sorted(zip(-msp.data, msp.row, msp.col))  # sorted edges
+    im_poses = [None] * n_imgs
+    im_focals = [None] * n_imgs
+
+    # init with strongest edge
+    score, i, j = todo.pop()
+    print(f' init edge ({i}*,{j}*) {score=}')
+    i_j = edge_str(i, j)
+    pts3d[i] = pred_i[i_j].clone()
+    pts3d[j] = pred_j[i_j].clone()
+    done = {i, j}
+    if has_im_poses:
+        im_poses[i] = torch.eye(4, device=device)
+        im_focals[i] = estimate_focal(pred_i[i_j])
+
+    # set initial pointcloud based on pairwise graph
+    msp_edges = [(i, j)]
+    while todo:
+        # each time, predict the next one
+        score, i, j = todo.pop()
+
+        if im_focals[i] is None:
+            im_focals[i] = estimate_focal(pred_i[i_j])
+
+        if i in done:
+            print(f' init edge ({i},{j}*) {score=}')
+            assert j not in done
+            # align pred[i] with pts3d[i], and then set j accordingly
+            i_j = edge_str(i, j)
+            s, R, T = rigid_points_registration(pred_i[i_j], pts3d[i], conf=conf_i[i_j])
+            trf = sRT_to_4x4(s, R, T, device)
+            pts3d[j] = geotrf(trf, pred_j[i_j])
+            done.add(j)
+            msp_edges.append((i, j))
+
+            if has_im_poses and im_poses[i] is None:
+                im_poses[i] = sRT_to_4x4(1, R, T, device)
+
+        elif j in done:
+            print(f' init edge ({i}*,{j}) {score=}')
+            assert i not in done
+            i_j = edge_str(i, j)
+            s, R, T = rigid_points_registration(pred_j[i_j], pts3d[j], conf=conf_j[i_j])
+            trf = sRT_to_4x4(s, R, T, device)
+            pts3d[i] = geotrf(trf, pred_i[i_j])
+            done.add(i)
+            msp_edges.append((i, j))
+
+            if has_im_poses and im_poses[i] is None:
+                im_poses[i] = sRT_to_4x4(1, R, T, device)
+        else:
+            # let's try again later
+            todo.insert(0, (score, i, j))
+
+    if has_im_poses:
+        # complete all missing information
+        pair_scores = list(sparse_graph.values())  # already negative scores: less is best
+        edges_from_best_to_worse = np.array(list(sparse_graph.keys()))[np.argsort(pair_scores)]
+        for i, j in edges_from_best_to_worse.tolist():
+            if im_focals[i] is None:
+                im_focals[i] = estimate_focal(pred_i[edge_str(i, j)])
+
+        for i in range(n_imgs):
+            if im_poses[i] is None:
+                msk = im_conf[i] > min_conf_thr
+                res = fast_pnp(pts3d[i], im_focals[i], msk=msk, device=device, niter_PnP=niter_PnP)
+                if res:
+                    im_focals[i], im_poses[i] = res
+            if im_poses[i] is None:
+                im_poses[i] = torch.eye(4, device=device)
+        im_poses = torch.stack(im_poses)
+    else:
+        im_poses = im_focals = None
+
+    return pts3d, msp_edges, im_focals, im_poses
+
+
+def dict_to_sparse_graph(dic):
+    n_imgs = max(max(e) for e in dic) + 1
+    res = sp.dok_array((n_imgs, n_imgs))
+    for edge, value in dic.items():
+        res[edge] = value
+    return res
+
+
+def rigid_points_registration(pts1, pts2, conf):
+    R, T, s = roma.rigid_points_registration(
+        pts1.reshape(-1, 3), pts2.reshape(-1, 3), weights=conf.ravel(), compute_scaling=True)
+    return s, R, T  # return un-scaled (R, T)
+
+
+def sRT_to_4x4(scale, R, T, device):
+    trf = torch.eye(4, device=device)
+    trf[:3, :3] = R * scale
+    trf[:3, 3] = T.ravel()  # doesn't need scaling
+    return trf
+
+
+def estimate_focal(pts3d_i, pp=None):
+    if pp is None:
+        H, W, THREE = pts3d_i.shape
+        assert THREE == 3
+        pp = torch.tensor((W/2, H/2), device=pts3d_i.device)
+    focal = estimate_focal_knowing_depth(pts3d_i.unsqueeze(0), pp.unsqueeze(
+        0), focal_mode='weiszfeld', min_focal=0.5, max_focal=3.5).ravel()
+    return float(focal)
+
+
+@cache
+def pixel_grid(H, W):
+    return np.mgrid[:W, :H].T.astype(np.float32)
+
+
+def fast_pnp(pts3d, focal, msk, device, pp=None, niter_PnP=10):
+    # extract camera poses and focals with RANSAC-PnP
+    if msk.sum() < 4:
+        return None  # we need at least 4 points for PnP
+    pts3d, msk = map(to_numpy, (pts3d, msk))
+
+    H, W, THREE = pts3d.shape
+    assert THREE == 3
+    pixels = pixel_grid(H, W)
+
+    if focal is None:
+        S = max(W, H)
+        tentative_focals = np.geomspace(S/2, S*3, 21)
+    else:
+        tentative_focals = [focal]
+
+    if pp is None:
+        pp = (W/2, H/2)
+    else:
+        pp = to_numpy(pp)
+
+    best = 0,
+    for focal in tentative_focals:
+        K = np.float32([(focal, 0, pp[0]), (0, focal, pp[1]), (0, 0, 1)])
+
+        success, R, T, inliers = cv2.solvePnPRansac(pts3d[msk], pixels[msk], K, None,
+                                                    iterationsCount=niter_PnP, reprojectionError=5, flags=cv2.SOLVEPNP_SQPNP)
+        if not success:
+            continue
+
+        score = len(inliers)
+        if success and score > best[0]:
+            best = score, R, T, focal
+
+    if not best[0]:
+        return None
+
+    _, R, T, best_focal = best
+    R = cv2.Rodrigues(R)[0]  # world to cam
+    R, T = map(torch.from_numpy, (R, T))
+    return best_focal, inv(sRT_to_4x4(1, R, T, device))  # cam to world
+
+
+def get_known_poses(self):
+    if self.has_im_poses:
+        known_poses_msk = torch.tensor([not (p.requires_grad) for p in self.im_poses])
+        known_poses = self.get_im_poses()
+        return known_poses_msk.sum(), known_poses_msk, known_poses
+    else:
+        return 0, None, None
+
+
+def get_known_focals(self):
+    if self.has_im_poses:
+        known_focal_msk = self.get_known_focal_mask()
+        known_focals = self.get_focals()
+        return known_focal_msk.sum(), known_focal_msk, known_focals
+    else:
+        return 0, None,
None + + +def align_multiple_poses(src_poses, target_poses): + N = len(src_poses) + assert src_poses.shape == target_poses.shape == (N, 4, 4) + + def center_and_z(poses): + eps = get_med_dist_between_poses(poses) / 100 + return torch.cat((poses[:, :3, 3], poses[:, :3, 3] + eps*poses[:, :3, 2])) + R, T, s = roma.rigid_points_registration(center_and_z(src_poses), center_and_z(target_poses), compute_scaling=True) + return s, R, T diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/cloud_opt/optimizer.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/cloud_opt/optimizer.py new file mode 100644 index 0000000000000000000000000000000000000000..e53636dac67739e6e92affae811855bd1e42ac96 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/cloud_opt/optimizer.py @@ -0,0 +1,230 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). +# +# -------------------------------------------------------- +# Main class for the implementation of the global alignment +# -------------------------------------------------------- +import numpy as np +import torch +import torch.nn as nn + +from dust3r.cloud_opt.base_opt import BasePCOptimizer +from dust3r.utils.geometry import xy_grid, geotrf +from dust3r.utils.device import to_cpu, to_numpy + + +class PointCloudOptimizer(BasePCOptimizer): + """ Optimize a global scene, given a list of pairwise observations. + Graph node: images + Graph edges: observations = (pred1, pred2) + """ + + def __init__(self, *args, optimize_pp=False, focal_break=20, **kwargs): + super().__init__(*args, **kwargs) + + self.has_im_poses = True # by definition of this class + self.focal_break = focal_break + + # adding thing to optimize + self.im_depthmaps = nn.ParameterList(torch.randn(H, W)/10-3 for H, W in self.imshapes) # log(depth) + self.im_poses = nn.ParameterList(self.rand_pose(self.POSE_DIM) for _ in range(self.n_imgs)) # camera poses + self.im_focals = nn.ParameterList(torch.FloatTensor( + [self.focal_break*np.log(max(H, W))]) for H, W in self.imshapes) # camera intrinsics + self.im_pp = nn.ParameterList(torch.zeros((2,)) for _ in range(self.n_imgs)) # camera intrinsics + self.im_pp.requires_grad_(optimize_pp) + + self.imshape = self.imshapes[0] + im_areas = [h*w for h, w in self.imshapes] + self.max_area = max(im_areas) + + # adding thing to optimize + self.im_depthmaps = ParameterStack(self.im_depthmaps, is_param=True, fill=self.max_area) + self.im_poses = ParameterStack(self.im_poses, is_param=True) + self.im_focals = ParameterStack(self.im_focals, is_param=True) + self.im_pp = ParameterStack(self.im_pp, is_param=True) + self.register_buffer('_pp', torch.tensor([(w/2, h/2) for h, w in self.imshapes])) + self.register_buffer('_grid', ParameterStack( + [xy_grid(W, H, device=self.device) for H, W in self.imshapes], fill=self.max_area)) + + # pre-compute pixel weights + self.register_buffer('_weight_i', ParameterStack( + [self.conf_trf(self.conf_i[i_j]) for i_j in self.str_edges], fill=self.max_area)) + self.register_buffer('_weight_j', ParameterStack( + [self.conf_trf(self.conf_j[i_j]) for i_j in self.str_edges], fill=self.max_area)) + + # precompute aa + self.register_buffer('_stacked_pred_i', ParameterStack(self.pred_i, self.str_edges, fill=self.max_area)) + self.register_buffer('_stacked_pred_j', ParameterStack(self.pred_j, self.str_edges, fill=self.max_area)) + self.register_buffer('_ei', torch.tensor([i for i, j in 
self.edges])) + self.register_buffer('_ej', torch.tensor([j for i, j in self.edges])) + self.total_area_i = sum([im_areas[i] for i, j in self.edges]) + self.total_area_j = sum([im_areas[j] for i, j in self.edges]) + + def _check_all_imgs_are_selected(self, msk): + assert np.all(self._get_msk_indices(msk) == np.arange(self.n_imgs)), 'incomplete mask!' + + def preset_pose(self, known_poses, pose_msk=None): # cam-to-world + self._check_all_imgs_are_selected(pose_msk) + + if isinstance(known_poses, torch.Tensor) and known_poses.ndim == 2: + known_poses = [known_poses] + for idx, pose in zip(self._get_msk_indices(pose_msk), known_poses): + print(f' (setting pose #{idx} = {pose[:3,3]})') + self._no_grad(self._set_pose(self.im_poses, idx, torch.tensor(pose))) + + # normalize scale if there's less than 1 known pose + n_known_poses = sum((p.requires_grad is False) for p in self.im_poses) + self.norm_pw_scale = (n_known_poses <= 1) + + self.im_poses.requires_grad_(False) + self.norm_pw_scale = False + + def preset_focal(self, known_focals, msk=None): + self._check_all_imgs_are_selected(msk) + + for idx, focal in zip(self._get_msk_indices(msk), known_focals): + print(f' (setting focal #{idx} = {focal})') + self._no_grad(self._set_focal(idx, focal)) + + self.im_focals.requires_grad_(False) + + def preset_principal_point(self, known_pp, msk=None): + self._check_all_imgs_are_selected(msk) + + for idx, pp in zip(self._get_msk_indices(msk), known_pp): + print(f' (setting principal point #{idx} = {pp})') + self._no_grad(self._set_principal_point(idx, pp)) + + self.im_pp.requires_grad_(False) + + def _no_grad(self, tensor): + assert tensor.requires_grad, 'it must be True at this point, otherwise no modification occurs' + + def _set_focal(self, idx, focal, force=False): + param = self.im_focals[idx] + if param.requires_grad or force: # can only init a parameter not already initialized + param.data[:] = self.focal_break * np.log(focal) + return param + + def get_focals(self): + log_focals = torch.stack(list(self.im_focals), dim=0) + return (log_focals / self.focal_break).exp() + + def get_known_focal_mask(self): + return torch.tensor([not (p.requires_grad) for p in self.im_focals]) + + def _set_principal_point(self, idx, pp, force=False): + param = self.im_pp[idx] + H, W = self.imshapes[idx] + if param.requires_grad or force: # can only init a parameter not already initialized + param.data[:] = to_cpu(to_numpy(pp) - (W/2, H/2)) / 10 + return param + + def get_principal_points(self): + return self._pp + 10 * self.im_pp + + def get_intrinsics(self): + K = torch.zeros((self.n_imgs, 3, 3), device=self.device) + focals = self.get_focals().flatten() + K[:, 0, 0] = K[:, 1, 1] = focals + K[:, :2, 2] = self.get_principal_points() + K[:, 2, 2] = 1 + return K + + def get_im_poses(self): # cam to world + cam2world = self._get_poses(self.im_poses) + return cam2world + + def _set_depthmap(self, idx, depth, force=False): + depth = _ravel_hw(depth, self.max_area) + + param = self.im_depthmaps[idx] + if param.requires_grad or force: # can only init a parameter not already initialized + param.data[:] = depth.log().nan_to_num(neginf=0) + return param + + def get_depthmaps(self, raw=False): + res = self.im_depthmaps.exp() + if not raw: + res = [dm[:h*w].view(h, w) for dm, (h, w) in zip(res, self.imshapes)] + return res + + def depth_to_pts3d(self): + # Get depths and projection params if not provided + focals = self.get_focals() + pp = self.get_principal_points() + im_poses = self.get_im_poses() + depth = 
self.get_depthmaps(raw=True) + + # get pointmaps in camera frame + rel_ptmaps = _fast_depthmap_to_pts3d(depth, self._grid, focals, pp=pp) + # project to world frame + return geotrf(im_poses, rel_ptmaps) + + def get_pts3d(self, raw=False): + res = self.depth_to_pts3d() + if not raw: + res = [dm[:h*w].view(h, w, 3) for dm, (h, w) in zip(res, self.imshapes)] + return res + + def forward(self): + pw_poses = self.get_pw_poses() # cam-to-world + pw_adapt = self.get_adaptors().unsqueeze(1) + proj_pts3d = self.get_pts3d(raw=True) + + # rotate pairwise prediction according to pw_poses + aligned_pred_i = geotrf(pw_poses, pw_adapt * self._stacked_pred_i) + aligned_pred_j = geotrf(pw_poses, pw_adapt * self._stacked_pred_j) + + # compute the loss + li = self.dist(proj_pts3d[self._ei], aligned_pred_i, weight=self._weight_i).sum() / self.total_area_i + lj = self.dist(proj_pts3d[self._ej], aligned_pred_j, weight=self._weight_j).sum() / self.total_area_j + + return li + lj + + +def _fast_depthmap_to_pts3d(depth, pixel_grid, focal, pp): + pp = pp.unsqueeze(1) + focal = focal.unsqueeze(1) + assert focal.shape == (len(depth), 1, 1) + assert pp.shape == (len(depth), 1, 2) + assert pixel_grid.shape == depth.shape + (2,) + depth = depth.unsqueeze(-1) + return torch.cat((depth * (pixel_grid - pp) / focal, depth), dim=-1) + + +def ParameterStack(params, keys=None, is_param=None, fill=0): + if keys is not None: + params = [params[k] for k in keys] + + if fill > 0: + params = [_ravel_hw(p, fill) for p in params] + + requires_grad = params[0].requires_grad + assert all(p.requires_grad == requires_grad for p in params) + + params = torch.stack(list(params)).float().detach() + if is_param or requires_grad: + params = nn.Parameter(params) + params.requires_grad_(requires_grad) + return params + + +def _ravel_hw(tensor, fill=0): + # ravel H,W + tensor = tensor.view((tensor.shape[0] * tensor.shape[1],) + tensor.shape[2:]) + + if len(tensor) < fill: + tensor = torch.cat((tensor, tensor.new_zeros((fill - len(tensor),)+tensor.shape[1:]))) + return tensor + + +def acceptable_focal_range(H, W, minf=0.5, maxf=3.5): + focal_base = max(H, W) / (2 * np.tan(np.deg2rad(60) / 2)) # size / 1.1547005383792515 + return minf*focal_base, maxf*focal_base + + +def apply_mask(img, msk): + img = img.copy() + img[msk] = 0 + return img diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/cloud_opt/pair_viewer.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/cloud_opt/pair_viewer.py new file mode 100644 index 0000000000000000000000000000000000000000..a49e9a17df9ddc489b8fe3dddc027636c0c5973d --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/cloud_opt/pair_viewer.py @@ -0,0 +1,125 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). +# +# -------------------------------------------------------- +# Dummy optimizer for visualizing pairs +# -------------------------------------------------------- +import numpy as np +import torch +import torch.nn as nn +import cv2 + +from dust3r.cloud_opt.base_opt import BasePCOptimizer +from dust3r.utils.geometry import inv, geotrf, depthmap_to_absolute_camera_coordinates +from dust3r.cloud_opt.commons import edge_str +from dust3r.post_process import estimate_focal_knowing_depth + + +class PairViewer (BasePCOptimizer): + """ + This is a dummy optimizer. 
+ Use it only when the goal is to visualize the results for a pair of images (requires is_symmetrized) + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + assert self.is_symmetrized and self.n_edges == 2 + self.has_im_poses = True + + # compute all parameters directly from raw input + self.focals = [] + self.pp = [] + rel_poses = [] + confs = [] + for i in range(self.n_imgs): + conf = float(self.conf_i[edge_str(i, 1-i)].mean() * self.conf_j[edge_str(i, 1-i)].mean()) + print(f' - {conf=:.3} for edge {i}-{1-i}') + confs.append(conf) + + H, W = self.imshapes[i] + pts3d = self.pred_i[edge_str(i, 1-i)] + pp = torch.tensor((W/2, H/2)) + focal = float(estimate_focal_knowing_depth(pts3d[None], pp, focal_mode='weiszfeld')) + self.focals.append(focal) + self.pp.append(pp) + + # estimate the pose of pts1 in image 2 + pixels = np.mgrid[:W, :H].T.astype(np.float32) + pts3d = self.pred_j[edge_str(1-i, i)].numpy() + assert pts3d.shape[:2] == (H, W) + msk = self.get_masks()[i].numpy() + K = np.float32([(focal, 0, pp[0]), (0, focal, pp[1]), (0, 0, 1)]) + + try: + res = cv2.solvePnPRansac(pts3d[msk], pixels[msk], K, None, + iterationsCount=100, reprojectionError=5, flags=cv2.SOLVEPNP_SQPNP) + success, R, T, inliers = res + assert success + + R = cv2.Rodrigues(R)[0] # world to cam + pose = inv(np.r_[np.c_[R, T], [(0, 0, 0, 1)]]) # cam to world + except Exception: # PnP failed: fall back to the identity pose + pose = np.eye(4) + rel_poses.append(torch.from_numpy(pose.astype(np.float32))) + + # let's use the pair with the most confidence + if confs[0] > confs[1]: + # ptcloud is expressed in camera1 + self.im_poses = [torch.eye(4), rel_poses[1]] # I, cam2-to-cam1 + self.depth = [self.pred_i['0_1'][..., 2], geotrf(inv(rel_poses[1]), self.pred_j['0_1'])[..., 2]] + else: + # ptcloud is expressed in camera2 + self.im_poses = [rel_poses[0], torch.eye(4)] # cam1-to-cam2, I + self.depth = [geotrf(inv(rel_poses[0]), self.pred_j['1_0'])[..., 2], self.pred_i['1_0'][..., 2]] + + self.im_poses = nn.Parameter(torch.stack(self.im_poses, dim=0), requires_grad=False) + self.focals = nn.Parameter(torch.tensor(self.focals), requires_grad=False) + self.pp = nn.Parameter(torch.stack(self.pp, dim=0), requires_grad=False) + self.depth = nn.ParameterList(self.depth) + for p in self.parameters(): + p.requires_grad = False + + def _set_depthmap(self, idx, depth, force=False): + print('_set_depthmap is ignored in PairViewer') + return + + def get_depthmaps(self, raw=False): + depth = [d.to(self.device) for d in self.depth] + return depth + + def _set_focal(self, idx, focal, force=False): + self.focals[idx] = focal + + def get_focals(self): + return self.focals + + def get_known_focal_mask(self): + return torch.tensor([not (p.requires_grad) for p in self.focals]) + + def get_principal_points(self): + return self.pp + + def get_intrinsics(self): + focals = self.get_focals() + pps = self.get_principal_points() + K = torch.zeros((len(focals), 3, 3), device=self.device) + for i in range(len(focals)): + K[i, 0, 0] = K[i, 1, 1] = focals[i] + K[i, :2, 2] = pps[i] + K[i, 2, 2] = 1 + return K + + def get_im_poses(self): + return self.im_poses + + def depth_to_pts3d(self): + pts3d = [] + for d, intrinsics, im_pose in zip(self.depth, self.get_intrinsics(), self.get_im_poses()): + pts, _ = depthmap_to_absolute_camera_coordinates(d.cpu().numpy(), + intrinsics.cpu().numpy(), + im_pose.cpu().numpy()) + pts3d.append(torch.from_numpy(pts).to(device=self.device)) + return pts3d + + def forward(self): + return float('nan')
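For reference, a minimal self-contained sketch (not part of the diff) of the pose recovery PairViewer performs above: cv2.solvePnPRansac on predicted 3D points and their pixel coordinates yields a world-to-camera (R, t), whose inverse is the cam-to-world pose. The function and variable names here are illustrative only.

import cv2
import numpy as np

def pose_from_pts3d(pts3d, pixels, K):
    # pts3d: (N, 3) float32 3D points, pixels: (N, 2) float32 pixel coords, K: (3, 3) intrinsics
    ok, rvec, tvec, _ = cv2.solvePnPRansac(pts3d, pixels, K, None,
                                           iterationsCount=100, reprojectionError=5,
                                           flags=cv2.SOLVEPNP_SQPNP)
    if not ok:
        return np.eye(4, dtype=np.float32)        # same fallback as PairViewer above
    R = cv2.Rodrigues(rvec)[0]                    # rotation vector -> world-to-cam matrix
    w2c = np.r_[np.c_[R, tvec], [(0, 0, 0, 1)]]
    return np.linalg.inv(w2c).astype(np.float32)  # cam-to-world

diff --git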
a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/__init__.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..cc5e79718e4a3eb2e31c60c8a390e61a19ec5432 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/__init__.py @@ -0,0 +1,42 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). +from .utils.transforms import * +from .base.batched_sampler import BatchedRandomSampler # noqa: F401 +from .co3d import Co3d # noqa: F401 + + +def get_data_loader(dataset, batch_size, num_workers=8, shuffle=True, drop_last=True, pin_mem=True): + import torch + from croco.utils.misc import get_world_size, get_rank + + # pytorch dataset + if isinstance(dataset, str): + dataset = eval(dataset) + + world_size = get_world_size() + rank = get_rank() + + try: + sampler = dataset.make_sampler(batch_size, shuffle=shuffle, world_size=world_size, + rank=rank, drop_last=drop_last) + except (AttributeError, NotImplementedError): + # not avail for this dataset + if torch.distributed.is_initialized(): + sampler = torch.utils.data.DistributedSampler( + dataset, num_replicas=world_size, rank=rank, shuffle=shuffle, drop_last=drop_last + ) + elif shuffle: + sampler = torch.utils.data.RandomSampler(dataset) + else: + sampler = torch.utils.data.SequentialSampler(dataset) + + data_loader = torch.utils.data.DataLoader( + dataset, + sampler=sampler, + batch_size=batch_size, + num_workers=num_workers, + pin_memory=pin_mem, + drop_last=drop_last, + ) + + return data_loader diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/base/__init__.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/base/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a32692113d830ddc4af4e6ed608f222fbe062e6e --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/base/__init__.py @@ -0,0 +1,2 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/base/base_stereo_view_dataset.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/base/base_stereo_view_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..17390ca29d4437fc41f3c946b235888af9e4c888 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/base/base_stereo_view_dataset.py @@ -0,0 +1,220 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). +# +# -------------------------------------------------------- +# base class for implementing datasets +# -------------------------------------------------------- +import PIL +import numpy as np +import torch + +from dust3r.datasets.base.easy_dataset import EasyDataset +from dust3r.datasets.utils.transforms import ImgNorm +from dust3r.utils.geometry import depthmap_to_absolute_camera_coordinates +import dust3r.datasets.utils.cropping as cropping + + +class BaseStereoViewDataset (EasyDataset): + """ Define all basic options. 
+ + Usage: + class MyDataset (BaseStereoViewDataset): + def _get_views(self, idx, rng): + # overload here + views = [] + views.append(dict(img=, ...)) + return views + """ + + def __init__(self, *, # only keyword arguments + split=None, + resolution=None, # square_size or (width, height) or list of [(width,height), ...] + transform=ImgNorm, + aug_crop=False, + seed=None): + self.num_views = 2 + self.split = split + self._set_resolutions(resolution) + + if isinstance(transform, str): + transform = eval(transform) + self.transform = transform + + self.aug_crop = aug_crop + self.seed = seed + + def __len__(self): + return len(self.scenes) + + def get_stats(self): + return f"{len(self)} pairs" + + def __repr__(self): + resolutions_str = '['+';'.join(f'{w}x{h}' for w, h in self._resolutions)+']' + return f"""{type(self).__name__}({self.get_stats()}, + {self.split=}, + {self.seed=}, + resolutions={resolutions_str}, + {self.transform=})""".replace('self.', '').replace('\n', '').replace(' ', '') + + def _get_views(self, idx, resolution, rng): + raise NotImplementedError() + + def __getitem__(self, idx): + if isinstance(idx, tuple): + # the idx is specifying the aspect-ratio + idx, ar_idx = idx + else: + assert len(self._resolutions) == 1 + ar_idx = 0 + + # set-up the rng + if self.seed: # reseed for each __getitem__ + self._rng = np.random.default_rng(seed=self.seed + idx) + elif not hasattr(self, '_rng'): + seed = torch.initial_seed() # this is different for each dataloader process + self._rng = np.random.default_rng(seed=seed) + + # call the data-loading code overloaded by subclasses + resolution = self._resolutions[ar_idx] # DO NOT CHANGE THIS (compatible with BatchedRandomSampler) + views = self._get_views(idx, resolution, self._rng) + assert len(views) == self.num_views + + # check data-types + for v, view in enumerate(views): + assert 'pts3d' not in view, f"pts3d should not be there, they will be computed afterwards based on intrinsics+depthmap for view {view_name(view)}" + view['idx'] = (idx, ar_idx, v) + + # encode the image + width, height = view['img'].size + view['true_shape'] = np.int32((height, width)) + view['img'] = self.transform(view['img']) + + assert 'camera_intrinsics' in view + if 'camera_pose' not in view: + view['camera_pose'] = np.full((4, 4), np.nan, dtype=np.float32) + else: + assert np.isfinite(view['camera_pose']).all(), f'NaN in camera pose for view {view_name(view)}' + assert 'pts3d' not in view + assert 'valid_mask' not in view + assert np.isfinite(view['depthmap']).all(), f'NaN in depthmap for view {view_name(view)}' + pts3d, valid_mask = depthmap_to_absolute_camera_coordinates(**view) + + view['pts3d'] = pts3d + view['valid_mask'] = valid_mask & np.isfinite(pts3d).all(axis=-1) + + # check all datatypes + for key, val in view.items(): + res, err_msg = is_good_type(key, val) + assert res, f"{err_msg} with {key}={val} for view {view_name(view)}" + K = view['camera_intrinsics'] + + # last thing done! 
+ for view in views: + # transpose to make sure all views are the same size + transpose_to_landscape(view) + # this allows checking whether the RNG is in the same state each time + view['rng'] = int.from_bytes(self._rng.bytes(4), 'big') + return views + + def _set_resolutions(self, resolutions): + assert resolutions is not None, 'undefined resolution' + + if not isinstance(resolutions, list): + resolutions = [resolutions] + + self._resolutions = [] + for resolution in resolutions: + if isinstance(resolution, int): + width = height = resolution + else: + width, height = resolution + assert isinstance(width, int), f'Bad type for {width=} {type(width)=}, should be int' + assert isinstance(height, int), f'Bad type for {height=} {type(height)=}, should be int' + assert width >= height + self._resolutions.append((width, height)) + + def _crop_resize_if_necessary(self, image, depthmap, intrinsics, resolution, rng=None, info=None): + """ This function: + - first downsizes the image with LANCZOS interpolation, which avoids aliasing better than bilinear interpolation when downscaling, + - then crops (centered on the principal point) and rescales so that the output matches the target resolution, updating the intrinsics accordingly. + """ + if not isinstance(image, PIL.Image.Image): + image = PIL.Image.fromarray(image) + + # downscale with lanczos interpolation so that image.size == resolution + # cropping centered on the principal point + W, H = image.size + cx, cy = intrinsics[:2, 2].round().astype(int) + min_margin_x = min(cx, W-cx) + min_margin_y = min(cy, H-cy) + assert min_margin_x > W/5, f'Bad principal point in view={info}' + assert min_margin_y > H/5, f'Bad principal point in view={info}' + # the new window will be a rectangle of size (2*min_margin_x, 2*min_margin_y) centered on (cx,cy) + l, t = cx - min_margin_x, cy - min_margin_y + r, b = cx + min_margin_x, cy + min_margin_y + crop_bbox = (l, t, r, b) + image, depthmap, intrinsics = cropping.crop_image_depthmap(image, depthmap, intrinsics, crop_bbox) + + # transpose the resolution if necessary + W, H = image.size # new size + assert resolution[0] >= resolution[1] + if H > 1.1*W: + # image is portrait mode + resolution = resolution[::-1] + elif 0.9 < H/W < 1.1 and resolution[0] != resolution[1]: + # image is square, so we choose (portrait, landscape) randomly + if rng.integers(2): + resolution = resolution[::-1] + + # high-quality Lanczos down-scaling + target_resolution = np.array(resolution) + if self.aug_crop > 1: + target_resolution += rng.integers(0, self.aug_crop) + image, depthmap, intrinsics = cropping.rescale_image_depthmap(image, depthmap, intrinsics, target_resolution) + + # actual cropping (if necessary) + intrinsics2 = cropping.camera_matrix_of_crop(intrinsics, image.size, resolution, offset_factor=0.5) + crop_bbox = cropping.bbox_from_intrinsics_in_out(intrinsics, intrinsics2, resolution) + image, depthmap, intrinsics2 = cropping.crop_image_depthmap(image, depthmap, intrinsics, crop_bbox) + + return image, depthmap, intrinsics2 + + +def is_good_type(key, v): + """ returns (is_good, err_msg) + """ + if isinstance(v, (str, int, tuple)): + return True, None + if v.dtype not in (np.float32, torch.float32, bool, np.int32, np.int64, np.uint8): + return False, f"bad {v.dtype=}" + return True, None + + +def view_name(view, batch_index=None): + def sel(x): return x[batch_index] if batch_index not in (None, slice(None)) else x + db = sel(view['dataset']) + label = sel(view['label']) + instance = sel(view['instance']) + return f"{db}/{label}/{instance}" + + +def transpose_to_landscape(view): + height, width = view['true_shape'] + + if width < height: + # rectify portrait to landscape 
+ assert view['img'].shape == (3, height, width) + view['img'] = view['img'].swapaxes(1, 2) + + assert view['valid_mask'].shape == (height, width) + view['valid_mask'] = view['valid_mask'].swapaxes(0, 1) + + assert view['depthmap'].shape == (height, width) + view['depthmap'] = view['depthmap'].swapaxes(0, 1) + + assert view['pts3d'].shape == (height, width, 3) + view['pts3d'] = view['pts3d'].swapaxes(0, 1) + + # transpose x and y pixels + view['camera_intrinsics'] = view['camera_intrinsics'][[1, 0, 2]] diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/base/batched_sampler.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/base/batched_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..85f58a65d41bb8101159e032d5b0aac26a7cf1a1 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/base/batched_sampler.py @@ -0,0 +1,74 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). +# +# -------------------------------------------------------- +# Random sampling under a constraint +# -------------------------------------------------------- +import numpy as np +import torch + + +class BatchedRandomSampler: + """ Random sampling under a constraint: each sample in the batch has the same feature, + which is chosen randomly from a known pool of 'features' for each batch. + + For instance, the 'feature' could be the image aspect-ratio. + + The index returned is a tuple (sample_idx, feat_idx). + This sampler ensures that each series of `batch_size` indices has the same `feat_idx`. + """ + + def __init__(self, dataset, batch_size, pool_size, world_size=1, rank=0, drop_last=True): + self.batch_size = batch_size + self.pool_size = pool_size + + self.len_dataset = N = len(dataset) + self.total_size = round_by(N, batch_size*world_size) if drop_last else N + assert world_size == 1 or drop_last, 'must drop the last batch in distributed mode' + + # distributed sampler + self.world_size = world_size + self.rank = rank + self.epoch = None + + def __len__(self): + return self.total_size // self.world_size + + def set_epoch(self, epoch): + self.epoch = epoch + + def __iter__(self): + # prepare RNG + if self.epoch is None: + assert self.world_size == 1 and self.rank == 0, 'use set_epoch() if distributed mode is used' + seed = int(torch.empty((), dtype=torch.int64).random_().item()) + else: + seed = self.epoch + 777 + rng = np.random.default_rng(seed=seed) + + # random indices (will restart from 0 if not drop_last) + sample_idxs = np.arange(self.total_size) + rng.shuffle(sample_idxs) + + # random feat_idxs (same across each batch) + n_batches = (self.total_size+self.batch_size-1) // self.batch_size + feat_idxs = rng.integers(self.pool_size, size=n_batches) + feat_idxs = np.broadcast_to(feat_idxs[:, None], (n_batches, self.batch_size)) + feat_idxs = feat_idxs.ravel()[:self.total_size] + + # put them together + idxs = np.c_[sample_idxs, feat_idxs] # shape = (total_size, 2) + + # Distributed sampler: we select a subset of batches + # make sure the slice for each node is aligned with batch_size + size_per_proc = self.batch_size * ((self.total_size + self.world_size * + self.batch_size-1) // (self.world_size * self.batch_size)) + idxs = idxs[self.rank*size_per_proc: (self.rank+1)*size_per_proc] + + yield from (tuple(idx) for idx in idxs) + + +def round_by(total, multiple, up=False): + if up: + total = total + 
multiple-1 + return (total//multiple) * multiple diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/base/easy_dataset.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/base/easy_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..4939a88f02715a1f80be943ddb6d808e1be84db7 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/base/easy_dataset.py @@ -0,0 +1,157 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). +# +# -------------------------------------------------------- +# A dataset base class that you can easily resize and combine. +# -------------------------------------------------------- +import numpy as np +from dust3r.datasets.base.batched_sampler import BatchedRandomSampler + + +class EasyDataset: + """ a dataset that you can easily resize and combine. + Examples: + --------- + 2 * dataset ==> duplicate each element 2x + + 10 @ dataset ==> set the size to 10 (random sampling, duplicates if necessary) + + dataset1 + dataset2 ==> concatenate datasets + """ + + def __add__(self, other): + return CatDataset([self, other]) + + def __rmul__(self, factor): + return MulDataset(factor, self) + + def __rmatmul__(self, factor): + return ResizedDataset(factor, self) + + def set_epoch(self, epoch): + pass # nothing to do by default + + def make_sampler(self, batch_size, shuffle=True, world_size=1, rank=0, drop_last=True): + if not (shuffle): + raise NotImplementedError() # cannot deal yet + num_of_aspect_ratios = len(self._resolutions) + return BatchedRandomSampler(self, batch_size, num_of_aspect_ratios, world_size=world_size, rank=rank, drop_last=drop_last) + + +class MulDataset (EasyDataset): + """ Artificially augmenting the size of a dataset. + """ + multiplicator: int + + def __init__(self, multiplicator, dataset): + assert isinstance(multiplicator, int) and multiplicator > 0 + self.multiplicator = multiplicator + self.dataset = dataset + + def __len__(self): + return self.multiplicator * len(self.dataset) + + def __repr__(self): + return f'{self.multiplicator}*{repr(self.dataset)}' + + def __getitem__(self, idx): + if isinstance(idx, tuple): + idx, other = idx + return self.dataset[idx // self.multiplicator, other] + else: + return self.dataset[idx // self.multiplicator] + + @property + def _resolutions(self): + return self.dataset._resolutions + + +class ResizedDataset (EasyDataset): + """ Artificially changing the size of a dataset. 
+ """ + new_size: int + + def __init__(self, new_size, dataset): + assert isinstance(new_size, int) and new_size > 0 + self.new_size = new_size + self.dataset = dataset + + def __len__(self): + return self.new_size + + def __repr__(self): + size_str = str(self.new_size) + for i in range((len(size_str)-1) // 3): + sep = -4*i-3 + size_str = size_str[:sep] + '_' + size_str[sep:] + return f'{size_str} @ {repr(self.dataset)}' + + def set_epoch(self, epoch): + # this random shuffle only depends on the epoch + rng = np.random.default_rng(seed=epoch+777) + + # shuffle all indices + perm = rng.permutation(len(self.dataset)) + + # rotary extension until target size is met + shuffled_idxs = np.concatenate([perm] * (1 + (len(self)-1) // len(self.dataset))) + self._idxs_mapping = shuffled_idxs[:self.new_size] + + assert len(self._idxs_mapping) == self.new_size + + def __getitem__(self, idx): + assert hasattr(self, '_idxs_mapping'), 'You need to call dataset.set_epoch() to use ResizedDataset.__getitem__()' + if isinstance(idx, tuple): + idx, other = idx + return self.dataset[self._idxs_mapping[idx], other] + else: + return self.dataset[self._idxs_mapping[idx]] + + @property + def _resolutions(self): + return self.dataset._resolutions + + +class CatDataset (EasyDataset): + """ Concatenation of several datasets + """ + + def __init__(self, datasets): + for dataset in datasets: + assert isinstance(dataset, EasyDataset) + self.datasets = datasets + self._cum_sizes = np.cumsum([len(dataset) for dataset in datasets]) + + def __len__(self): + return self._cum_sizes[-1] + + def __repr__(self): + # remove uselessly long transform + return ' + '.join(repr(dataset).replace(',transform=Compose( ToTensor() Normalize(mean=(0.5, 0.5, 0.5), std=(0.5, 0.5, 0.5)))', '') for dataset in self.datasets) + + def set_epoch(self, epoch): + for dataset in self.datasets: + dataset.set_epoch(epoch) + + def __getitem__(self, idx): + other = None + if isinstance(idx, tuple): + idx, other = idx + + if not (0 <= idx < len(self)): + raise IndexError() + + db_idx = np.searchsorted(self._cum_sizes, idx, 'right') + dataset = self.datasets[db_idx] + new_idx = idx - (self._cum_sizes[db_idx - 1] if db_idx > 0 else 0) + + if other is not None: + new_idx = (new_idx, other) + return dataset[new_idx] + + @property + def _resolutions(self): + resolutions = self.datasets[0]._resolutions + for dataset in self.datasets[1:]: + assert tuple(dataset._resolutions) == tuple(resolutions) + return resolutions diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/co3d.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/co3d.py new file mode 100644 index 0000000000000000000000000000000000000000..9fc94f9420d86372e643c00e7cddf85b3d1982c6 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/co3d.py @@ -0,0 +1,146 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). 
+# +# -------------------------------------------------------- +# Dataloader for preprocessed Co3d_v2 +# dataset at https://github.com/facebookresearch/co3d - Creative Commons Attribution-NonCommercial 4.0 International +# See datasets_preprocess/preprocess_co3d.py +# -------------------------------------------------------- +import os.path as osp +import json +import itertools +from collections import deque + +import cv2 +import numpy as np + +from dust3r.datasets.base.base_stereo_view_dataset import BaseStereoViewDataset +from dust3r.utils.image import imread_cv2 + + +class Co3d(BaseStereoViewDataset): + def __init__(self, mask_bg=True, *args, ROOT, **kwargs): + self.ROOT = ROOT + super().__init__(*args, **kwargs) + assert mask_bg in (True, False, 'rand') + self.mask_bg = mask_bg + + # load all scenes + with open(osp.join(self.ROOT, f'selected_seqs_{self.split}.json'), 'r') as f: + self.scenes = json.load(f) + self.scenes = {k: v for k, v in self.scenes.items() if len(v) > 0} + self.scenes = {(k, k2): v2 for k, v in self.scenes.items() + for k2, v2 in v.items()} + self.scene_list = list(self.scenes.keys()) + + # for each scene, we have 100 images ==> 360 degrees (so 25 frames ~= 90 degrees) + # we prepare all combinations such that i-j = +/- [5, 10, .., 90] degrees + self.combinations = [(i, j) + for i, j in itertools.combinations(range(100), 2) + if 0 < abs(i-j) <= 30 and abs(i-j) % 5 == 0] + + self.invalidate = {scene: {} for scene in self.scene_list} + + def __len__(self): + return len(self.scene_list) * len(self.combinations) + + def _get_views(self, idx, resolution, rng): + # choose a scene + obj, instance = self.scene_list[idx // len(self.combinations)] + image_pool = self.scenes[obj, instance] + im1_idx, im2_idx = self.combinations[idx % len(self.combinations)] + + # add a bit of randomness + last = len(image_pool)-1 + + if resolution not in self.invalidate[obj, instance]: # flag invalid images + self.invalidate[obj, instance][resolution] = [False for _ in range(len(image_pool))] + + # decide now if we mask the bg + mask_bg = (self.mask_bg == True) or (self.mask_bg == 'rand' and rng.choice(2)) + + views = [] + imgs_idxs = [max(0, min(im_idx + rng.integers(-4, 5), last)) for im_idx in [im2_idx, im1_idx]] + imgs_idxs = deque(imgs_idxs) + while len(imgs_idxs) > 0: # some images (few) have zero depth + im_idx = imgs_idxs.pop() + + if self.invalidate[obj, instance][resolution][im_idx]: + # search for a valid image + random_direction = 2 * rng.choice(2) - 1 + for offset in range(1, len(image_pool)): + tentative_im_idx = (im_idx + (random_direction * offset)) % len(image_pool) + if not self.invalidate[obj, instance][resolution][tentative_im_idx]: + im_idx = tentative_im_idx + break + + view_idx = image_pool[im_idx] + + impath = osp.join(self.ROOT, obj, instance, 'images', f'frame{view_idx:06n}.jpg') + + # load camera params + input_metadata = np.load(impath.replace('jpg', 'npz')) + camera_pose = input_metadata['camera_pose'].astype(np.float32) + intrinsics = input_metadata['camera_intrinsics'].astype(np.float32) + + # load image and depth + rgb_image = imread_cv2(impath) + depthmap = imread_cv2(impath.replace('images', 'depths') + '.geometric.png', cv2.IMREAD_UNCHANGED) + depthmap = (depthmap.astype(np.float32) / 65535) * np.nan_to_num(input_metadata['maximum_depth']) + + if mask_bg: + # load object mask + maskpath = osp.join(self.ROOT, obj, instance, 'masks', f'frame{view_idx:06n}.png') + maskmap = imread_cv2(maskpath, cv2.IMREAD_UNCHANGED).astype(np.float32) + maskmap = (maskmap / 
255.0) > 0.1 + + # update the depthmap with mask + depthmap *= maskmap + + rgb_image, depthmap, intrinsics = self._crop_resize_if_necessary( + rgb_image, depthmap, intrinsics, resolution, rng=rng, info=impath) + + num_valid = (depthmap > 0.0).sum() + if num_valid == 0: + # problem, invalidate image and retry + self.invalidate[obj, instance][resolution][im_idx] = True + imgs_idxs.append(im_idx) + continue + + views.append(dict( + img=rgb_image, + depthmap=depthmap, + camera_pose=camera_pose, + camera_intrinsics=intrinsics, + dataset='Co3d_v2', + label=osp.join(obj, instance), + instance=osp.split(impath)[1], + )) + return views + + +if __name__ == "__main__": + from dust3r.datasets.base.base_stereo_view_dataset import view_name + from dust3r.viz import SceneViz, auto_cam_size + from dust3r.utils.image import rgb + + dataset = Co3d(split='train', ROOT="data/co3d_subset_processed", resolution=224, aug_crop=16) + + for idx in np.random.permutation(len(dataset)): + views = dataset[idx] + assert len(views) == 2 + print(view_name(views[0]), view_name(views[1])) + viz = SceneViz() + poses = [views[view_idx]['camera_pose'] for view_idx in [0, 1]] + cam_size = max(auto_cam_size(poses), 0.001) + for view_idx in [0, 1]: + pts3d = views[view_idx]['pts3d'] + valid_mask = views[view_idx]['valid_mask'] + colors = rgb(views[view_idx]['img']) + viz.add_pointcloud(pts3d, colors, valid_mask) + viz.add_camera(pose_c2w=views[view_idx]['camera_pose'], + focal=views[view_idx]['camera_intrinsics'][0, 0], + color=(idx*255, (1 - idx)*255, 0), + image=colors, + cam_size=cam_size) + viz.show() diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/utils/__init__.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a32692113d830ddc4af4e6ed608f222fbe062e6e --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/utils/__init__.py @@ -0,0 +1,2 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/utils/cropping.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/utils/cropping.py new file mode 100644 index 0000000000000000000000000000000000000000..02b1915676f3deea24f57032f7588ff34cbfaeb9 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/utils/cropping.py @@ -0,0 +1,119 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). +# +# -------------------------------------------------------- +# cropping utilities +# -------------------------------------------------------- +import PIL.Image +import os +os.environ["OPENCV_IO_ENABLE_OPENEXR"] = "1" +import cv2 # noqa +import numpy as np # noqa +from dust3r.utils.geometry import colmap_to_opencv_intrinsics, opencv_to_colmap_intrinsics # noqa +try: + lanczos = PIL.Image.Resampling.LANCZOS +except AttributeError: + lanczos = PIL.Image.LANCZOS + + +class ImageList: + """ Convenience class to apply the same operation to a whole set of images. 
+ """ + + def __init__(self, images): + if not isinstance(images, (tuple, list, set)): + images = [images] + self.images = [] + for image in images: + if not isinstance(image, PIL.Image.Image): + image = PIL.Image.fromarray(image) + self.images.append(image) + + def __len__(self): + return len(self.images) + + def to_pil(self): + return tuple(self.images) if len(self.images) > 1 else self.images[0] + + @property + def size(self): + sizes = [im.size for im in self.images] + assert all(sizes[0] == s for s in sizes) + return sizes[0] + + def resize(self, *args, **kwargs): + return ImageList(self._dispatch('resize', *args, **kwargs)) + + def crop(self, *args, **kwargs): + return ImageList(self._dispatch('crop', *args, **kwargs)) + + def _dispatch(self, func, *args, **kwargs): + return [getattr(im, func)(*args, **kwargs) for im in self.images] + + +def rescale_image_depthmap(image, depthmap, camera_intrinsics, output_resolution): + """ Jointly rescale a (image, depthmap) + so that (out_width, out_height) >= output_res + """ + image = ImageList(image) + input_resolution = np.array(image.size) # (W,H) + output_resolution = np.array(output_resolution) + if depthmap is not None: + # can also use this with masks instead of depthmaps + assert tuple(depthmap.shape[:2]) == image.size[::-1] + assert output_resolution.shape == (2,) + # define output resolution + scale_final = max(output_resolution / image.size) + 1e-8 + output_resolution = np.floor(input_resolution * scale_final).astype(int) + + # first rescale the image so that it contains the crop + image = image.resize(output_resolution, resample=lanczos) + if depthmap is not None: + depthmap = cv2.resize(depthmap, output_resolution, fx=scale_final, + fy=scale_final, interpolation=cv2.INTER_NEAREST) + + # no offset here; simple rescaling + camera_intrinsics = camera_matrix_of_crop( + camera_intrinsics, input_resolution, output_resolution, scaling=scale_final) + + return image.to_pil(), depthmap, camera_intrinsics + + +def camera_matrix_of_crop(input_camera_matrix, input_resolution, output_resolution, scaling=1, offset_factor=0.5, offset=None): + # Margins to offset the origin + margins = np.asarray(input_resolution) * scaling - output_resolution + assert np.all(margins >= 0.0) + if offset is None: + offset = offset_factor * margins + + # Generate new camera parameters + output_camera_matrix_colmap = opencv_to_colmap_intrinsics(input_camera_matrix) + output_camera_matrix_colmap[:2, :] *= scaling + output_camera_matrix_colmap[:2, 2] -= offset + output_camera_matrix = colmap_to_opencv_intrinsics(output_camera_matrix_colmap) + + return output_camera_matrix + + +def crop_image_depthmap(image, depthmap, camera_intrinsics, crop_bbox): + """ + Return a crop of the input view. 
+ """ + image = ImageList(image) + l, t, r, b = crop_bbox + + image = image.crop((l, t, r, b)) + depthmap = depthmap[t:b, l:r] + + camera_intrinsics = camera_intrinsics.copy() + camera_intrinsics[0, 2] -= l + camera_intrinsics[1, 2] -= t + + return image.to_pil(), depthmap, camera_intrinsics + + +def bbox_from_intrinsics_in_out(input_camera_matrix, output_camera_matrix, output_resolution): + out_width, out_height = output_resolution + l, t = np.int32(np.round(input_camera_matrix[:2, 2] - output_camera_matrix[:2, 2])) + crop_bbox = (l, t, l+out_width, t+out_height) + return crop_bbox diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/utils/transforms.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/utils/transforms.py new file mode 100644 index 0000000000000000000000000000000000000000..eb34f2f01d3f8f829ba71a7e03e181bf18f72c25 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/datasets/utils/transforms.py @@ -0,0 +1,11 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). +# +# -------------------------------------------------------- +# DUST3R default transforms +# -------------------------------------------------------- +import torchvision.transforms as tvf +from dust3r.utils.image import ImgNorm + +# define the standard image transforms +ColorJitter = tvf.Compose([tvf.ColorJitter(0.5, 0.5, 0.5, 0.1), ImgNorm]) diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/heads/__init__.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/heads/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..53d0aa5610cae95f34f96bdb3ff9e835a2d6208e --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/heads/__init__.py @@ -0,0 +1,19 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). +# +# -------------------------------------------------------- +# head factory +# -------------------------------------------------------- +from .linear_head import LinearPts3d +from .dpt_head import create_dpt_head + + +def head_factory(head_type, output_mode, net, has_conf=False): + """" build a prediction head for the decoder + """ + if head_type == 'linear' and output_mode == 'pts3d': + return LinearPts3d(net, has_conf) + elif head_type == 'dpt' and output_mode == 'pts3d': + return create_dpt_head(net, has_conf=has_conf) + else: + raise NotImplementedError(f"unexpected {head_type=} and {output_mode=}") diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/heads/dpt_head.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/heads/dpt_head.py new file mode 100644 index 0000000000000000000000000000000000000000..3470ac507a776e4af32f39c317c77e9351b96c4b --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/heads/dpt_head.py @@ -0,0 +1,114 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). 
+# +# -------------------------------------------------------- +# dpt head implementation for DUST3R +# Downstream heads assume inputs of size B x N x C (where N is the number of tokens); +# or if it takes as input the output at every layer, the attribute return_all_layers should be set to True +# the forward function also takes as input a dictionary img_info with key "height" and "width" +# for PixelwiseTask, the output will be of dimension B x num_channels x H x W +# -------------------------------------------------------- +from einops import rearrange +from typing import List +import torch +import torch.nn as nn +from .postprocess import postprocess +from ...croco.dpt_block import DPTOutputAdapter # noqa + + +class DPTOutputAdapter_fix(DPTOutputAdapter): + """ + Adapt croco's DPTOutputAdapter implementation for dust3r: + remove duplicated weights, and fix forward for dust3r + """ + + def init(self, dim_tokens_enc=768): + super().init(dim_tokens_enc) + # these are duplicated weights + del self.act_1_postprocess + del self.act_2_postprocess + del self.act_3_postprocess + del self.act_4_postprocess + + def forward(self, encoder_tokens: List[torch.Tensor], image_size=None): + assert self.dim_tokens_enc is not None, 'Need to call init(dim_tokens_enc) function first' + # H, W = input_info['image_size'] + image_size = self.image_size if image_size is None else image_size + H, W = image_size + # Number of patches in height and width + N_H = H // (self.stride_level * self.P_H) + N_W = W // (self.stride_level * self.P_W) + + # Hook decoder onto 4 layers from specified ViT layers + layers = [encoder_tokens[hook] for hook in self.hooks] + + # Extract only task-relevant tokens and ignore global tokens. + layers = [self.adapt_tokens(l) for l in layers] + + # Reshape tokens to spatial representation + layers = [rearrange(l, 'b (nh nw) c -> b c nh nw', nh=N_H, nw=N_W) for l in layers] + + layers = [self.act_postprocess[idx](l) for idx, l in enumerate(layers)] + # Project layers to chosen feature dim + layers = [self.scratch.layer_rn[idx](l) for idx, l in enumerate(layers)] + + # Fuse layers using refinement stages + path_4 = self.scratch.refinenet4(layers[3])[:, :, :layers[2].shape[2], :layers[2].shape[3]] + path_3 = self.scratch.refinenet3(path_4, layers[2]) + path_2 = self.scratch.refinenet2(path_3, layers[1]) + path_1 = self.scratch.refinenet1(path_2, layers[0]) + + # Output head + out = self.head(path_1) + + return out + + +class PixelwiseTaskWithDPT(nn.Module): + """ DPT module for dust3r, can return 3D points + confidence for all pixels""" + + def __init__(self, *, n_cls_token=0, hooks_idx=None, dim_tokens=None, + output_width_ratio=1, num_channels=1, postprocess=None, depth_mode=None, conf_mode=None, **kwargs): + super(PixelwiseTaskWithDPT, self).__init__() + self.return_all_layers = True # backbone needs to return all layers + self.postprocess = postprocess + self.depth_mode = depth_mode + self.conf_mode = conf_mode + + assert n_cls_token == 0, "Not implemented" + dpt_args = dict(output_width_ratio=output_width_ratio, + num_channels=num_channels, + **kwargs) + if hooks_idx is not None: + dpt_args.update(hooks=hooks_idx) + self.dpt = DPTOutputAdapter_fix(**dpt_args) + dpt_init_args = {} if dim_tokens is None else {'dim_tokens_enc': dim_tokens} + self.dpt.init(**dpt_init_args) + + def forward(self, x, img_info): + out = self.dpt(x, image_size=(img_info[0], img_info[1])) + if self.postprocess: + out = self.postprocess(out, self.depth_mode, self.conf_mode) + return out + + +def 
create_dpt_head(net, has_conf=False): + """ + return PixelwiseTaskWithDPT for given net params + """ + assert net.dec_depth > 9 + l2 = net.dec_depth + feature_dim = 256 + last_dim = feature_dim//2 + out_nchan = 3 + ed = net.enc_embed_dim + dd = net.dec_embed_dim + return PixelwiseTaskWithDPT(num_channels=out_nchan + has_conf, + feature_dim=feature_dim, + last_dim=last_dim, + hooks_idx=[0, l2*2//4, l2*3//4, l2], + dim_tokens=[ed, dd, dd, dd], + postprocess=postprocess, + depth_mode=net.depth_mode, + conf_mode=net.conf_mode, + head_type='regression') diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/heads/linear_head.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/heads/linear_head.py new file mode 100644 index 0000000000000000000000000000000000000000..27c5678d551033cc576798626b7ba59b1e7b20cc --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/heads/linear_head.py @@ -0,0 +1,41 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). +# +# -------------------------------------------------------- +# linear head implementation for DUST3R +# -------------------------------------------------------- +import torch.nn as nn +import torch.nn.functional as F +from .postprocess import postprocess + + +class LinearPts3d (nn.Module): + """ + Linear head for dust3r + Each token outputs: - 16x16 3D points (+ confidence) + """ + + def __init__(self, net, has_conf=False): + super().__init__() + self.patch_size = net.patch_embed.patch_size[0] + self.depth_mode = net.depth_mode + self.conf_mode = net.conf_mode + self.has_conf = has_conf + + self.proj = nn.Linear(net.dec_embed_dim, (3 + has_conf)*self.patch_size**2) + + def setup(self, croconet): + pass + + def forward(self, decout, img_shape): + H, W = img_shape + tokens = decout[-1] + B, S, D = tokens.shape + + # extract 3D points + feat = self.proj(tokens) # B,S,D + feat = feat.transpose(-1, -2).view(B, -1, H//self.patch_size, W//self.patch_size) + feat = F.pixel_shuffle(feat, self.patch_size) # B,3,H,W + + # permute + norm depth + return postprocess(feat, self.depth_mode, self.conf_mode) diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/heads/postprocess.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/heads/postprocess.py new file mode 100644 index 0000000000000000000000000000000000000000..cd68a90d89b8dcd7d8a4b4ea06ef8b17eb5da093 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/heads/postprocess.py @@ -0,0 +1,58 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). 
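A quick self-contained check (not part of the diff) of the token-to-pixel reshaping used by LinearPts3d above: each token predicts a 16x16 patch of 3D points, and F.pixel_shuffle rearranges the flat per-token channels into a dense per-pixel map. The shapes below are illustrative only.

import torch
import torch.nn.functional as F

B, p, H, W = 2, 16, 224, 224        # patch size 16 -> a 14x14 token grid
S = (H // p) * (W // p)             # number of tokens
feat = torch.randn(B, S, 3 * p * p) # flat per-token predictions (3 values per pixel)
feat = feat.transpose(-1, -2).view(B, 3 * p * p, H // p, W // p)
pts = F.pixel_shuffle(feat, p)      # (B, 3, H, W): one 3D point per pixel
assert pts.shape == (B, 3, H, W)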
+# +# -------------------------------------------------------- +# post process function for all heads: extract 3D points/confidence from output +# -------------------------------------------------------- +import torch + + +def postprocess(out, depth_mode, conf_mode): + """ + extract 3D points/confidence from prediction head output + """ + fmap = out.permute(0, 2, 3, 1) # B,H,W,3 + res = dict(pts3d=reg_dense_depth(fmap[:, :, :, 0:3], mode=depth_mode)) + + if conf_mode is not None: + res['conf'] = reg_dense_conf(fmap[:, :, :, 3], mode=conf_mode) + return res + + +def reg_dense_depth(xyz, mode): + """ + extract 3D points from prediction head output + """ + mode, vmin, vmax = mode + + no_bounds = (vmin == -float('inf')) and (vmax == float('inf')) + assert no_bounds + + if mode == 'linear': + if no_bounds: + return xyz # [-inf, +inf] + return xyz.clip(min=vmin, max=vmax) + + # distance to origin + d = xyz.norm(dim=-1, keepdim=True) + xyz = xyz / d.clip(min=1e-8) + + if mode == 'square': + return xyz * d.square() + + if mode == 'exp': + return xyz * torch.expm1(d) + + raise ValueError(f'bad {mode=}') + + +def reg_dense_conf(x, mode): + """ + extract confidence from prediction head output + """ + mode, vmin, vmax = mode + if mode == 'exp': + return vmin + x.exp().clip(max=vmax-vmin) + if mode == 'sigmoid': + return (vmax - vmin) * torch.sigmoid(x) + vmin + raise ValueError(f'bad {mode=}') diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/image_pairs.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/image_pairs.py new file mode 100644 index 0000000000000000000000000000000000000000..9251dc822b6b4b11bb9149dfd256ee1e66947562 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/image_pairs.py @@ -0,0 +1,83 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). 
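For intuition on the 'exp' depth mode handled by reg_dense_depth above, a tiny sketch (not part of the diff): the ray direction of each raw prediction is kept, and its norm d is remapped to expm1(d), so distances stay positive and can grow exponentially with the raw output.

import torch

def exp_depth(xyz, eps=1e-8):
    # keep the direction of each prediction; remap its distance d to expm1(d)
    d = xyz.norm(dim=-1, keepdim=True)
    return xyz / d.clip(min=eps) * torch.expm1(d)

print(exp_depth(torch.tensor([[1.0, 0.0, 0.0]])))  # distance becomes e - 1 ~ 1.718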
+# +# -------------------------------------------------------- +# utilities needed to load image pairs +# -------------------------------------------------------- +import numpy as np +import torch + + +def make_pairs(imgs, scene_graph='complete', prefilter=None, symmetrize=True): + pairs = [] + + if scene_graph == 'complete': # complete graph + for i in range(len(imgs)): + for j in range(i): + pairs.append((imgs[i], imgs[j])) + + elif scene_graph.startswith('swin'): + winsize = int(scene_graph.split('-')[1]) if '-' in scene_graph else 3 + for i in range(len(imgs)): + for j in range(winsize): + idx = (i + j) % len(imgs) # explicit loop closure + pairs.append((imgs[i], imgs[idx])) + + elif scene_graph.startswith('oneref'): + refid = int(scene_graph.split('-')[1]) if '-' in scene_graph else 0 + for j in range(len(imgs)): + if j != refid: + pairs.append((imgs[refid], imgs[j])) + + elif scene_graph == 'pairs': + assert len(imgs) % 2 == 0 + for i in range(0, len(imgs), 2): + pairs.append((imgs[i], imgs[i+1])) + + if symmetrize: + pairs += [(img2, img1) for img1, img2 in pairs] + + # now, remove edges + if isinstance(prefilter, str) and prefilter.startswith('seq'): + pairs = filter_pairs_seq(pairs, int(prefilter[3:])) + + if isinstance(prefilter, str) and prefilter.startswith('cyc'): + pairs = filter_pairs_seq(pairs, int(prefilter[3:]), cyclic=True) + + return pairs + + +def sel(x, kept): + if isinstance(x, dict): + return {k: sel(v, kept) for k, v in x.items()} + if isinstance(x, (torch.Tensor, np.ndarray)): + return x[kept] + if isinstance(x, (tuple, list)): + return type(x)([x[k] for k in kept]) + + +def _filter_edges_seq(edges, seq_dis_thr, cyclic=False): + # number of images + n = max(max(e) for e in edges)+1 + + kept = [] + for e, (i, j) in enumerate(edges): + dis = abs(i-j) + if cyclic: + dis = min(dis, abs(i+n-j), abs(i-n-j)) + if dis <= seq_dis_thr: + kept.append(e) + return kept + + +def filter_pairs_seq(pairs, seq_dis_thr, cyclic=False): + edges = [(img1['idx'], img2['idx']) for img1, img2 in pairs] + kept = _filter_edges_seq(edges, seq_dis_thr, cyclic=cyclic) + return [pairs[i] for i in kept] + + +def filter_edges_seq(view1, view2, pred1, pred2, seq_dis_thr, cyclic=False): + edges = [(int(i), int(j)) for i, j in zip(view1['idx'], view2['idx'])] + kept = _filter_edges_seq(edges, seq_dis_thr, cyclic=cyclic) + print(f'>> Filtering edges more than {seq_dis_thr} frames apart: kept {len(kept)}/{len(edges)} edges') + return sel(view1, kept), sel(view2, kept), sel(pred1, kept), sel(pred2, kept) diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/inference.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/inference.py new file mode 100644 index 0000000000000000000000000000000000000000..708bd46e7d67448bcc05cb7a6d717e3dbffe81a3 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/inference.py @@ -0,0 +1,165 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). 
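To illustrate the scene-graph options of make_pairs above, a small stand-alone sketch (not part of the diff) using integers in place of the image dicts; pairs are shown before symmetrization doubles them.

n = 4
complete = [(i, j) for i in range(n) for j in range(i)]  # all n*(n-1)/2 unordered pairs
oneref0 = [(0, j) for j in range(n) if j != 0]           # every image paired with reference 0
print(complete)  # [(1, 0), (2, 0), (2, 1), (3, 0), (3, 1), (3, 2)]
print(oneref0)   # [(0, 1), (0, 2), (0, 3)]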
+# +# -------------------------------------------------------- +# utilities needed for the inference +# -------------------------------------------------------- +import tqdm +import torch +from .utils.device import to_cpu, collate_with_cat +from .model import AsymmetricCroCo3DStereo, inf # noqa: F401, needed when loading the model +from .utils.misc import invalid_to_nans +from .utils.geometry import depthmap_to_pts3d, geotrf + + +def load_model(model_path, device): + print('... loading model from', model_path) + ckpt = torch.load(model_path, map_location='cpu') + args = ckpt['args'].model.replace("ManyAR_PatchEmbed", "PatchEmbedDust3R") + if 'landscape_only' not in args: + args = args[:-1] + ', landscape_only=False)' + else: + args = args.replace(" ", "").replace('landscape_only=True', 'landscape_only=False') + assert "landscape_only=False" in args + print(f"instantiating : {args}") + net = eval(args) + print(net.load_state_dict(ckpt['model'], strict=False)) + return net.to(device) + + +def _interleave_imgs(img1, img2): + res = {} + for key, value1 in img1.items(): + value2 = img2[key] + if isinstance(value1, torch.Tensor): + value = torch.stack((value1, value2), dim=1).flatten(0, 1) + else: + value = [x for pair in zip(value1, value2) for x in pair] + res[key] = value + return res + + +def make_batch_symmetric(batch): + view1, view2 = batch + view1, view2 = (_interleave_imgs(view1, view2), _interleave_imgs(view2, view1)) + return view1, view2 + + +def loss_of_one_batch(batch, model, criterion, device, symmetrize_batch=False, use_amp=False, ret=None): + view1, view2 = batch + for view in batch: + for name in 'img pts3d valid_mask camera_pose camera_intrinsics F_matrix corres'.split(): # pseudo_focal + if name not in view: + continue + view[name] = view[name].to(device, non_blocking=True) + + if symmetrize_batch: + view1, view2 = make_batch_symmetric(batch) + + with torch.cuda.amp.autocast(enabled=bool(use_amp)): + pred1, pred2 = model(view1, view2) + + # loss is supposed to be symmetric + with torch.cuda.amp.autocast(enabled=False): + loss = criterion(view1, view2, pred1, pred2) if criterion is not None else None + + result = dict(view1=view1, view2=view2, pred1=pred1, pred2=pred2, loss=loss) + return result[ret] if ret else result + + +@torch.no_grad() +def inference(pairs, model, device, batch_size=8): + print(f'>> Inference with model on {len(pairs)} image pairs') + result = [] + + # first, check if all images have the same size + multiple_shapes = not (check_if_same_size(pairs)) + if multiple_shapes: # force bs=1 + batch_size = 1 + + for i in tqdm.trange(0, len(pairs), batch_size): + res = loss_of_one_batch(collate_with_cat(pairs[i:i+batch_size]), model, None, device) + result.append(to_cpu(res)) + + result = collate_with_cat(result, lists=multiple_shapes) + + torch.cuda.empty_cache() + return result + + +def check_if_same_size(pairs): + shapes1 = [img1['img'].shape[-2:] for img1, img2 in pairs] + shapes2 = [img2['img'].shape[-2:] for img1, img2 in pairs] + return all(shapes1[0] == s for s in shapes1) and all(shapes2[0] == s for s in shapes2) + + +def get_pred_pts3d(gt, pred, use_pose=False): + if 'depth' in pred and 'pseudo_focal' in pred: + try: + pp = gt['camera_intrinsics'][..., :2, 2] + except KeyError: + pp = None + pts3d = depthmap_to_pts3d(**pred, pp=pp) + + elif 'pts3d' in pred: + # pts3d from my camera + pts3d = pred['pts3d'] + + elif 'pts3d_in_other_view' in pred: + # pts3d from the other camera, already transformed + assert use_pose is True + return 
pred['pts3d_in_other_view'] # return! + + if use_pose: + camera_pose = pred.get('camera_pose') + assert camera_pose is not None + pts3d = geotrf(camera_pose, pts3d) + + return pts3d + + +def find_opt_scaling(gt_pts1, gt_pts2, pr_pts1, pr_pts2=None, fit_mode='weiszfeld_stop_grad', valid1=None, valid2=None): + assert gt_pts1.ndim == pr_pts1.ndim == 4 + assert gt_pts1.shape == pr_pts1.shape + if gt_pts2 is not None: + assert gt_pts2.ndim == pr_pts2.ndim == 4 + assert gt_pts2.shape == pr_pts2.shape + + # concat the pointcloud + nan_gt_pts1 = invalid_to_nans(gt_pts1, valid1).flatten(1, 2) + nan_gt_pts2 = invalid_to_nans(gt_pts2, valid2).flatten(1, 2) if gt_pts2 is not None else None + + pr_pts1 = invalid_to_nans(pr_pts1, valid1).flatten(1, 2) + pr_pts2 = invalid_to_nans(pr_pts2, valid2).flatten(1, 2) if pr_pts2 is not None else None + + all_gt = torch.cat((nan_gt_pts1, nan_gt_pts2), dim=1) if gt_pts2 is not None else nan_gt_pts1 + all_pr = torch.cat((pr_pts1, pr_pts2), dim=1) if pr_pts2 is not None else pr_pts1 + + dot_gt_pr = (all_pr * all_gt).sum(dim=-1) + dot_gt_gt = all_gt.square().sum(dim=-1) + + if fit_mode.startswith('avg'): + # scaling = (all_pr / all_gt).view(B, -1).mean(dim=1) + scaling = dot_gt_pr.nanmean(dim=1) / dot_gt_gt.nanmean(dim=1) + elif fit_mode.startswith('median'): + scaling = (dot_gt_pr / dot_gt_gt).nanmedian(dim=1).values + elif fit_mode.startswith('weiszfeld'): + # init scaling with l2 closed form + scaling = dot_gt_pr.nanmean(dim=1) / dot_gt_gt.nanmean(dim=1) + # iterative re-weighted least-squares + for iter in range(10): + # re-weighting by inverse of distance + dis = (all_pr - scaling.view(-1, 1, 1) * all_gt).norm(dim=-1) + # print(dis.nanmean(-1)) + w = dis.clip_(min=1e-8).reciprocal() + # update the scaling with the new weights + scaling = (w * dot_gt_pr).nanmean(dim=1) / (w * dot_gt_gt).nanmean(dim=1) + else: + raise ValueError(f'bad {fit_mode=}') + + if fit_mode.endswith('stop_grad'): + scaling = scaling.detach() + + scaling = scaling.clip(min=1e-3) + # assert scaling.isfinite().all(), bb() + return scaling diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/losses.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/losses.py new file mode 100644 index 0000000000000000000000000000000000000000..7d6e20fd3a30d6d498afdc13ec852ae984d05f7e --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/losses.py @@ -0,0 +1,297 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). 
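The 'weiszfeld' branch of find_opt_scaling above is an iteratively re-weighted least-squares estimate of a single scale factor; below is a minimal single-cloud sketch (not part of the diff, names illustrative).

import torch

def weiszfeld_scale(gt, pr, n_iter=10):
    # gt, pr: (N, 3) point clouds; returns s minimizing a robust |pr - s*gt| objective
    dot_gp = (pr * gt).sum(-1)         # <pr, gt> per point
    dot_gg = gt.square().sum(-1)       # |gt|^2 per point
    s = dot_gp.mean() / dot_gg.mean()  # closed-form L2 initialization
    for _ in range(n_iter):
        # re-weight each point by the inverse of its current residual distance
        w = (pr - s * gt).norm(dim=-1).clip(min=1e-8).reciprocal()
        s = (w * dot_gp).mean() / (w * dot_gg).mean()
    return s

gt = torch.randn(1000, 3)
pr = 2.5 * gt + 0.01 * torch.randn(1000, 3)
print(float(weiszfeld_scale(gt, pr)))  # ~ 2.5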
+# +# -------------------------------------------------------- +# Implementation of DUSt3R training losses +# -------------------------------------------------------- +from copy import copy, deepcopy +import torch +import torch.nn as nn + +from dust3r.inference import get_pred_pts3d, find_opt_scaling +from dust3r.utils.geometry import inv, geotrf, normalize_pointcloud +from dust3r.utils.geometry import get_joint_pointcloud_depth, get_joint_pointcloud_center_scale + + +def Sum(*losses_and_masks): + loss, mask = losses_and_masks[0] + if loss.ndim > 0: + # we are actually returning the loss for every pixel + return losses_and_masks + else: + # we are returning the global loss + for loss2, mask2 in losses_and_masks[1:]: + loss = loss + loss2 + return loss + + +class LLoss (nn.Module): + """ L-norm loss + """ + + def __init__(self, reduction='mean'): + super().__init__() + self.reduction = reduction + + def forward(self, a, b): + assert a.shape == b.shape and a.ndim >= 2 and 1 <= a.shape[-1] <= 3, f'Bad shape = {a.shape}' + dist = self.distance(a, b) + assert dist.ndim == a.ndim-1 # one dimension less + if self.reduction == 'none': + return dist + if self.reduction == 'sum': + return dist.sum() + if self.reduction == 'mean': + return dist.mean() if dist.numel() > 0 else dist.new_zeros(()) + raise ValueError(f'bad {self.reduction=} mode') + + def distance(self, a, b): + raise NotImplementedError() + + +class L21Loss (LLoss): + """ Euclidean distance between 3d points """ + + def distance(self, a, b): + return torch.norm(a - b, dim=-1) # Euclidean distance per point + + +L21 = L21Loss() + + +class Criterion (nn.Module): + def __init__(self, criterion=None): + super().__init__() + assert isinstance(criterion, LLoss), f'{criterion} is not a proper criterion!' + self.criterion = copy(criterion) + + def get_name(self): + return f'{type(self).__name__}({self.criterion})' + + def with_reduction(self, mode): + res = loss = deepcopy(self) + while loss is not None: + assert isinstance(loss, Criterion) + loss.criterion.reduction = 'none' # make it return the loss for each sample + loss = loss._loss2 # we assume loss is a MultiLoss + return res + + +class MultiLoss (nn.Module): + """ Easily combinable losses (also keep track of individual loss values): + loss = MyLoss1() + 0.1*MyLoss2() + Usage: + Inherit from this class and override get_name() and compute_loss() + """ + + def __init__(self): + super().__init__() + self._alpha = 1 + self._loss2 = None + + def compute_loss(self, *args, **kwargs): + raise NotImplementedError() + + def get_name(self): + raise NotImplementedError() + + def __mul__(self, alpha): + assert isinstance(alpha, (int, float)) + res = copy(self) + res._alpha = alpha + return res + __rmul__ = __mul__ # same + + def __add__(self, loss2): + assert isinstance(loss2, MultiLoss) + res = cur = copy(self) + # find the end of the chain + while cur._loss2 is not None: + cur = cur._loss2 + cur._loss2 = loss2 + return res + + def __repr__(self): + name = self.get_name() + if self._alpha != 1: + name = f'{self._alpha:g}*{name}' + if self._loss2: + name = f'{name} + {self._loss2}' + return name + + def forward(self, *args, **kwargs): + loss = self.compute_loss(*args, **kwargs) + if isinstance(loss, tuple): + loss, details = loss + elif loss.ndim == 0: + details = {self.get_name(): float(loss)} + else: + details = {} + loss = loss * self._alpha + + if self._loss2: + loss2, details2 = self._loss2(*args, **kwargs) + loss = loss + loss2 + details |= details2 + + return loss, details + + +class Regr3D 
(Criterion, MultiLoss): + """ Ensure that all 3D points are correct. + Asymmetric loss: view1 is supposed to be the anchor. + + P1 = RT1 @ D1 + P2 = RT2 @ D2 + loss1 = (I @ pred_D1) - (RT1^-1 @ RT1 @ D1) + loss2 = (RT21 @ pred_D2) - (RT1^-1 @ P2) + = (RT21 @ pred_D2) - (RT1^-1 @ RT2 @ D2) + """ + + def __init__(self, criterion, norm_mode='avg_dis', gt_scale=False): + super().__init__(criterion) + self.norm_mode = norm_mode + self.gt_scale = gt_scale + + def get_all_pts3d(self, gt1, gt2, pred1, pred2, dist_clip=None): + # everything is normalized w.r.t. camera of view1 + in_camera1 = inv(gt1['camera_pose']) + gt_pts1 = geotrf(in_camera1, gt1['pts3d']) # B,H,W,3 + gt_pts2 = geotrf(in_camera1, gt2['pts3d']) # B,H,W,3 + + valid1 = gt1['valid_mask'].clone() + valid2 = gt2['valid_mask'].clone() + + if dist_clip is not None: + # points that are too far-away == invalid + dis1 = gt_pts1.norm(dim=-1) # (B, H, W) + dis2 = gt_pts2.norm(dim=-1) # (B, H, W) + valid1 = valid1 & (dis1 <= dist_clip) + valid2 = valid2 & (dis2 <= dist_clip) + + pr_pts1 = get_pred_pts3d(gt1, pred1, use_pose=False) + pr_pts2 = get_pred_pts3d(gt2, pred2, use_pose=True) + + # normalize 3d points + if self.norm_mode: + pr_pts1, pr_pts2 = normalize_pointcloud(pr_pts1, pr_pts2, self.norm_mode, valid1, valid2) + if self.norm_mode and not self.gt_scale: + gt_pts1, gt_pts2 = normalize_pointcloud(gt_pts1, gt_pts2, self.norm_mode, valid1, valid2) + + return gt_pts1, gt_pts2, pr_pts1, pr_pts2, valid1, valid2, {} + + def compute_loss(self, gt1, gt2, pred1, pred2, **kw): + gt_pts1, gt_pts2, pred_pts1, pred_pts2, mask1, mask2, monitoring = \ + self.get_all_pts3d(gt1, gt2, pred1, pred2, **kw) + # loss on img1 side + l1 = self.criterion(pred_pts1[mask1], gt_pts1[mask1]) + # loss on gt2 side + l2 = self.criterion(pred_pts2[mask2], gt_pts2[mask2]) + self_name = type(self).__name__ + details = {self_name+'_pts3d_1': float(l1.mean()), self_name+'_pts3d_2': float(l2.mean())} + return Sum((l1, mask1), (l2, mask2)), (details | monitoring) + + +class ConfLoss (MultiLoss): + """ Weighted regression by learned confidence. + Assuming the input pixel_loss is a pixel-level regression loss. 
+ + Principle: + high confidence means conf = 10 ==> conf_loss = x * 10 - alpha*log(10) + low confidence means conf = 0.1 ==> conf_loss = x / 10 + alpha*log(10) + + alpha: hyperparameter + """ + + def __init__(self, pixel_loss, alpha=1): + super().__init__() + assert alpha > 0 + self.alpha = alpha + self.pixel_loss = pixel_loss.with_reduction('none') + + def get_name(self): + return f'ConfLoss({self.pixel_loss})' + + def get_conf_log(self, x): + return x, torch.log(x) + + def compute_loss(self, gt1, gt2, pred1, pred2, **kw): + # compute per-pixel loss + ((loss1, msk1), (loss2, msk2)), details = self.pixel_loss(gt1, gt2, pred1, pred2, **kw) + if loss1.numel() == 0: + print('NO VALID POINTS in img1', flush=True) + if loss2.numel() == 0: + print('NO VALID POINTS in img2', flush=True) + + # weight by confidence + conf1, log_conf1 = self.get_conf_log(pred1['conf'][msk1]) + conf2, log_conf2 = self.get_conf_log(pred2['conf'][msk2]) + conf_loss1 = loss1 * conf1 - self.alpha * log_conf1 + conf_loss2 = loss2 * conf2 - self.alpha * log_conf2 + + # average + nan protection (in case of no valid pixels at all) + conf_loss1 = conf_loss1.mean() if conf_loss1.numel() > 0 else 0 + conf_loss2 = conf_loss2.mean() if conf_loss2.numel() > 0 else 0 + + return conf_loss1 + conf_loss2, dict(conf_loss_1=float(conf_loss1), conf_loss_2=float(conf_loss2), **details) + + +class Regr3D_ShiftInv (Regr3D): + """ Same as Regr3D but invariant to depth shift. + """ + + def get_all_pts3d(self, gt1, gt2, pred1, pred2): + # compute unnormalized points + gt_pts1, gt_pts2, pred_pts1, pred_pts2, mask1, mask2, monitoring = \ + super().get_all_pts3d(gt1, gt2, pred1, pred2) + + # compute median depth + gt_z1, gt_z2 = gt_pts1[..., 2], gt_pts2[..., 2] + pred_z1, pred_z2 = pred_pts1[..., 2], pred_pts2[..., 2] + gt_shift_z = get_joint_pointcloud_depth(gt_z1, gt_z2, mask1, mask2)[:, None, None] + pred_shift_z = get_joint_pointcloud_depth(pred_z1, pred_z2, mask1, mask2)[:, None, None] + + # subtract the median depth + gt_z1 -= gt_shift_z + gt_z2 -= gt_shift_z + pred_z1 -= pred_shift_z + pred_z2 -= pred_shift_z + + # monitoring = dict(monitoring, gt_shift_z=gt_shift_z.mean().detach(), pred_shift_z=pred_shift_z.mean().detach()) + return gt_pts1, gt_pts2, pred_pts1, pred_pts2, mask1, mask2, monitoring
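# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the upstream DUSt3R code: the MultiLoss
# operators defined above let terms compose with `+` and `*`. Wrapped in a
# function so nothing executes at import time.
def _example_loss_composition():
    # ConfLoss wraps a pixel-level Regr3D term; scalar weights rescale each term
    criterion = ConfLoss(Regr3D(L21, norm_mode='avg_dis'), alpha=0.2) + 0.3 * Regr3D_ShiftInv(L21)
    # given two annotated views and the two network predictions:
    #   loss, details = criterion(gt1, gt2, pred1, pred2)
    # `details` then holds one entry per term, e.g. 'conf_loss_1', 'Regr3D_ShiftInv_pts3d_1', ...
    return criterion
# ---------------------------------------------------------------------------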
class Regr3D_ScaleInv (Regr3D): + """ Same as Regr3D but invariant to depth scale. + if gt_scale == True: enforce the prediction to take the same scale as the GT + """ + + def get_all_pts3d(self, gt1, gt2, pred1, pred2): + # compute depth-normalized points + gt_pts1, gt_pts2, pred_pts1, pred_pts2, mask1, mask2, monitoring = super().get_all_pts3d(gt1, gt2, pred1, pred2) + + # measure scene scale + _, gt_scale = get_joint_pointcloud_center_scale(gt_pts1, gt_pts2, mask1, mask2) + _, pred_scale = get_joint_pointcloud_center_scale(pred_pts1, pred_pts2, mask1, mask2) + + # prevent predictions from being in a ridiculous range + pred_scale = pred_scale.clip(min=1e-3, max=1e3) + + # normalize by the median scene scale + if self.gt_scale: + pred_pts1 *= gt_scale / pred_scale + pred_pts2 *= gt_scale / pred_scale + # monitoring = dict(monitoring, pred_scale=(pred_scale/gt_scale).mean()) + else: + gt_pts1 /= gt_scale + gt_pts2 /= gt_scale + pred_pts1 /= pred_scale + pred_pts2 /= pred_scale + # monitoring = dict(monitoring, gt_scale=gt_scale.mean(), pred_scale=pred_scale.mean().detach()) + + return gt_pts1, gt_pts2, pred_pts1, pred_pts2, mask1, mask2, monitoring + + +class Regr3D_ScaleShiftInv (Regr3D_ScaleInv, Regr3D_ShiftInv): + # calls Regr3D_ShiftInv first, then Regr3D_ScaleInv + pass diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/model.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/model.py new file mode 100644 index 0000000000000000000000000000000000000000..96ce519e30ccefa06afe55aa1d7b4e9188c74f55 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/model.py @@ -0,0 +1,167 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). +# +# -------------------------------------------------------- +# DUSt3R model class +# -------------------------------------------------------- +from copy import deepcopy +import torch + +from .utils.misc import fill_default_args, freeze_all_params, is_symmetrized, interleave, transpose_to_landscape +from .heads import head_factory +from .patch_embed import get_patch_embed + +from ..croco.croco import CroCoNet # noqa +inf = float('inf') + +class AsymmetricCroCo3DStereo(CroCoNet): + """ Two siamese encoders, followed by two decoders. + The goal is to output 3d points directly, both images in view1's frame + (hence the asymmetry).
+ """ + + def __init__(self, + output_mode='pts3d', + head_type='linear', + depth_mode=('exp', -inf, inf), + conf_mode=('exp', 1, inf), + freeze='none', + landscape_only=True, + patch_embed_cls='PatchEmbedDust3R', # PatchEmbedDust3R or ManyAR_PatchEmbed + **croco_kwargs): + self.patch_embed_cls = patch_embed_cls + self.croco_args = fill_default_args(croco_kwargs, super().__init__) + super().__init__(**croco_kwargs) + + # dust3r specific initialization + self.dec_blocks2 = deepcopy(self.dec_blocks) + self.set_downstream_head(output_mode, head_type, landscape_only, depth_mode, conf_mode, **croco_kwargs) + self.set_freeze(freeze) + + def _set_patch_embed(self, img_size=224, patch_size=16, enc_embed_dim=768): + self.patch_embed = get_patch_embed(self.patch_embed_cls, img_size, patch_size, enc_embed_dim) + + def load_state_dict(self, ckpt, **kw): + # duplicate all weights for the second decoder if not present + new_ckpt = dict(ckpt) + if not any(k.startswith('dec_blocks2') for k in ckpt): + for key, value in ckpt.items(): + if key.startswith('dec_blocks'): + new_ckpt[key.replace('dec_blocks', 'dec_blocks2')] = value + return super().load_state_dict(new_ckpt, **kw) + + def device(self): + return next(self.dec_blocks2.parameters()).device + + def set_freeze(self, freeze): # this is for use by downstream models + self.freeze = freeze + to_be_frozen = { + 'none': [], + 'mask': [self.mask_token], + 'encoder': [self.mask_token, self.patch_embed, self.enc_blocks], + } + freeze_all_params(to_be_frozen[freeze]) + + def _set_prediction_head(self, *args, **kwargs): + """ No prediction head """ + return + + def set_downstream_head(self, output_mode, head_type, landscape_only, depth_mode, conf_mode, patch_size, img_size, + **kw): + assert img_size[0] % patch_size == 0 and img_size[1] % patch_size == 0, \ + f'{img_size=} must be multiple of {patch_size=}' + self.output_mode = output_mode + self.head_type = head_type + self.depth_mode = depth_mode + self.conf_mode = conf_mode + # allocate heads + self.downstream_head1 = head_factory(head_type, output_mode, self, has_conf=bool(conf_mode)) + self.downstream_head2 = head_factory(head_type, output_mode, self, has_conf=bool(conf_mode)) + # magic wrapper + self.head1 = transpose_to_landscape(self.downstream_head1, activate=landscape_only) + self.head2 = transpose_to_landscape(self.downstream_head2, activate=landscape_only) + + def _encode_image(self, image, true_shape=None): + # embed the image into patches (x has size B x Npatches x C) + x, pos = self.patch_embed(image, true_shape=true_shape) + + # add positional embedding without cls token + assert self.enc_pos_embed is None + + # now apply the transformer encoder and normalization + for blk in self.enc_blocks: + x = blk(x, pos) + + x = self.enc_norm(x) + return x, pos, None + + def _encode_image_pairs(self, img1, img2, true_shape1, true_shape2): + if img1.shape[-2:] == img2.shape[-2:]: + out, pos, _ = self._encode_image(torch.cat((img1, img2), dim=0), + torch.cat((true_shape1, true_shape2), dim=0)) + out, out2 = out.chunk(2, dim=0) + pos, pos2 = pos.chunk(2, dim=0) + else: + out, pos, _ = self._encode_image(img1, true_shape1) + out2, pos2, _ = self._encode_image(img2, true_shape2) + return out, out2, pos, pos2 + + def _encode_symmetrized(self, view1, view2): + img1 = view1['img'] + img2 = view2['img'] + B = img1.shape[0] + # Recover true_shape when available, otherwise assume that the img shape is the true one + shape1 = view1.get('true_shape', torch.tensor(img1.shape[-2:])[None].repeat(B, 1)) + shape2 = 
view2.get('true_shape', torch.tensor(img2.shape[-2:])[None].repeat(B, 1)) + # warning! maybe the images have different portrait/landscape orientations + + if is_symmetrized(view1, view2): + # computing half of the forward pass! + feat1, feat2, pos1, pos2 = self._encode_image_pairs(img1[::2], img2[::2], shape1[::2], shape2[::2]) + feat1, feat2 = interleave(feat1, feat2) + pos1, pos2 = interleave(pos1, pos2) + else: + feat1, feat2, pos1, pos2 = self._encode_image_pairs(img1, img2, shape1, shape2) + + return (shape1, shape2), (feat1, feat2), (pos1, pos2) + + def _decoder(self, f1, pos1, f2, pos2): + final_output = [(f1, f2)] # before projection + + # project to decoder dim + f1 = self.decoder_embed(f1) + f2 = self.decoder_embed(f2) + + final_output.append((f1, f2)) + for blk1, blk2 in zip(self.dec_blocks, self.dec_blocks2): + # img1 side + f1, _ = blk1(*final_output[-1][::+1], pos1, pos2) + # img2 side + f2, _ = blk2(*final_output[-1][::-1], pos2, pos1) + # store the result + final_output.append((f1, f2)) + + # normalize last output + del final_output[1] # duplicate with final_output[0] + final_output[-1] = tuple(map(self.dec_norm, final_output[-1])) + return zip(*final_output) + + def _downstream_head(self, head_num, decout, img_shape): + B, S, D = decout[-1].shape + # img_shape = tuple(map(int, img_shape)) + head = getattr(self, f'head{head_num}') + return head(decout, img_shape) + + def forward(self, view1, view2): + # encode the two images --> B,S,D + (shape1, shape2), (feat1, feat2), (pos1, pos2) = self._encode_symmetrized(view1, view2) + + # combine all ref images into object-centric representation + dec1, dec2 = self._decoder(feat1, pos1, feat2, pos2) + + with torch.cuda.amp.autocast(enabled=False): + res1 = self._downstream_head(1, [tok.float() for tok in dec1], shape1) + res2 = self._downstream_head(2, [tok.float() for tok in dec2], shape2) + + res2['pts3d_in_other_view'] = res2.pop('pts3d') # predict view2's pts3d in view1's frame + return res1, res2 diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/optim_factory.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/optim_factory.py new file mode 100644 index 0000000000000000000000000000000000000000..9b9c16e0e0fda3fd03c3def61abc1f354f75c584 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/optim_factory.py @@ -0,0 +1,14 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). +# +# -------------------------------------------------------- +# optimization functions +# -------------------------------------------------------- + + +def adjust_learning_rate_by_lr(optimizer, lr): + for param_group in optimizer.param_groups: + if "lr_scale" in param_group: + param_group["lr"] = lr * param_group["lr_scale"] + else: + param_group["lr"] = lr
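adjust_learning_rate_by_lr simply writes a scheduler-computed learning rate into every param group, honoring an optional per-group "lr_scale" key (the usual hook for layer-wise lr decay). A minimal sketch, assuming the function above is importable; the group layout and the 0.1 scale are illustrative, not from this diff:

import torch

model = torch.nn.Linear(4, 4)
optimizer = torch.optim.AdamW([
    {"params": [model.weight], "lr_scale": 0.1},  # e.g. a backbone group decayed 10x
    {"params": [model.bias]},
])

adjust_learning_rate_by_lr(optimizer, 1e-4)
print([g["lr"] for g in optimizer.param_groups])  # approximately [1e-05, 0.0001]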
diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/patch_embed.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/patch_embed.py new file mode 100644 index 0000000000000000000000000000000000000000..4ecd49ed62613f49df7ad82f40202b10843e7885 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/patch_embed.py @@ -0,0 +1,70 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). +# +# -------------------------------------------------------- +# PatchEmbed implementation for DUSt3R, +# in particular ManyAR_PatchEmbed, which handles images with non-square aspect ratios +# -------------------------------------------------------- +import torch +# import dust3r.utils.path_to_croco # noqa: F401 +from ..croco.blocks import PatchEmbed + + +def get_patch_embed(patch_embed_cls, img_size, patch_size, enc_embed_dim): + assert patch_embed_cls in ['PatchEmbedDust3R', 'ManyAR_PatchEmbed'] + patch_embed = eval(patch_embed_cls)(img_size, patch_size, 3, enc_embed_dim) + return patch_embed + + +class PatchEmbedDust3R(PatchEmbed): + def forward(self, x, **kw): + B, C, H, W = x.shape + assert H % self.patch_size[0] == 0, f"Input image height ({H}) is not a multiple of patch size ({self.patch_size[0]})." + assert W % self.patch_size[1] == 0, f"Input image width ({W}) is not a multiple of patch size ({self.patch_size[1]})." + x = self.proj(x) + pos = self.position_getter(B, x.size(2), x.size(3), x.device) + if self.flatten: + x = x.flatten(2).transpose(1, 2) # BCHW -> BNC + x = self.norm(x) + return x, pos + + +class ManyAR_PatchEmbed (PatchEmbed): + """ Handle images with non-square aspect ratio. + All images in the same batch have the same aspect ratio. + true_shape = [(height, width) ...] indicates the actual shape of each image. + """ + + def __init__(self, img_size=224, patch_size=16, in_chans=3, embed_dim=768, norm_layer=None, flatten=True): + self.embed_dim = embed_dim + super().__init__(img_size, patch_size, in_chans, embed_dim, norm_layer, flatten) + + def forward(self, img, true_shape): + B, C, H, W = img.shape + assert W >= H, f'img should be in landscape mode, but got {W=} {H=}' + assert H % self.patch_size[0] == 0, f"Input image height ({H}) is not a multiple of patch size ({self.patch_size[0]})." + assert W % self.patch_size[1] == 0, f"Input image width ({W}) is not a multiple of patch size ({self.patch_size[1]})." + assert true_shape.shape == (B, 2), f"true_shape has the wrong shape={true_shape.shape}" + + # size expressed in tokens (patch_size is (height, width); both entries are equal for square patches) + W //= self.patch_size[1] + H //= self.patch_size[0] + n_tokens = H * W + + height, width = true_shape.T + is_landscape = (width >= height) + is_portrait = ~is_landscape + + # allocate result + x = img.new_zeros((B, n_tokens, self.embed_dim)) + pos = img.new_zeros((B, n_tokens, 2), dtype=torch.int64) + + # linear projection, transposed if necessary + x[is_landscape] = self.proj(img[is_landscape]).permute(0, 2, 3, 1).flatten(1, 2).float() + x[is_portrait] = self.proj(img[is_portrait].swapaxes(-1, -2)).permute(0, 2, 3, 1).flatten(1, 2).float() + + pos[is_landscape] = self.position_getter(1, H, W, pos.device) + pos[is_portrait] = self.position_getter(1, W, H, pos.device) + + x = self.norm(x) + return x, pos
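A short usage sketch for the mixed-orientation path above. This is a hedged illustration: the shapes are made up, and it assumes the croco PatchEmbed base class is importable so ManyAR_PatchEmbed can be constructed:

import torch

# batch of 2 images stored on a landscape canvas (the forward asserts W >= H)
img = torch.randn(2, 3, 224, 320)
# per-image true shapes: one landscape (224, 320), one portrait (320, 224)
true_shape = torch.tensor([[224, 320], [320, 224]])

embed = ManyAR_PatchEmbed(img_size=224, patch_size=16, in_chans=3, embed_dim=768)
x, pos = embed(img, true_shape)
print(x.shape, pos.shape)  # torch.Size([2, 280, 768]) torch.Size([2, 280, 2])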
diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/post_process.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/post_process.py new file mode 100644 index 0000000000000000000000000000000000000000..7d953bce2149eca6021f280d28945378ad8d77e0 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/post_process.py @@ -0,0 +1,60 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). +# +# -------------------------------------------------------- +# utilities for interpreting the DUSt3R output +# -------------------------------------------------------- +import numpy as np +import torch +from dust3r.utils.geometry import xy_grid + + +def estimate_focal_knowing_depth(pts3d, pp, focal_mode='median', min_focal=0.5, max_focal=3.5): + """ Reprojection method, for when the absolute depth is known: + 1) estimate the camera focal using a robust estimator + 2) reproject points onto true rays, minimizing a certain error + """ + B, H, W, THREE = pts3d.shape + assert THREE == 3 + + # centered pixel grid + pixels = xy_grid(W, H, device=pts3d.device).view(1, -1, 2) - pp.view(-1, 1, 2) # B,HW,2 + pts3d = pts3d.flatten(1, 2) # (B, HW, 3) + + if focal_mode == 'median': + with torch.no_grad(): + # direct estimation of focal + u, v = pixels.unbind(dim=-1) + x, y, z = pts3d.unbind(dim=-1) + fx_votes = (u * z) / x + fy_votes = (v * z) / y + + # assume square pixels, hence same focal for X and Y + f_votes = torch.cat((fx_votes.view(B, -1), fy_votes.view(B, -1)), dim=-1) + focal = torch.nanmedian(f_votes, dim=-1).values + + elif focal_mode == 'weiszfeld': + # init focal with l2 closed form + # we try to find focal = argmin Sum | pixel - focal * (x,y)/z | + xy_over_z = (pts3d[..., :2] / pts3d[..., 2:3]).nan_to_num(posinf=0, neginf=0) # homogeneous (x,y,1) + + dot_xy_px = (xy_over_z * pixels).sum(dim=-1) + dot_xy_xy = xy_over_z.square().sum(dim=-1) + + focal = dot_xy_px.mean(dim=1) / dot_xy_xy.mean(dim=1) + + # iterative re-weighted least-squares + for _ in range(10): + # re-weighting by inverse of distance + dis = (pixels - focal.view(-1, 1, 1) * xy_over_z).norm(dim=-1) + w = dis.clip(min=1e-8).reciprocal() + # update the focal with the new weights + focal = (w * dot_xy_px).mean(dim=1) / (w * dot_xy_xy).mean(dim=1) + else: + raise ValueError(f'bad {focal_mode=}') + + focal_base = max(H, W) / (2 * np.tan(np.deg2rad(60) / 2)) # size / 1.1547005383792515 + focal = focal.clip(min=min_focal*focal_base, max=max_focal*focal_base) + return focal
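A quick synthetic sanity check of the median voting above. This is a sketch with made-up intrinsics, assuming xy_grid and estimate_focal_knowing_depth are importable as in this file:

import torch

B, H, W, f_true = 1, 32, 48, 40.0
pp = torch.tensor([[W / 2, H / 2]])

# build a pointmap that reprojects exactly through a pinhole with focal f_true
z = torch.rand(B, H, W, 1) + 1.0
uv = xy_grid(W, H, device=z.device).float().view(1, H, W, 2) - pp.view(1, 1, 1, 2)
pts3d = torch.cat((uv * z / f_true, z), dim=-1)  # x = u*z/f, y = v*z/f

focal = estimate_focal_knowing_depth(pts3d, pp, focal_mode='median')
print(focal)  # tensor([40.]) up to numerical noise (and the min/max focal clamping)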
diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/utils/__init__.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a32692113d830ddc4af4e6ed608f222fbe062e6e --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/utils/__init__.py @@ -0,0 +1,2 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/utils/device.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/utils/device.py new file mode 100644 index 0000000000000000000000000000000000000000..e3b6a74dac05a2e1ba3a2b2f0faa8cea08ece745 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/utils/device.py @@ -0,0 +1,76 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). +# +# -------------------------------------------------------- +# utility functions for DUSt3R +# -------------------------------------------------------- +import numpy as np +import torch + + +def todevice(batch, device, callback=None, non_blocking=False): + ''' Transfer some variables to another device (i.e. GPU, CPU:torch, CPU:numpy). + + batch: list, tuple, dict of tensors or other things + device: pytorch device or 'numpy' + callback: function that would be called on every sub-element. + ''' + if callback: + batch = callback(batch) + + if isinstance(batch, dict): + # recurse, keeping the non_blocking flag + return {k: todevice(v, device, non_blocking=non_blocking) for k, v in batch.items()} + + if isinstance(batch, (tuple, list)): + return type(batch)(todevice(x, device, non_blocking=non_blocking) for x in batch) + + x = batch + if device == 'numpy': + if isinstance(x, torch.Tensor): + x = x.detach().cpu().numpy() + elif x is not None: + if isinstance(x, np.ndarray): + x = torch.from_numpy(x) + if torch.is_tensor(x): + x = x.to(device, non_blocking=non_blocking) + return x + + +to_device = todevice # alias + + +def to_numpy(x): return todevice(x, 'numpy') +def to_cpu(x): return todevice(x, 'cpu') +def to_cuda(x): return todevice(x, 'cuda') + + +def collate_with_cat(whatever, lists=False): + if isinstance(whatever, dict): + return {k: collate_with_cat(vals, lists=lists) for k, vals in whatever.items()} + + elif isinstance(whatever, (tuple, list)): + if len(whatever) == 0: + return whatever + elem = whatever[0] + T = type(whatever) + + if elem is None: + return None + if isinstance(elem, (bool, float, int, str)): + return whatever + if isinstance(elem, tuple): + return T(collate_with_cat(x, lists=lists) for x in zip(*whatever)) + if isinstance(elem, dict): + return {k: collate_with_cat([e[k] for e in whatever], lists=lists) for k in elem} + + if isinstance(elem, torch.Tensor): + return listify(whatever) if lists else torch.cat(whatever) + if isinstance(elem, np.ndarray): + return listify(whatever) if lists else torch.cat([torch.from_numpy(x) for x in whatever]) + + # otherwise, we just chain lists + return sum(whatever, T()) + + +def listify(elems): + return [x for e in elems for x in e]
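A small usage sketch of the helpers above, assuming to_cpu and to_numpy from this file are in scope; the batch layout is made up:

import numpy as np
import torch

batch = {"img": torch.zeros(2, 3, 4, 4), "meta": {"idx": [0, 1]}, "K": np.eye(3)}

cpu_batch = to_cpu(batch)   # tensors moved to CPU, numpy arrays converted to tensors
np_batch = to_numpy(batch)  # tensors converted to numpy arrays
print(type(np_batch["img"]), type(cpu_batch["K"]))
# <class 'numpy.ndarray'> <class 'torch.Tensor'>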
diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/utils/geometry.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/utils/geometry.py new file mode 100644 index 0000000000000000000000000000000000000000..c3a11fe4fbddb5e085a8053a373dd91d29d2c664 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/utils/geometry.py @@ -0,0 +1,361 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). +# +# -------------------------------------------------------- +# geometry utility functions +# -------------------------------------------------------- +import torch +import numpy as np +from scipy.spatial import cKDTree as KDTree + +from .misc import invalid_to_zeros, invalid_to_nans +from .device import to_numpy + + +def xy_grid(W, H, device=None, origin=(0, 0), unsqueeze=None, cat_dim=-1, homogeneous=False, **arange_kw): + """ Output a (H,W,2) array of int32 + with output[j,i,0] = i + origin[0] + output[j,i,1] = j + origin[1] + """ + if device is None: + # numpy + arange, meshgrid, stack, ones = np.arange, np.meshgrid, np.stack, np.ones + else: + # torch + arange = lambda *a, **kw: torch.arange(*a, device=device, **kw) + meshgrid, stack = torch.meshgrid, torch.stack + ones = lambda *a: torch.ones(*a, device=device) + + tw, th = [arange(o, o+s, **arange_kw) for s, o in zip((W, H), origin)] + grid = meshgrid(tw, th, indexing='xy') + if homogeneous: + grid = grid + (ones((H, W)),) + if unsqueeze is not None: + grid = (grid[0].unsqueeze(unsqueeze), grid[1].unsqueeze(unsqueeze)) + if cat_dim is not None: + grid = stack(grid, cat_dim) + return grid + + +def geotrf(Trf, pts, ncol=None, norm=False): + """ Apply a geometric transformation to a list of 3-D points. + + Trf: 3x3 or 4x4 projection matrix (typically a Homography) + pts: numpy/torch/tuple of coordinates. Shape must be (...,2) or (...,3) + + ncol: int. number of columns of the result (2 or 3) + norm: float. if != 0, the result is projected on the z=norm plane. + + Returns an array of projected 2d points. + """ + assert Trf.ndim >= 2 + if isinstance(Trf, np.ndarray): + pts = np.asarray(pts) + elif isinstance(Trf, torch.Tensor): + pts = torch.as_tensor(pts, dtype=Trf.dtype) + + # adapt shape if necessary + output_reshape = pts.shape[:-1] + ncol = ncol or pts.shape[-1] + + # optimized code + if (isinstance(Trf, torch.Tensor) and isinstance(pts, torch.Tensor) and + Trf.ndim == 3 and pts.ndim == 4): + d = pts.shape[3] + if Trf.shape[-1] == d: + pts = torch.einsum("bij, bhwj -> bhwi", Trf, pts) + elif Trf.shape[-1] == d+1: + pts = torch.einsum("bij, bhwj -> bhwi", Trf[:, :d, :d], pts) + Trf[:, None, None, :d, d] + else: + raise ValueError(f'bad shape, not ending with 3 or 4, for {pts.shape=}') + else: + if Trf.ndim >= 3: + n = Trf.ndim-2 + assert Trf.shape[:n] == pts.shape[:n], 'batch size does not match' + Trf = Trf.reshape(-1, Trf.shape[-2], Trf.shape[-1]) + + if pts.ndim > Trf.ndim: + # Trf == (B,d,d) & pts == (B,H,W,d) --> (B, H*W, d) + pts = pts.reshape(Trf.shape[0], -1, pts.shape[-1]) + elif pts.ndim == 2: + # Trf == (B,d,d) & pts == (B,d) --> (B, 1, d) + pts = pts[:, None, :] + + if pts.shape[-1]+1 == Trf.shape[-1]: + Trf = Trf.swapaxes(-1, -2) # transpose Trf + pts = pts @ Trf[..., :-1, :] + Trf[..., -1:, :] + elif pts.shape[-1] == Trf.shape[-1]: + Trf = Trf.swapaxes(-1, -2) # transpose Trf + pts = pts @ Trf + else: + pts = Trf @ pts.T + if pts.ndim >= 2: + pts = pts.swapaxes(-1, -2) + + if norm: + pts = pts / pts[..., -1:] # DONT DO /= BECAUSE OF WEIRD PYTORCH BUG + if norm != 1: + pts *= norm + + res = pts[..., :ncol].reshape(*output_reshape, ncol) + return res + + +def inv(mat): + """ Invert a torch or numpy matrix + """ + if isinstance(mat, torch.Tensor): + return torch.linalg.inv(mat) + if isinstance(mat, np.ndarray): + return np.linalg.inv(mat) + raise ValueError(f'bad matrix type = {type(mat)}') + + +def depthmap_to_pts3d(depth, pseudo_focal, pp=None, **_): + """ + Args: + - depthmap (BxHxW array): + - pseudo_focal: [B,H,W] ;
[B,2,H,W] or [B,1,H,W] + Returns: + pointmap of absolute coordinates (BxHxWx3 array) + """ + + if len(depth.shape) == 4: + B, H, W, n = depth.shape + else: + B, H, W = depth.shape + n = None + + if len(pseudo_focal.shape) == 3: # [B,H,W] + pseudo_focalx = pseudo_focaly = pseudo_focal + elif len(pseudo_focal.shape) == 4: # [B,2,H,W] or [B,1,H,W] + pseudo_focalx = pseudo_focal[:, 0] + if pseudo_focal.shape[1] == 2: + pseudo_focaly = pseudo_focal[:, 1] + else: + pseudo_focaly = pseudo_focalx + else: + raise NotImplementedError("Error, unknown input focal shape format.") + + assert pseudo_focalx.shape == depth.shape[:3] + assert pseudo_focaly.shape == depth.shape[:3] + grid_x, grid_y = xy_grid(W, H, cat_dim=0, device=depth.device)[:, None] + + # set principal point + if pp is None: + grid_x = grid_x - (W-1)/2 + grid_y = grid_y - (H-1)/2 + else: + grid_x = grid_x.expand(B, -1, -1) - pp[:, 0, None, None] + grid_y = grid_y.expand(B, -1, -1) - pp[:, 1, None, None] + + if n is None: + pts3d = torch.empty((B, H, W, 3), device=depth.device) + pts3d[..., 0] = depth * grid_x / pseudo_focalx + pts3d[..., 1] = depth * grid_y / pseudo_focaly + pts3d[..., 2] = depth + else: + pts3d = torch.empty((B, H, W, 3, n), device=depth.device) + pts3d[..., 0, :] = depth * (grid_x / pseudo_focalx)[..., None] + pts3d[..., 1, :] = depth * (grid_y / pseudo_focaly)[..., None] + pts3d[..., 2, :] = depth + return pts3d + + +def depthmap_to_camera_coordinates(depthmap, camera_intrinsics, pseudo_focal=None): + """ + Args: + - depthmap (HxW array): + - camera_intrinsics: a 3x3 matrix + Returns: + pointmap of absolute coordinates (HxWx3 array), and a mask specifying valid pixels. + """ + camera_intrinsics = np.float32(camera_intrinsics) + H, W = depthmap.shape + + # Compute 3D ray associated with each pixel + # Strong assumption: there are no skew terms + assert camera_intrinsics[0, 1] == 0.0 + assert camera_intrinsics[1, 0] == 0.0 + if pseudo_focal is None: + fu = camera_intrinsics[0, 0] + fv = camera_intrinsics[1, 1] + else: + assert pseudo_focal.shape == (H, W) + fu = fv = pseudo_focal + cu = camera_intrinsics[0, 2] + cv = camera_intrinsics[1, 2] + + u, v = np.meshgrid(np.arange(W), np.arange(H)) + z_cam = depthmap + x_cam = (u - cu) * z_cam / fu + y_cam = (v - cv) * z_cam / fv + X_cam = np.stack((x_cam, y_cam, z_cam), axis=-1).astype(np.float32) + + # Mask for valid coordinates + valid_mask = (depthmap > 0.0) + return X_cam, valid_mask + + +def depthmap_to_absolute_camera_coordinates(depthmap, camera_intrinsics, camera_pose, **kw): + """ + Args: + - depthmap (HxW array): + - camera_intrinsics: a 3x3 matrix + - camera_pose: a 4x3 or 4x4 cam2world matrix + Returns: + pointmap of absolute coordinates (HxWx3 array), and a mask specifying valid pixels.""" + X_cam, valid_mask = depthmap_to_camera_coordinates(depthmap, camera_intrinsics) + + # R_cam2world = np.float32(camera_params["R_cam2world"]) + # t_cam2world = np.float32(camera_params["t_cam2world"]).squeeze() + R_cam2world = camera_pose[:3, :3] + t_cam2world = camera_pose[:3, 3] + + # Express in absolute coordinates (invalid depth values) + X_world = np.einsum("ik, vuk -> vui", R_cam2world, X_cam) + t_cam2world[None, None, :] + return X_world, valid_mask + + +def colmap_to_opencv_intrinsics(K): + """ + Modify camera intrinsics to follow a different convention. 
+ Coordinates of the center of the top-left pixels are by default: + - (0.5, 0.5) in Colmap + - (0,0) in OpenCV + """ + K = K.copy() + K[0, 2] -= 0.5 + K[1, 2] -= 0.5 + return K + + +def opencv_to_colmap_intrinsics(K): + """ + Modify camera intrinsics to follow a different convention. + Coordinates of the center of the top-left pixels are by default: + - (0.5, 0.5) in Colmap + - (0,0) in OpenCV + """ + K = K.copy() + K[0, 2] += 0.5 + K[1, 2] += 0.5 + return K + + +def normalize_pointcloud(pts1, pts2, norm_mode='avg_dis', valid1=None, valid2=None): + """ renorm pointmaps pts1, pts2 with norm_mode + """ + assert pts1.ndim >= 3 and pts1.shape[-1] == 3 + assert pts2 is None or (pts2.ndim >= 3 and pts2.shape[-1] == 3) + norm_mode, dis_mode = norm_mode.split('_') + + if norm_mode == 'avg': + # gather all points together (joint normalization) + nan_pts1, nnz1 = invalid_to_zeros(pts1, valid1, ndim=3) + nan_pts2, nnz2 = invalid_to_zeros(pts2, valid2, ndim=3) if pts2 is not None else (None, 0) + all_pts = torch.cat((nan_pts1, nan_pts2), dim=1) if pts2 is not None else nan_pts1 + + # compute distance to origin + all_dis = all_pts.norm(dim=-1) + if dis_mode == 'dis': + pass # do nothing + elif dis_mode == 'log1p': + all_dis = torch.log1p(all_dis) + elif dis_mode == 'warp-log1p': + # actually warp input points before normalizing them + log_dis = torch.log1p(all_dis) + warp_factor = log_dis / all_dis.clip(min=1e-8) + H1, W1 = pts1.shape[1:-1] + pts1 = pts1 * warp_factor[:, :W1*H1].view(-1, H1, W1, 1) + if pts2 is not None: + H2, W2 = pts2.shape[1:-1] + pts2 = pts2 * warp_factor[:, W1*H1:].view(-1, H2, W2, 1) + all_dis = log_dis # this is their true distance afterwards + else: + raise ValueError(f'bad {dis_mode=}') + + norm_factor = all_dis.sum(dim=1) / (nnz1 + nnz2 + 1e-8) + else: + # gather all points together (joint normalization) + nan_pts1 = invalid_to_nans(pts1, valid1, ndim=3) + nan_pts2 = invalid_to_nans(pts2, valid2, ndim=3) if pts2 is not None else None + all_pts = torch.cat((nan_pts1, nan_pts2), dim=1) if pts2 is not None else nan_pts1 + + # compute distance to origin + all_dis = all_pts.norm(dim=-1) + + if norm_mode == 'avg': + norm_factor = all_dis.nanmean(dim=1) + elif norm_mode == 'median': + norm_factor = all_dis.nanmedian(dim=1).values.detach() + elif norm_mode == 'sqrt': + norm_factor = all_dis.sqrt().nanmean(dim=1)**2 + else: + raise ValueError(f'bad {norm_mode=}') + + norm_factor = norm_factor.clip(min=1e-8) + while norm_factor.ndim < pts1.ndim: + norm_factor.unsqueeze_(-1) + + res = pts1 / norm_factor + if pts2 is not None: + res = (res, pts2 / norm_factor) + return res + + +@torch.no_grad() +def get_joint_pointcloud_depth(z1, z2, valid_mask1, valid_mask2=None, quantile=0.5): + # set invalid points to NaN + _z1 = invalid_to_nans(z1, valid_mask1).reshape(len(z1), -1) + _z2 = invalid_to_nans(z2, valid_mask2).reshape(len(z2), -1) if z2 is not None else None + _z = torch.cat((_z1, _z2), dim=-1) if z2 is not None else _z1 + + # compute median depth overall (ignoring nans) + if quantile == 0.5: + shift_z = torch.nanmedian(_z, dim=-1).values + else: + shift_z = torch.nanquantile(_z, quantile, dim=-1) + return shift_z # (B,) + + +@torch.no_grad() +def get_joint_pointcloud_center_scale(pts1, pts2, valid_mask1=None, valid_mask2=None, z_only=False, center=True): + # set invalid points to NaN + _pts1 = invalid_to_nans(pts1, valid_mask1).reshape(len(pts1), -1, 3) + _pts2 = invalid_to_nans(pts2, valid_mask2).reshape(len(pts2), -1, 3) if pts2 is not None else None + _pts = torch.cat((_pts1, _pts2), 
dim=1) if pts2 is not None else _pts1 + + # compute median center + _center = torch.nanmedian(_pts, dim=1, keepdim=True).values # (B,1,3) + if z_only: + _center[..., :2] = 0 # do not center X and Y + + # compute median norm + _norm = ((_pts - _center) if center else _pts).norm(dim=-1) + scale = torch.nanmedian(_norm, dim=1).values + return _center[:, None, :, :], scale[:, None, None, None] + + +def find_reciprocal_matches(P1, P2): + """ + returns 3 values: + 1 - reciprocal_in_P2: a boolean array of size P2.shape[0], a "True" value indicates a match + 2 - nn2_in_P1: an int array of size P2.shape[0], it contains the indices of the closest points in P1 + 3 - reciprocal_in_P2.sum(): the number of matches + """ + tree1 = KDTree(P1) + tree2 = KDTree(P2) + + _, nn1_in_P2 = tree2.query(P1, workers=8) + _, nn2_in_P1 = tree1.query(P2, workers=8) + + reciprocal_in_P1 = (nn2_in_P1[nn1_in_P2] == np.arange(len(nn1_in_P2))) + reciprocal_in_P2 = (nn1_in_P2[nn2_in_P1] == np.arange(len(nn2_in_P1))) + assert reciprocal_in_P1.sum() == reciprocal_in_P2.sum() + return reciprocal_in_P2, nn2_in_P1, reciprocal_in_P2.sum() + + +def get_med_dist_between_poses(poses): + from scipy.spatial.distance import pdist + return np.median(pdist([to_numpy(p[:3, 3]) for p in poses]))
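A tiny worked example of the mutual nearest-neighbour test above, assuming find_reciprocal_matches from this file is in scope; the point sets are made up:

import numpy as np

P1 = np.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [5.0, 5.0, 5.0]])
P2 = np.array([[0.1, 0.0, 0.0], [0.9, 0.0, 0.0]])

matches, nn2_in_P1, n = find_reciprocal_matches(P1, P2)
print(matches)    # [ True  True ]  both P2 points are mutual nearest neighbours
print(nn2_in_P1)  # [0 1]
print(n)          # 2  (P1's third point has no reciprocal match)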
diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/utils/image.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/utils/image.py new file mode 100644 index 0000000000000000000000000000000000000000..51a3e4391a3b620c13f3c514051ce000e9406a57 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/utils/image.py @@ -0,0 +1,104 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). +# +# -------------------------------------------------------- +# utility functions for images (loading/converting...) +# -------------------------------------------------------- +import os +import torch +import numpy as np +import PIL.Image +import torchvision.transforms as tvf +os.environ["OPENCV_IO_ENABLE_OPENEXR"] = "1" +import cv2 # noqa + +ImgNorm = tvf.Compose([tvf.ToTensor(), tvf.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))]) + + +def imread_cv2(path, options=cv2.IMREAD_COLOR): + """ Open an image or a depthmap with opencv-python. + """ + if path.endswith(('.exr', 'EXR')): + options = cv2.IMREAD_ANYDEPTH + img = cv2.imread(path, options) + if img is None: + raise IOError(f'Could not load image={path} with {options=}') + if img.ndim == 3: + img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) + return img + + +def rgb(ftensor, true_shape=None): + if isinstance(ftensor, list): + return [rgb(x, true_shape=true_shape) for x in ftensor] + if isinstance(ftensor, torch.Tensor): + ftensor = ftensor.detach().cpu().numpy() # H,W,3 + if ftensor.ndim == 3 and ftensor.shape[0] == 3: + ftensor = ftensor.transpose(1, 2, 0) + elif ftensor.ndim == 4 and ftensor.shape[1] == 3: + ftensor = ftensor.transpose(0, 2, 3, 1) + if true_shape is not None: + H, W = true_shape + ftensor = ftensor[:H, :W] + if ftensor.dtype == np.uint8: + img = np.float32(ftensor) / 255 + else: + img = (ftensor * 0.5) + 0.5 + return img.clip(min=0, max=1) + + +def _resize_pil_image(img, long_edge_size): + S = max(img.size) + if S > long_edge_size: + interp = PIL.Image.LANCZOS + else: + interp = PIL.Image.BICUBIC + new_size = tuple(int(round(x*long_edge_size/S)) for x in img.size) + return img.resize(new_size, interp) + + +def load_images(folder_or_list, size, square_ok=False): + """ open and convert all images in a list or folder to proper input format for DUSt3R + """ + if isinstance(folder_or_list, str): + print(f'>> Loading images from {folder_or_list}') + root, folder_content = folder_or_list, sorted(os.listdir(folder_or_list)) + + elif isinstance(folder_or_list, list): + print(f'>> Loading a list of {len(folder_or_list)} images') + root, folder_content = '', folder_or_list + + else: + raise ValueError(f'bad {folder_or_list=} ({type(folder_or_list)})') + + imgs = [] + for path in folder_content: + if not path.endswith(('.jpg', '.jpeg', '.png', '.JPG')): + continue + img = PIL.Image.open(os.path.join(root, path)).convert('RGB') + W1, H1 = img.size + if size == 224: + # resize short side to 224 (then crop) + img = _resize_pil_image(img, round(size * max(W1/H1, H1/W1))) + else: + # resize long side to 512 + img = _resize_pil_image(img, size) + W, H = img.size + cx, cy = W//2, H//2 + if size == 224: + half = min(cx, cy) + img = img.crop((cx-half, cy-half, cx+half, cy+half)) + else: + halfw, halfh = ((2*cx)//16)*8, ((2*cy)//16)*8 + if not (square_ok) and W == H: + halfh = 3*halfw/4 + img = img.crop((cx-halfw, cy-halfh, cx+halfw, cy+halfh)) + + W2, H2 = img.size + print(f' - adding {path} with resolution {W1}x{H1} --> {W2}x{H2}') + imgs.append(dict(img=ImgNorm(img)[None], true_shape=np.int32( + [img.size[::-1]]), idx=len(imgs), instance=str(len(imgs)))) + + assert imgs, 'no images found at '+root + print(f' (Found {len(imgs)} images)') + return imgs
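A hedged usage sketch for load_images; the folder path and the resulting shapes are illustrative, not from this repo:

# imgs = load_images('/path/to/images', size=512)
# view1, view2 = imgs[0], imgs[1]
# print(view1['img'].shape)   # e.g. torch.Size([1, 3, 384, 512]), normalized to [-1, 1] by ImgNorm
# print(view1['true_shape'])  # e.g. [[384 512]], i.e. (height, width)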
diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/utils/misc.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/utils/misc.py new file mode 100644 index 0000000000000000000000000000000000000000..ab9fd06a063c3eafbfafddc011064ebb8a3232a8 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/utils/misc.py @@ -0,0 +1,121 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). +# +# -------------------------------------------------------- +# utility functions for DUSt3R +# -------------------------------------------------------- +import torch + + +def fill_default_args(kwargs, func): + import inspect # a bit hacky but it works reliably + signature = inspect.signature(func) + + for k, v in signature.parameters.items(): + if v.default is inspect.Parameter.empty: + continue + kwargs.setdefault(k, v.default) + + return kwargs + + +def freeze_all_params(modules): + for module in modules: + try: + for n, param in module.named_parameters(): + param.requires_grad = False + except AttributeError: + # module is directly a parameter + module.requires_grad = False + + +def is_symmetrized(gt1, gt2): + x = gt1['instance'] + y = gt2['instance'] + if len(x) == len(y) and len(x) == 1: + return False # special case of batchsize 1 + ok = True + for i in range(0, len(x), 2): + ok = ok and (x[i] == y[i+1]) and (x[i+1] == y[i]) + return ok + + +def flip(tensor): + """ flip so that tensor[0::2] <=> tensor[1::2] """ + return torch.stack((tensor[1::2], tensor[0::2]), dim=1).flatten(0, 1) + + +def interleave(tensor1, tensor2): + res1 = torch.stack((tensor1, tensor2), dim=1).flatten(0, 1) + res2 = torch.stack((tensor2, tensor1), dim=1).flatten(0, 1) + return res1, res2 + + +def transpose_to_landscape(head, activate=True): + """ Predict in the correct aspect-ratio, + then transpose the result in landscape + and stack everything back together. + """ + def wrapper_no(decout, true_shape): + B = len(true_shape) + assert true_shape[0:1].allclose(true_shape), 'true_shape must be all identical' + H, W = true_shape[0].cpu().tolist() + res = head(decout, (H, W)) + return res + + def wrapper_yes(decout, true_shape): + B = len(true_shape) + # by definition, the batch is in landscape mode so W >= H + H, W = int(true_shape.min()), int(true_shape.max()) + + height, width = true_shape.T + is_landscape = (width >= height) + is_portrait = ~is_landscape + + # true_shape = true_shape.cpu() + if is_landscape.all(): + return head(decout, (H, W)) + if is_portrait.all(): + return transposed(head(decout, (W, H))) + + # batch is a mix of both portrait & landscape + def selout(ar): return [d[ar] for d in decout] + l_result = head(selout(is_landscape), (H, W)) + p_result = transposed(head(selout(is_portrait), (W, H))) + + # allocate full result + result = {} + for k in l_result | p_result: + x = l_result[k].new(B, *l_result[k].shape[1:]) + x[is_landscape] = l_result[k] + x[is_portrait] = p_result[k] + result[k] = x + + return result + + return wrapper_yes if activate else wrapper_no + + +def transposed(dic): + return {k: v.swapaxes(1, 2) for k, v in dic.items()} + + +def invalid_to_nans(arr, valid_mask, ndim=999): + if valid_mask is not None: + arr = arr.clone() + arr[~valid_mask] = float('nan') + if arr.ndim > ndim: + arr = arr.flatten(-2 - (arr.ndim - ndim), -2) + return arr + + +def invalid_to_zeros(arr, valid_mask, ndim=999): + if valid_mask is not None: + arr = arr.clone() + arr[~valid_mask] = 0 + nnz = valid_mask.view(len(valid_mask), -1).sum(1) + else: + nnz = arr.numel() // len(arr) if len(arr) else 0 # number of points per image + if arr.ndim > ndim: + arr = arr.flatten(-2 - (arr.ndim - ndim), -2) + return arr, nnz
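The invalid_to_nans helper is what makes the nanmedian/nanmean reductions elsewhere in this diff ignore masked-out pixels. A tiny self-contained sketch (toy values, assuming invalid_to_nans above is in scope):

import torch

pts = torch.tensor([[[1.0, 2.0, 100.0, 4.0]]])       # (1, 1, 4) toy depth row
valid = torch.tensor([[[True, True, False, True]]])  # mask out the outlier

z = invalid_to_nans(pts, valid).reshape(1, -1)       # invalid entries become NaN
print(torch.nanmedian(z, dim=-1).values)             # tensor([2.]) -- 100.0 is ignored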
diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/utils/path_to_croco.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/utils/path_to_croco.py new file mode 100644 index 0000000000000000000000000000000000000000..39226ce6bc0e1993ba98a22096de32cb6fa916b4 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/utils/path_to_croco.py @@ -0,0 +1,19 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). +# +# -------------------------------------------------------- +# CroCo submodule import +# -------------------------------------------------------- + +import sys +import os.path as path +HERE_PATH = path.normpath(path.dirname(__file__)) +CROCO_REPO_PATH = path.normpath(path.join(HERE_PATH, '../../croco')) +CROCO_MODELS_PATH = path.join(CROCO_REPO_PATH, 'models') +# check the presence of the models directory in the repo to be sure it's cloned +if path.isdir(CROCO_MODELS_PATH): + # workaround for sibling import + sys.path.insert(0, CROCO_REPO_PATH) +else: + raise ImportError(f"croco is not initialized, could not find: {CROCO_MODELS_PATH}.\n " + "Did you forget to run 'git submodule update --init --recursive' ?") diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/viz.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/viz.py new file mode 100644 index 0000000000000000000000000000000000000000..a21f399accf6710816cc4a858d60849ccaad31e1 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/dust3r/viz.py @@ -0,0 +1,320 @@ +# Copyright (C) 2024-present Naver Corporation. All rights reserved. +# Licensed under CC BY-NC-SA 4.0 (non-commercial use only). +# +# -------------------------------------------------------- +# Visualization utilities using trimesh +# -------------------------------------------------------- +import PIL.Image +import numpy as np +from scipy.spatial.transform import Rotation +import torch + +from dust3r.utils.geometry import geotrf, get_med_dist_between_poses +from dust3r.utils.device import to_numpy +from dust3r.utils.image import rgb + +try: + import trimesh +except ImportError: + print('/!\\ module trimesh is not installed, cannot visualize results /!\\') + + +def cat_3d(vecs): + if isinstance(vecs, (np.ndarray, torch.Tensor)): + vecs = [vecs] + return np.concatenate([p.reshape(-1, 3) for p in to_numpy(vecs)]) + + +def show_raw_pointcloud(pts3d, colors, point_size=2): + scene = trimesh.Scene() + + pct = trimesh.PointCloud(cat_3d(pts3d), colors=cat_3d(colors)) + scene.add_geometry(pct) + + scene.show(line_settings={'point_size': point_size}) + + +def pts3d_to_trimesh(img, pts3d, valid=None): + H, W, THREE = img.shape + assert THREE == 3 + assert img.shape == pts3d.shape + + vertices = pts3d.reshape(-1, 3) + + # make squares: each pixel == 2 triangles + idx = np.arange(len(vertices)).reshape(H, W) + idx1 = idx[:-1, :-1].ravel() # top-left corner + idx2 = idx[:-1, +1:].ravel() # top-right corner + idx3 = idx[+1:, :-1].ravel() # bottom-left corner + idx4 = idx[+1:, +1:].ravel() # bottom-right corner + faces = np.concatenate(( + np.c_[idx1, idx2, idx3], + np.c_[idx3, idx2, idx1], # same triangle, but backward (cheap solution to cancel face culling) + np.c_[idx2, idx3, idx4], + np.c_[idx4, idx3, idx2], # same triangle, but backward (cheap solution to cancel face culling) + ), axis=0) + + # prepare triangle colors + face_colors = np.concatenate(( + img[:-1, :-1].reshape(-1, 3), + img[:-1, :-1].reshape(-1, 3), + img[+1:, +1:].reshape(-1, 3), + img[+1:, +1:].reshape(-1, 3) + ), axis=0) + + # remove invalid faces + if valid is not None: + assert valid.shape == (H, W) + valid_idxs
= valid.ravel() + valid_faces = valid_idxs[faces].all(axis=-1) + faces = faces[valid_faces] + face_colors = face_colors[valid_faces] + + assert len(faces) == len(face_colors) + return dict(vertices=vertices, face_colors=face_colors, faces=faces) + + +def cat_meshes(meshes): + vertices, faces, colors = zip(*[(m['vertices'], m['faces'], m['face_colors']) for m in meshes]) + n_vertices = np.cumsum([0]+[len(v) for v in vertices]) + for i in range(len(faces)): + faces[i][:] += n_vertices[i] + + vertices = np.concatenate(vertices) + colors = np.concatenate(colors) + faces = np.concatenate(faces) + return dict(vertices=vertices, face_colors=colors, faces=faces) + + +def show_duster_pairs(view1, view2, pred1, pred2): + import matplotlib.pyplot as pl + pl.ion() + + for e in range(len(view1['instance'])): + i = view1['idx'][e] + j = view2['idx'][e] + img1 = rgb(view1['img'][e]) + img2 = rgb(view2['img'][e]) + conf1 = pred1['conf'][e].squeeze() + conf2 = pred2['conf'][e].squeeze() + score = conf1.mean()*conf2.mean() + print(f">> Showing pair #{e} {i}-{j} {score=:g}") + pl.clf() + pl.subplot(221).imshow(img1) + pl.subplot(223).imshow(img2) + pl.subplot(222).imshow(conf1, vmin=1, vmax=30) + pl.subplot(224).imshow(conf2, vmin=1, vmax=30) + pts1 = pred1['pts3d'][e] + pts2 = pred2['pts3d_in_other_view'][e] + pl.subplots_adjust(0, 0, 1, 1, 0, 0) + if input('show pointcloud? (y/n) ') == 'y': + show_raw_pointcloud(cat(pts1, pts2), cat(img1, img2), point_size=5) + + +def auto_cam_size(im_poses): + return 0.1 * get_med_dist_between_poses(im_poses) + + +class SceneViz: + def __init__(self): + self.scene = trimesh.Scene() + + def add_pointcloud(self, pts3d, color, mask=None): + pts3d = to_numpy(pts3d) + mask = to_numpy(mask) + if mask is None: + mask = [slice(None)] * len(pts3d) + pts = np.concatenate([p[m] for p, m in zip(pts3d, mask)]) + pct = trimesh.PointCloud(pts.reshape(-1, 3)) + + if isinstance(color, (list, np.ndarray, torch.Tensor)): + color = to_numpy(color) + col = np.concatenate([p[m] for p, m in zip(color, mask)]) + assert col.shape == pts.shape + pct.visual.vertex_colors = uint8(col.reshape(-1, 3)) + else: + assert len(color) == 3 + pct.visual.vertex_colors = np.broadcast_to(uint8(color), pts.shape) + + self.scene.add_geometry(pct) + return self + + def add_camera(self, pose_c2w, focal=None, color=(0, 0, 0), image=None, imsize=None, cam_size=0.03): + pose_c2w, focal, color, image = to_numpy((pose_c2w, focal, color, image)) + add_scene_cam(self.scene, pose_c2w, color, image, focal, screen_width=cam_size) + return self + + def add_cameras(self, poses, focals=None, images=None, imsizes=None, colors=None, **kw): + def get(arr, idx): return None if arr is None else arr[idx] + for i, pose_c2w in enumerate(poses): + self.add_camera(pose_c2w, get(focals, i), image=get(images, i), + color=get(colors, i), imsize=get(imsizes, i), **kw) + return self + + def show(self, point_size=2): + self.scene.show(line_settings={'point_size': point_size}) + + +def show_raw_pointcloud_with_cams(imgs, pts3d, mask, focals, cams2world, + point_size=2, cam_size=0.05, cam_color=None): + """ Visualization of a pointcloud with cameras + imgs = (N, H, W, 3) or N-size list of [(H,W,3), ...] + pts3d = (N, H, W, 3) or N-size list of [(H,W,3), ...] + focals = (N,) or N-size list of [focal, ...] + cams2world = (N,4,4) or N-size list of [(4,4), ...] 
+ """ + assert len(pts3d) == len(mask) <= len(imgs) <= len(cams2world) == len(focals) + pts3d = to_numpy(pts3d) + imgs = to_numpy(imgs) + focals = to_numpy(focals) + cams2world = to_numpy(cams2world) + + scene = trimesh.Scene() + + # full pointcloud + pts = np.concatenate([p[m] for p, m in zip(pts3d, mask)]) + col = np.concatenate([p[m] for p, m in zip(imgs, mask)]) + pct = trimesh.PointCloud(pts.reshape(-1, 3), colors=col.reshape(-1, 3)) + scene.add_geometry(pct) + + # add each camera + for i, pose_c2w in enumerate(cams2world): + if isinstance(cam_color, list): + camera_edge_color = cam_color[i] + else: + camera_edge_color = cam_color or CAM_COLORS[i % len(CAM_COLORS)] + add_scene_cam(scene, pose_c2w, camera_edge_color, + imgs[i] if i < len(imgs) else None, focals[i], screen_width=cam_size) + + scene.show(line_settings={'point_size': point_size}) + + +def add_scene_cam(scene, pose_c2w, edge_color, image=None, focal=None, imsize=None, screen_width=0.03): + + if image is not None: + H, W, THREE = image.shape + assert THREE == 3 + if image.dtype != np.uint8: + image = np.uint8(255*image) + elif imsize is not None: + W, H = imsize + elif focal is not None: + H = W = focal / 1.1 + else: + H = W = 1 + + if focal is None: + focal = min(H, W) * 1.1 # default value + elif isinstance(focal, np.ndarray): + focal = focal[0] + + # create fake camera + height = focal * screen_width / H + width = screen_width * 0.5**0.5 + rot45 = np.eye(4) + rot45[:3, :3] = Rotation.from_euler('z', np.deg2rad(45)).as_matrix() + rot45[2, 3] = -height # set the tip of the cone = optical center + aspect_ratio = np.eye(4) + aspect_ratio[0, 0] = W/H + transform = pose_c2w @ OPENGL @ aspect_ratio @ rot45 + cam = trimesh.creation.cone(width, height, sections=4) # , transform=transform) + + # this is the image + if image is not None: + vertices = geotrf(transform, cam.vertices[[4, 5, 1, 3]]) + faces = np.array([[0, 1, 2], [0, 2, 3], [2, 1, 0], [3, 2, 0]]) + img = trimesh.Trimesh(vertices=vertices, faces=faces) + uv_coords = np.float32([[0, 0], [1, 0], [1, 1], [0, 1]]) + img.visual = trimesh.visual.TextureVisuals(uv_coords, image=PIL.Image.fromarray(image)) + scene.add_geometry(img) + + # this is the camera mesh + rot2 = np.eye(4) + rot2[:3, :3] = Rotation.from_euler('z', np.deg2rad(2)).as_matrix() + vertices = np.r_[cam.vertices, 0.95*cam.vertices, geotrf(rot2, cam.vertices)] + vertices = geotrf(transform, vertices) + faces = [] + for face in cam.faces: + if 0 in face: + continue + a, b, c = face + a2, b2, c2 = face + len(cam.vertices) + a3, b3, c3 = face + 2*len(cam.vertices) + + # add 3 pseudo-edges + faces.append((a, b, b2)) + faces.append((a, a2, c)) + faces.append((c2, b, c)) + + faces.append((a, b, b3)) + faces.append((a, a3, c)) + faces.append((c3, b, c)) + + # no culling + faces += [(c, b, a) for a, b, c in faces] + + cam = trimesh.Trimesh(vertices=vertices, faces=faces) + cam.visual.face_colors[:, :3] = edge_color + scene.add_geometry(cam) + + +def cat(a, b): + return np.concatenate((a.reshape(-1, 3), b.reshape(-1, 3))) + + +OPENGL = np.array([[1, 0, 0, 0], + [0, -1, 0, 0], + [0, 0, -1, 0], + [0, 0, 0, 1]]) + + +CAM_COLORS = [(255, 0, 0), (0, 0, 255), (0, 255, 0), (255, 0, 255), (255, 204, 0), (0, 204, 204), + (128, 255, 255), (255, 128, 255), (255, 255, 128), (0, 0, 0), (128, 128, 128)] + + +def uint8(colors): + if not isinstance(colors, np.ndarray): + colors = np.array(colors) + if np.issubdtype(colors.dtype, np.floating): + colors *= 255 + assert 0 <= colors.min() and colors.max() < 256 + return np.uint8(colors) + + 
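# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the upstream code: how the SceneViz helpers
# above compose. The random data and camera pose are made up; requires trimesh.
def _example_scene_viz():
    viz = SceneViz()
    pts3d = np.random.rand(2, 8, 8, 3)   # two tiny pointmaps
    colors = np.random.rand(2, 8, 8, 3)  # per-point RGB in [0, 1]
    mask = np.ones((2, 8, 8), dtype=bool)
    viz.add_pointcloud(pts3d, colors, mask)
    viz.add_camera(np.eye(4), focal=200.0, color=(255, 0, 0), cam_size=0.05)
    viz.show(point_size=3)               # opens an interactive trimesh window
# ---------------------------------------------------------------------------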
+def segment_sky(image): + import cv2 + from scipy import ndimage + + # Convert to HSV + image = to_numpy(image) + if np.issubdtype(image.dtype, np.floating): + image = np.uint8(255*image.clip(min=0, max=1)) + hsv = cv2.cvtColor(image, cv2.COLOR_BGR2HSV) + + # Define range for blue color and create mask + lower_blue = np.array([0, 0, 100]) + upper_blue = np.array([30, 255, 255]) + mask = cv2.inRange(hsv, lower_blue, upper_blue).view(bool) + + # add luminous gray + mask |= (hsv[:, :, 1] < 10) & (hsv[:, :, 2] > 150) + mask |= (hsv[:, :, 1] < 30) & (hsv[:, :, 2] > 180) + mask |= (hsv[:, :, 1] < 50) & (hsv[:, :, 2] > 220) + + # Morphological operations + kernel = np.ones((5, 5), np.uint8) + mask2 = ndimage.binary_opening(mask, structure=kernel) + + # keep only largest CC + _, labels, stats, _ = cv2.connectedComponentsWithStats(mask2.view(np.uint8), connectivity=8) + cc_sizes = stats[1:, cv2.CC_STAT_AREA] + order = cc_sizes.argsort()[::-1] # bigger first + i = 0 + selection = [] + while i < len(order) and cc_sizes[order[i]] > cc_sizes[order[0]] / 2: + selection.append(1 + order[i]) + i += 1 + mask3 = np.in1d(labels, selection).reshape(labels.shape) + + # Apply mask + return torch.from_numpy(mask3) diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/encoders.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/encoders.py new file mode 100644 index 0000000000000000000000000000000000000000..3de24c4224fc817cd3c91ee7e8ec87f175af4562 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/encoders.py @@ -0,0 +1,137 @@ +from typing import Optional, Union +import torch +from torch import device +import torch.nn as nn +import torch.nn.functional as F +import torchvision.models as tvm +import gc + + +class ResNet50(nn.Module): + def __init__(self, pretrained=False, high_res = False, weights = None, + dilation = None, freeze_bn = True, anti_aliased = False, early_exit = False, amp = False, amp_dtype = torch.float16) -> None: + super().__init__() + if dilation is None: + dilation = [False,False,False] + if anti_aliased: + pass + else: + if weights is not None: + self.net = tvm.resnet50(weights = weights,replace_stride_with_dilation=dilation) + else: + net = tvm.resnet50(pretrained=pretrained,replace_stride_with_dilation=dilation) + self.net = nn.Sequential(net.conv1, net.bn1, net.relu, net.maxpool, net.layer1, net.layer2, net.layer3) + + self.high_res = high_res + self.freeze_bn = freeze_bn + self.early_exit = early_exit + self.amp = amp + self.amp_dtype = amp_dtype + + def forward(self, x, **kwargs): + with torch.autocast("cuda", enabled=self.amp, dtype = self.amp_dtype): + # net = self.net + # feats = {1:x} + # x = net.conv1(x) + # x = net.bn1(x) + # x = net.relu(x) + # feats[2] = x + # x = net.maxpool(x) + # x = net.layer1(x) + # feats[4] = x + # x = net.layer2(x) + # feats[8] = x + # if self.early_exit: + # return feats + # x = net.layer3(x) + # feats[16] = x + # x = net.layer4(x) + # feats[32] = x + return self.net(x) + + def train(self, mode=True): + super().train(mode) + if self.freeze_bn: + for m in self.modules(): + if isinstance(m, nn.BatchNorm2d): + m.eval() + pass + +class VGG19(nn.Module): + def __init__(self, pretrained=False, amp = False, amp_dtype = torch.float16) -> None: + super().__init__() + self.layers = nn.ModuleList(tvm.vgg19_bn(pretrained=pretrained).features[:40]) + self.amp = amp + self.amp_dtype = amp_dtype + + def forward(self, x, **kwargs): + with torch.autocast("cuda", enabled=self.amp, dtype = self.amp_dtype): + 
feats = {} + scale = 1 + for layer in self.layers: + if isinstance(layer, nn.MaxPool2d): + feats[scale] = x + scale = scale*2 + x = layer(x) + return feats + +class CNNandDinov2(nn.Module): + def __init__(self, cnn_kwargs = None, amp = False, use_vgg = False, coarse_backbone='DINOv2_large', coarse_patch_size=14, coarse_feat_dim=1024, dinov2_weights = None, amp_dtype = torch.float16): + super().__init__() + self.amp = amp + self.amp_dtype = amp_dtype + self.coarse_backbone = coarse_backbone + self.coarse_patch_size = coarse_patch_size + self.coarse_feat_dim = coarse_feat_dim + if 'DINOv2' in coarse_backbone: + if 'large' in coarse_backbone: + if dinov2_weights is None: + dinov2_weights = torch.hub.load_state_dict_from_url("https://dl.fbaipublicfiles.com/dinov2/dinov2_vitl14/dinov2_vitl14_pretrain.pth", map_location="cpu") + from .transformer import vit_large as vit_model + vit_kwargs = dict(img_size= 518, + patch_size= coarse_patch_size, + init_values = 1.0, + ffn_layer = "mlp", + block_chunks = 0, + ) + else: + raise NotImplementedError + + dinov2_vitl14 = vit_model(**vit_kwargs).eval() + dinov2_vitl14.load_state_dict(dinov2_weights) + + if self.amp: + dinov2_vitl14 = dinov2_vitl14.to(self.amp_dtype) + self.dinov2_vitl14 = [dinov2_vitl14] # ugly hack to not show parameters to DDP + elif coarse_backbone == 'ResNet50': + self.backbone_model = ResNet50(pretrained=True, amp=self.amp) + else: + raise NotImplementedError + + cnn_kwargs = cnn_kwargs if cnn_kwargs is not None else {} + if not use_vgg: + self.cnn = ResNet50(**cnn_kwargs) + else: + self.cnn = VGG19(**cnn_kwargs) + + def train(self, mode: bool = True): + return self.cnn.train(mode) + + def forward(self, x, upsample = False): + B,C,H,W = x.shape + feature_pyramid = self.cnn(x) + + if not upsample: + with torch.no_grad(): + if 'DINOv2' in self.coarse_backbone: + if self.dinov2_vitl14[0].device != x.device: + self.dinov2_vitl14[0] = self.dinov2_vitl14[0].to(x.device) + dinov2_features_16 = self.dinov2_vitl14[0].forward_features(x.to(self.amp_dtype) if self.amp else x) + features_16 = dinov2_features_16['x_norm_patchtokens'].permute(0,2,1).reshape(B,self.coarse_feat_dim,H//self.coarse_patch_size, W//self.coarse_patch_size) + del dinov2_features_16 + else: + raise NotImplementedError + if self.coarse_backbone == 'ResNet50': + features_16 = self.backbone_model(x.to(self.amp_dtype) if self.amp else x) + feature_pyramid[16] = features_16 + return feature_pyramid \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/matcher.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/matcher.py new file mode 100644 index 0000000000000000000000000000000000000000..7a63ebc391954396056c4fbd91857fe61dfec35b --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/matcher.py @@ -0,0 +1,937 @@ +import os +import math +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from torchvision.transforms import Normalize +from einops import rearrange +import warnings +from warnings import warn +from PIL import Image + +import roma +from roma.utils import get_tuple_transform_ops, resize_by_longest_edge_and_padding, resize_by_longest_edge_and_stretch +from roma.utils.local_correlation import local_correlation +from roma.utils.utils import cls_to_flow_refine +from roma.utils.kde import kde + +class ConvRefiner(nn.Module): + def __init__( + self, + in_dim=6, + hidden_dim=16, + out_dim=2, + dw=False, + kernel_size=5, + hidden_blocks=3, + 
displacement_emb = None, + displacement_emb_dim = None, + local_corr_radius = None, + corr_in_other = None, + no_im_B_fm = False, + amp = False, + concat_logits = False, + use_bias_block_1 = True, + use_cosine_corr = False, + disable_local_corr_grad = False, + is_classifier = False, + sample_mode = "bilinear", + norm_type = nn.BatchNorm2d, + bn_momentum = 0.1, + amp_dtype = torch.float16, + ): + super().__init__() + self.bn_momentum = bn_momentum + self.block1 = self.create_block( + in_dim, hidden_dim, dw=dw, kernel_size=kernel_size, bias = use_bias_block_1, + ) + self.hidden_blocks = nn.Sequential( + *[ + self.create_block( + hidden_dim, + hidden_dim, + dw=dw, + kernel_size=kernel_size, + norm_type=norm_type, + ) + for hb in range(hidden_blocks) + ] + ) + self.hidden_blocks = self.hidden_blocks + self.out_conv = nn.Conv2d(hidden_dim, out_dim, 1, 1, 0) + if displacement_emb: + self.has_displacement_emb = True + self.disp_emb = nn.Conv2d(2,displacement_emb_dim,1,1,0) + else: + self.has_displacement_emb = False + self.local_corr_radius = local_corr_radius + self.corr_in_other = corr_in_other + self.no_im_B_fm = no_im_B_fm + self.amp = amp + self.concat_logits = concat_logits + self.use_cosine_corr = use_cosine_corr + self.disable_local_corr_grad = disable_local_corr_grad + self.is_classifier = is_classifier + self.sample_mode = sample_mode + self.amp_dtype = amp_dtype + + def create_block( + self, + in_dim, + out_dim, + dw=False, + kernel_size=5, + bias = True, + norm_type = nn.BatchNorm2d, + ): + num_groups = 1 if not dw else in_dim + if dw: + assert ( + out_dim % in_dim == 0 + ), "outdim must be divisible by indim for depthwise" + conv1 = nn.Conv2d( + in_dim, + out_dim, + kernel_size=kernel_size, + stride=1, + padding=kernel_size // 2, + groups=num_groups, + bias=bias, + ) + norm = norm_type(out_dim, momentum = self.bn_momentum) if norm_type is nn.BatchNorm2d else norm_type(num_channels = out_dim) + relu = nn.ReLU(inplace=True) + conv2 = nn.Conv2d(out_dim, out_dim, 1, 1, 0) + return nn.Sequential(conv1, norm, relu, conv2) + + def forward(self, x, y, flow, scale_factor = 1, logits = None): + b,c,hs,ws = x.shape + with torch.autocast("cuda", enabled=self.amp, dtype = self.amp_dtype): + with torch.no_grad(): + x_hat = F.grid_sample(y, flow.permute(0, 2, 3, 1), align_corners=False, mode = self.sample_mode) + if self.has_displacement_emb: + im_A_coords = torch.meshgrid( + ( + torch.linspace(-1 + 1 / hs, 1 - 1 / hs, hs, device=x.device), + torch.linspace(-1 + 1 / ws, 1 - 1 / ws, ws, device=x.device), + ) + ) + im_A_coords = torch.stack((im_A_coords[1], im_A_coords[0])) + im_A_coords = im_A_coords[None].expand(b, 2, hs, ws) + in_displacement = flow-im_A_coords + emb_in_displacement = self.disp_emb(40/32 * scale_factor * in_displacement) + if self.local_corr_radius: + if self.corr_in_other: + # Corr in other means take a kxk grid around the predicted coordinate in other image + local_corr = local_correlation(x,y,local_radius=self.local_corr_radius,flow = flow, + sample_mode = self.sample_mode) + else: + raise NotImplementedError("Local corr in own frame should not be used.") + if self.no_im_B_fm: + x_hat = torch.zeros_like(x) + d = torch.cat((x, x_hat, emb_in_displacement, local_corr), dim=1) + else: + d = torch.cat((x, x_hat, emb_in_displacement), dim=1) + else: + if self.no_im_B_fm: + x_hat = torch.zeros_like(x) + d = torch.cat((x, x_hat), dim=1) + if self.concat_logits: + d = torch.cat((d, logits), dim=1) + d = self.block1(d) + d = self.hidden_blocks(d) + d = self.out_conv(d.float()) + 
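# out_conv emits out_dim channels: all but the last form the predicted
+            # displacement field, the final channel is a raw certainty logit. +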
            displacement, certainty = d[:, :-1], d[:, -1:]
+        return displacement, certainty
+
+class CosKernel(nn.Module):  # similar to softmax kernel
+    def __init__(self, T, learn_temperature=False):
+        super().__init__()
+        self.learn_temperature = learn_temperature
+        if self.learn_temperature:
+            self.T = nn.Parameter(torch.tensor(T))
+        else:
+            self.T = T
+
+    def __call__(self, x, y, eps=1e-6):
+        c = torch.einsum("bnd,bmd->bnm", x, y) / (
+            x.norm(dim=-1)[..., None] * y.norm(dim=-1)[:, None] + eps
+        )
+        if self.learn_temperature:
+            T = self.T.abs() + 0.01
+        else:
+            T = torch.tensor(self.T, device=c.device)
+        K = ((c - 1.0) / T).exp()
+        return K
+
+class GP(nn.Module):
+    def __init__(
+        self,
+        kernel,
+        T=1,
+        learn_temperature=False,
+        only_attention=False,
+        gp_dim=64,
+        basis="fourier",
+        covar_size=5,
+        only_nearest_neighbour=False,
+        sigma_noise=0.1,
+        no_cov=False,
+        predict_features = False,
+    ):
+        super().__init__()
+        self.K = kernel(T=T, learn_temperature=learn_temperature)
+        self.sigma_noise = sigma_noise
+        self.covar_size = covar_size
+        self.pos_conv = torch.nn.Conv2d(2, gp_dim, 1, 1)
+        self.only_attention = only_attention
+        self.only_nearest_neighbour = only_nearest_neighbour
+        self.basis = basis
+        self.no_cov = no_cov
+        self.dim = gp_dim
+        self.predict_features = predict_features
+
+    def get_local_cov(self, cov):
+        K = self.covar_size
+        b, h, w, h, w = cov.shape
+        hw = h * w
+        cov = F.pad(cov, 4 * (K // 2,))  # pad v_q
+        delta = torch.stack(
+            torch.meshgrid(
+                torch.arange(-(K // 2), K // 2 + 1), torch.arange(-(K // 2), K // 2 + 1)
+            ),
+            dim=-1,
+        )
+        positions = torch.stack(
+            torch.meshgrid(
+                torch.arange(K // 2, h + K // 2), torch.arange(K // 2, w + K // 2)
+            ),
+            dim=-1,
+        )
+        neighbours = positions[:, :, None, None, :] + delta[None, :, :]
+        points = torch.arange(hw)[:, None].expand(hw, K**2)
+        local_cov = cov.reshape(b, hw, h + K - 1, w + K - 1)[
+            :,
+            points.flatten(),
+            neighbours[..., 0].flatten(),
+            neighbours[..., 1].flatten(),
+        ].reshape(b, h, w, K**2)
+        return local_cov
+
+    def reshape(self, x):
+        return rearrange(x, "b d h w -> b (h w) d")
+
+    def project_to_basis(self, x):
+        if self.basis == "fourier":
+            return torch.cos(8 * math.pi * self.pos_conv(x))
+        elif self.basis == "linear":
+            return self.pos_conv(x)
+        else:
+            raise ValueError(
+                "No bases other than fourier and linear are currently supported in the public release"
+            )
+
+    def get_pos_enc(self, y):
+        b, c, h, w = y.shape
+        coarse_coords = torch.meshgrid(
+            (
+                torch.linspace(-1 + 1 / h, 1 - 1 / h, h, device=y.device),
+                torch.linspace(-1 + 1 / w, 1 - 1 / w, w, device=y.device),
+            )
+        )
+
+        coarse_coords = torch.stack((coarse_coords[1], coarse_coords[0]), dim=-1)[
+            None
+        ].expand(b, h, w, 2)
+        coarse_coords = rearrange(coarse_coords, "b h w d -> b d h w")
+        coarse_embedded_coords = self.project_to_basis(coarse_coords)
+        return coarse_embedded_coords
+
+    def forward(self, x, y, **kwargs):
+        b, c, h1, w1 = x.shape
+        b, c, h2, w2 = y.shape
+        f = self.get_pos_enc(y)
+        b, d, h2, w2 = f.shape
+        x, y, f = self.reshape(x.float()), self.reshape(y.float()), self.reshape(f)
+        K_xx = self.K(x, x)
+        K_yy = self.K(y, y)
+        K_xy = self.K(x, y)
+        K_yx = K_xy.permute(0, 2, 1)
+        sigma_noise = self.sigma_noise * torch.eye(h2 * w2, device=x.device)[None, :, :]
+        with warnings.catch_warnings():
+            K_yy = K_yy + sigma_noise  # To increase stability in inverse
+            with torch.no_grad():
+                K_yy_dig_zeromask = ((K_yy[torch.eye(h2 * w2, device=x.device, dtype=torch.bool).repeat(b, 1, 1)] == 0).reshape(b, -1))
+            K_yy = K_yy + self.sigma_noise *
K_yy_dig_zeromask[..., None] * torch.eye(h2 * w2, device=x.device)[None, :, :] + K_yy_inv = torch.linalg.inv(K_yy) + + mu_x = K_xy.matmul(K_yy_inv.matmul(f)) + mu_x = rearrange(mu_x, "b (h w) d -> b d h w", h=h1, w=w1) + if not self.no_cov: + cov_x = K_xx - K_xy.matmul(K_yy_inv.matmul(K_yx)) + cov_x = rearrange(cov_x, "b (h w) (r c) -> b h w r c", h=h1, w=w1, r=h1, c=w1) + local_cov_x = self.get_local_cov(cov_x) + local_cov_x = rearrange(local_cov_x, "b h w K -> b K h w") + gp_feats = torch.cat((mu_x, local_cov_x), dim=1) + else: + gp_feats = mu_x + return gp_feats + +class Decoder(nn.Module): + def __init__( + self, embedding_decoder, gps, proj, conv_refiner, amp, detach=False, scales="all", pos_embeddings = None, + num_refinement_steps_per_scale = 1, warp_noise_std = 0.0, displacement_dropout_p = 0.0, gm_warp_dropout_p = 0.0, + flow_upsample_mode = "bilinear", amp_dtype = torch.float16, + ): + super().__init__() + self.embedding_decoder = embedding_decoder + self.num_refinement_steps_per_scale = num_refinement_steps_per_scale + self.gps = gps + self.proj = proj + self.amp = amp + self.conv_refiner = conv_refiner + self.detach = detach + if pos_embeddings is None: + self.pos_embeddings = {} + else: + self.pos_embeddings = pos_embeddings + if scales == "all": + self.scales = ["32", "16", "8", "4", "2", "1"] + else: + self.scales = scales + self.warp_noise_std = warp_noise_std + self.refine_init = 4 + self.displacement_dropout_p = displacement_dropout_p + self.gm_warp_dropout_p = gm_warp_dropout_p + self.flow_upsample_mode = flow_upsample_mode + self.amp_dtype = amp_dtype + + def get_placeholder_flow(self, b, h, w, device): + coarse_coords = torch.meshgrid( + ( + torch.linspace(-1 + 1 / h, 1 - 1 / h, h, device=device), + torch.linspace(-1 + 1 / w, 1 - 1 / w, w, device=device), + ) + ) + coarse_coords = torch.stack((coarse_coords[1], coarse_coords[0]), dim=-1)[ + None + ].expand(b, h, w, 2) + coarse_coords = rearrange(coarse_coords, "b h w d -> b d h w") + return coarse_coords + + def get_positional_embedding(self, b, h ,w, device): + coarse_coords = torch.meshgrid( + ( + torch.linspace(-1 + 1 / h, 1 - 1 / h, h, device=device), + torch.linspace(-1 + 1 / w, 1 - 1 / w, w, device=device), + ) + ) + + coarse_coords = torch.stack((coarse_coords[1], coarse_coords[0]), dim=-1)[ + None + ].expand(b, h, w, 2) + coarse_coords = rearrange(coarse_coords, "b h w d -> b d h w") + coarse_embedded_coords = self.pos_embedding(coarse_coords) + return coarse_embedded_coords + + def forward(self, f1, f2, gt_warp = None, gt_prob = None, upsample = False, flow = None, certainty = None, scale_factor = 1): + coarse_scales = self.embedding_decoder.scales() + all_scales = self.scales if not upsample else ["8", "4", "2", "1"] + sizes = {scale: f1[scale].shape[-2:] for scale in f1} + h, w = sizes[1] + b = f1[1].shape[0] + device = f1[1].device + coarsest_scale = int(all_scales[0]) + old_stuff = torch.zeros( + b, self.embedding_decoder.hidden_dim, *sizes[coarsest_scale], device=f1[coarsest_scale].device + ) + corresps = {} + if not upsample: + flow = self.get_placeholder_flow(b, *sizes[coarsest_scale], device) + certainty = 0.0 + else: + flow = F.interpolate( + flow, + size=sizes[coarsest_scale], + align_corners=False, + mode="bilinear", + ) + certainty = F.interpolate( + certainty, + size=sizes[coarsest_scale], + align_corners=False, + mode="bilinear", + ) + displacement = 0.0 + for new_scale in all_scales: + ins = int(new_scale) + corresps[ins] = {} + f1_s, f2_s = f1[ins], f2[ins] + if new_scale in self.proj: + with 
torch.autocast("cuda", enabled=self.amp, dtype = self.amp_dtype):
+                    f1_s, f2_s = self.proj[new_scale](f1_s), self.proj[new_scale](f2_s)
+
+            if ins in coarse_scales:
+                old_stuff = F.interpolate(
+                    old_stuff, size=sizes[ins], mode="bilinear", align_corners=False
+                )
+                gp_posterior = self.gps[new_scale](f1_s, f2_s)
+                gm_warp_or_cls, certainty, old_stuff = self.embedding_decoder(
+                    gp_posterior, f1_s, old_stuff, new_scale
+                )
+
+                if self.embedding_decoder.is_classifier:
+                    flow = cls_to_flow_refine(
+                        gm_warp_or_cls,
+                    ).permute(0,3,1,2)
+                    corresps[ins].update({"gm_cls": gm_warp_or_cls,"gm_certainty": certainty,})
+                else:
+                    corresps[ins].update({"gm_flow": gm_warp_or_cls,"gm_certainty": certainty,})
+                    flow = gm_warp_or_cls.detach()
+
+            if new_scale in self.conv_refiner:
+                if self.training:
+                    corresps[ins].update({"flow_pre_delta": flow})
+                delta_flow, delta_certainty = self.conv_refiner[new_scale](
+                    f1_s, f2_s, flow, scale_factor = scale_factor, logits = certainty,
+                )
+                if self.training:
+                    corresps[ins].update({"delta_flow": delta_flow,})
+                displacement = ins*torch.stack((delta_flow[:, 0].float() / (self.refine_init * w),
+                                                delta_flow[:, 1].float() / (self.refine_init * h),),dim=1,)
+                flow = flow + displacement
+                certainty = (
+                    certainty + delta_certainty
+                )  # predict both certainty and displacement
+            corresps[ins].update({
+                "certainty": certainty,
+                "flow": flow,
+            })
+            if new_scale != "1":
+                flow = F.interpolate(
+                    flow,
+                    size=sizes[ins // 2],
+                    mode=self.flow_upsample_mode,
+                )
+                certainty = F.interpolate(
+                    certainty,
+                    size=sizes[ins // 2],
+                    mode=self.flow_upsample_mode,
+                )
+                if self.detach:
+                    flow = flow.detach()
+                    certainty = certainty.detach()
+            #torch.cuda.empty_cache()
+        return corresps
+
+
+class RegressionMatcher(nn.Module):
+    def __init__(
+        self,
+        encoder,
+        decoder,
+        h=448,
+        w=448,
+        sample_mode = "threshold",
+        upsample_preds = False,
+        symmetric = False,
+        name = None,
+        attenuate_cert = None,
+        recrop_upsample = False,
+    ):
+        super().__init__()
+        self.attenuate_cert = attenuate_cert
+        self.encoder = encoder
+        self.decoder = decoder
+        self.name = name
+        self.w_resized = w
+        self.h_resized = h
+        self.og_transforms = get_tuple_transform_ops(resize=None, normalize=True)
+        self.sample_mode = sample_mode
+        self.upsample_preds = upsample_preds
+        self.upsample_res = (14*16*6, 14*16*6)
+        self.symmetric = symmetric
+        self.sample_thresh = 0.05
+        self.recrop_upsample = recrop_upsample
+
+    def get_output_resolution(self):
+        if not self.upsample_preds:
+            return self.h_resized, self.w_resized
+        else:
+            return self.upsample_res
+
+    def extract_backbone_features(self, batch, batched = True, upsample = False):
+        x_q = batch["im_A"]
+        x_s = batch["im_B"]
+        if batched:
+            X = torch.cat((x_q, x_s), dim = 0)
+            feature_pyramid = self.encoder(X, upsample = upsample)
+        else:
+            feature_pyramid = self.encoder(x_q, upsample = upsample), self.encoder(x_s, upsample = upsample)
+        return feature_pyramid
+
+    def sample(
+        self,
+        matches,
+        certainty,
+        num=10000,
+    ):
+        if "threshold" in self.sample_mode:
+            upper_thresh = self.sample_thresh
+            certainty = certainty.clone()
+            certainty[certainty > upper_thresh] = 1
+        matches, certainty = (
+            matches.reshape(-1, 4),
+            certainty.reshape(-1),
+        )
+        expansion_factor = 4 if "balanced" in self.sample_mode else 1
+
+        if certainty.sum() == 0:
+            certainty[0] = 1  # Corner case, to avoid the multinomial error below
+        try:
+            good_samples = torch.multinomial(certainty,
+                              num_samples = min(expansion_factor*num, len(certainty)),
+                              replacement=False)
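+            # torch.multinomial raises on degenerate weights (e.g. all-zero or
+            # non-finite certainties); fall back to returning the first match.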
+        except Exception:
+            return matches[[0]], certainty[[0]]
+        good_matches, good_certainty = matches[good_samples], certainty[good_samples]
+        if "balanced" not in self.sample_mode:
+            return good_matches, good_certainty
+        density = kde(good_matches, std=0.1)
+        p = 1 / (density+1)
+        p[density < 10] = 1e-7  # Basically should have at least 10 perfect neighbours, or around 100 ok ones
+        balanced_samples = torch.multinomial(p,
+                          num_samples = min(num,len(good_certainty)),
+                          replacement=False)
+        return good_matches[balanced_samples], good_certainty[balanced_samples]
+
+    def forward(self, batch, batched = True, upsample = False, scale_factor = 1):
+        feature_pyramid = self.extract_backbone_features(batch, batched=batched, upsample = upsample)
+        if batched:
+            f_q_pyramid = {
+                scale: f_scale.chunk(2)[0] for scale, f_scale in feature_pyramid.items()
+            }
+            f_s_pyramid = {
+                scale: f_scale.chunk(2)[1] for scale, f_scale in feature_pyramid.items()
+            }
+        else:
+            f_q_pyramid, f_s_pyramid = feature_pyramid
+        corresps = self.decoder(f_q_pyramid,
+                                f_s_pyramid,
+                                upsample = upsample,
+                                **(batch["corresps"] if "corresps" in batch else {}),
+                                scale_factor=scale_factor)
+
+        return corresps
+
+    def forward_symmetric(self, batch, batched = True, upsample = False, scale_factor = 1):
+        feature_pyramid = self.extract_backbone_features(batch, batched = batched, upsample = upsample)
+        f_q_pyramid = feature_pyramid
+        f_s_pyramid = {
+            scale: torch.cat((f_scale.chunk(2)[1], f_scale.chunk(2)[0]), dim = 0)
+            for scale, f_scale in feature_pyramid.items()
+        }
+        corresps = self.decoder(f_q_pyramid,
+                                f_s_pyramid,
+                                upsample = upsample,
+                                **(batch["corresps"] if "corresps" in batch else {}),
+                                scale_factor=scale_factor)
+        return corresps
+
+    def to_pixel_coordinates(self, coords, H_A, W_A, H_B, W_B):
+        if isinstance(coords, (list, tuple)):
+            kpts_A, kpts_B = coords[0], coords[1]
+        else:
+            kpts_A, kpts_B = coords[...,:2], coords[...,2:]
+        kpts_A = torch.stack((W_A/2 * (kpts_A[...,0]+1), H_A/2 * (kpts_A[...,1]+1)),axis=-1)
+        kpts_B = torch.stack((W_B/2 * (kpts_B[...,0]+1), H_B/2 * (kpts_B[...,1]+1)),axis=-1)
+        return kpts_A, kpts_B
+
+    def to_normalized_coordinates(self, coords, H_A, W_A, H_B, W_B):
+        if isinstance(coords, (list, tuple)):
+            kpts_A, kpts_B = coords[0], coords[1]
+        else:
+            kpts_A, kpts_B = coords[...,:2], coords[...,2:]
+        kpts_A = torch.stack((2/W_A * kpts_A[...,0] - 1, 2/H_A * kpts_A[...,1] - 1),axis=-1)
+        kpts_B = torch.stack((2/W_B * kpts_B[...,0] - 1, 2/H_B * kpts_B[...,1] - 1),axis=-1)
+        return kpts_A, kpts_B
+
+    def match_keypoints(self, x_A, x_B, warp, certainty, return_tuple = True, return_inds = False):
+        x_A_to_B = F.grid_sample(warp[...,-2:].permute(2,0,1)[None], x_A[None,None], align_corners = False, mode = "bilinear")[0,:,0].mT
+        cert_A_to_B = F.grid_sample(certainty[None,None,...], x_A[None,None], align_corners = False, mode = "bilinear")[0,0,0]
+        D = torch.cdist(x_A_to_B, x_B)
+        inds_A, inds_B = torch.nonzero((D == D.min(dim=-1, keepdim = True).values) * (D == D.min(dim=-2, keepdim = True).values) * (cert_A_to_B[:,None] > self.sample_thresh), as_tuple = True)
+
+        if return_tuple:
+            if return_inds:
+                return inds_A, inds_B
+            else:
+                return x_A[inds_A], x_B[inds_B]
+        else:
+            if return_inds:
+                return torch.cat((inds_A, inds_B),dim=-1)
+            else:
+                return torch.cat((x_A[inds_A], x_B[inds_B]),dim=-1)
+
+    def warp_keypoints(self, x_A, warp, certainty, H_A, W_A, H_B, W_B):
+        H,W2,_ = warp.shape
+        W = W2//2 if self.symmetric else W2
+        # To normalized coords:
+        x_A_norm = torch.stack((2/W_A * x_A[...,0] - 1, 2/H_A *
x_A[...,1] - 1),axis=-1) + x_A_to_B = F.grid_sample(warp[:,:W, 2:].permute(2,0,1)[None], x_A_norm[None,None], align_corners = False, mode = "bilinear")[0,:,0].mT + cert_A_to_B = F.grid_sample(certainty[None,None,:,:W], x_A_norm[None,None], align_corners = False, mode = "bilinear")[0,0,0] + + # To origin coords: + x_A_to_B = torch.stack((W_B/2 * (x_A_to_B[...,0]+1), H_B/2 * (x_A_to_B[...,1]+1)),axis=-1) + return x_A_to_B, cert_A_to_B + + def get_roi(self, certainty, W, H, thr = 0.025): + raise NotImplementedError("WIP, disable for now") + hs,ws = certainty.shape + certainty = certainty/certainty.sum(dim=(-1,-2)) + cum_certainty_w = certainty.cumsum(dim=-1).sum(dim=-2) + cum_certainty_h = certainty.cumsum(dim=-2).sum(dim=-1) + print(cum_certainty_w) + print(torch.min(torch.nonzero(cum_certainty_w > thr))) + print(torch.min(torch.nonzero(cum_certainty_w < thr))) + left = int(W/ws * torch.min(torch.nonzero(cum_certainty_w > thr))) + right = int(W/ws * torch.max(torch.nonzero(cum_certainty_w < 1 - thr))) + top = int(H/hs * torch.min(torch.nonzero(cum_certainty_h > thr))) + bottom = int(H/hs * torch.max(torch.nonzero(cum_certainty_h < 1 - thr))) + print(left, right, top, bottom) + return left, top, right, bottom + + def recrop(self, certainty, image_path): + roi = self.get_roi(certainty, *Image.open(image_path).size) + return Image.open(image_path).crop(roi) + + @torch.no_grad() + def self_train_time_match( + self, + data, + corresps, + finest_scale=1, + ): + B, C, hs, ws = data['image0'].shape + device = data['image0'].device + im_A_to_im_B = corresps[finest_scale]["flow"] + certainty = corresps[finest_scale]["certainty"] + if finest_scale != 1: + im_A_to_im_B = F.interpolate( + im_A_to_im_B, size=(hs, ws), align_corners=False, mode="bilinear" + ) + certainty = F.interpolate( + certainty, size=(hs, ws), align_corners=False, mode="bilinear" + ) + im_A_to_im_B = im_A_to_im_B.permute( + 0, 2, 3, 1 + ) + # Create im_A meshgrid + im_A_coords = torch.meshgrid( + ( + torch.linspace(-1 + 1 / hs, 1 - 1 / hs, hs, device=device), + torch.linspace(-1 + 1 / ws, 1 - 1 / ws, ws, device=device), + ) + ) + im_A_coords = torch.stack((im_A_coords[1], im_A_coords[0])) + im_A_coords = im_A_coords[None].expand(B, 2, hs, ws) + certainty = certainty.sigmoid() # logits -> probs + im_A_coords = im_A_coords.permute(0, 2, 3, 1) + if (im_A_to_im_B.abs() > 1).any() and True: + wrong = (im_A_to_im_B.abs() > 1).sum(dim=-1) > 0 + certainty[wrong[:,None]] = 0 + im_A_to_im_B = torch.clamp(im_A_to_im_B, -1, 1) + warp = torch.cat((im_A_coords, im_A_to_im_B), dim=-1) + return ( + warp, + certainty[:, 0] + ) + + @torch.no_grad() + def self_inference_time_match( + self, + im_A_org, + im_B_org, + device = None, + resize_by_stretch=False, + norm_img=False, + ): + if isinstance(im_A_org, (str, os.PathLike)): + im_A_org = torch.from_numpy(np.array(Image.open(im_A_org).convert("RGB"))).permute(2,0,1) / 255. + im_B_org = torch.from_numpy(np.array(Image.open(im_B_org).convert("RGB"))).permute(2,0,1) / 255. 
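+        # Produces a dense warp of shape (H, W2, 4): normalized (x_A, y_A, x_B, y_B)
+        # coords in [-1, 1] plus a per-pixel certainty map; with symmetric matching
+        # W2 = 2*W, A->B on the left half and B->A on the right.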
+ + symmetric = self.symmetric + self.train(False) + with torch.no_grad(): + b = 1 + # Get images in good format + assert self.w_resized == self.h_resized + hs, ws = self.h_resized, self.w_resized + if resize_by_stretch: + im_A = resize_by_longest_edge_and_stretch(im_A_org, hs) + im_B = resize_by_longest_edge_and_stretch(im_B_org, hs) + else: + im_A = resize_by_longest_edge_and_padding(im_A_org, hs) + im_B = resize_by_longest_edge_and_padding(im_B_org, hs) + + if norm_img: + im_A = Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])(im_A) # Input: 3*H*W + im_B = Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])(im_B) # Input: 3*H*W + + if device is None: + batch = {"im_A": im_A[None], "im_B": im_B[None]} + else: + batch = {"im_A": im_A[None].to(device), "im_B": im_B[None].to(device)} + + finest_scale = 1 + # Run matcher + if symmetric: + corresps = self.forward_symmetric(batch) + else: + corresps = self.forward(batch, batched = True) + + if self.upsample_preds: + hs, ws = self.upsample_res + + if self.attenuate_cert: + low_res_certainty = F.interpolate( + corresps[16]["certainty"], size=(hs, ws), align_corners=False, mode="bilinear" + ) + cert_clamp = 0 + factor = 0.5 + low_res_certainty = factor*low_res_certainty*(low_res_certainty < cert_clamp) + + if self.upsample_preds: + finest_corresps = corresps[finest_scale] + + assert hs == ws + if resize_by_stretch: + im_A = resize_by_longest_edge_and_stretch(im_A_org, hs) + im_B = resize_by_longest_edge_and_stretch(im_B_org, hs) + else: + im_A = resize_by_longest_edge_and_padding(im_A_org, hs) + im_B = resize_by_longest_edge_and_padding(im_B_org, hs) + + if norm_img: + im_A = Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])(im_A) # Input: 3*H*W + im_B = Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])(im_B) # Input: 3*H*W + + if device is None: + im_A, im_B = im_A[None], im_B[None] + else: + im_A, im_B = im_A[None].to(device), im_B[None].to(device) + scale_factor = math.sqrt(self.upsample_res[0] * self.upsample_res[1] / (self.w_resized * self.h_resized)) + batch = {"im_A": im_A, "im_B": im_B, "corresps": finest_corresps} + if symmetric: + corresps = self.forward_symmetric(batch, upsample = True, batched=True, scale_factor = scale_factor) + else: + corresps = self.forward(batch, batched = True, upsample=True, scale_factor = scale_factor) + + im_A_to_im_B = corresps[finest_scale]["flow"] + certainty = corresps[finest_scale]["certainty"] - (low_res_certainty if self.attenuate_cert else 0) + if finest_scale != 1: + im_A_to_im_B = F.interpolate( + im_A_to_im_B, size=(hs, ws), align_corners=False, mode="bilinear" + ) + certainty = F.interpolate( + certainty, size=(hs, ws), align_corners=False, mode="bilinear" + ) + im_A_to_im_B = im_A_to_im_B.permute( + 0, 2, 3, 1 + ) + # Create im_A meshgrid + im_A_coords = torch.meshgrid( + ( + torch.linspace(-1 + 1 / hs, 1 - 1 / hs, hs, device=im_A.device), + torch.linspace(-1 + 1 / ws, 1 - 1 / ws, ws, device=im_A.device), + ) + ) + im_A_coords = torch.stack((im_A_coords[1], im_A_coords[0])) + im_A_coords = im_A_coords[None].expand(b, 2, hs, ws) + certainty = certainty.sigmoid() # logits -> probs + im_A_coords = im_A_coords.permute(0, 2, 3, 1) + if (im_A_to_im_B.abs() > 1).any() and True: + wrong = (im_A_to_im_B.abs() > 1).sum(dim=-1) > 0 + certainty[wrong[:,None]] = 0 + im_A_to_im_B = torch.clamp(im_A_to_im_B, -1, 1) + if symmetric: + A_to_B, B_to_A = im_A_to_im_B.chunk(2) + q_warp = torch.cat((im_A_coords, A_to_B), dim=-1) + im_B_coords = im_A_coords + 
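# Stack both directions side by side: the A->B warp fills the left half,
+                # the B->A warp the right half (concatenation along the width axis). +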
s_warp = torch.cat((B_to_A, im_B_coords), dim=-1) + warp = torch.cat((q_warp, s_warp),dim=2) + certainty = torch.cat(certainty.chunk(2), dim=3) + else: + warp = torch.cat((im_A_coords, im_A_to_im_B), dim=-1) + return ( + warp[0], + certainty[0, 0], + ) + + @torch.inference_mode() + def match( + self, + im_A_path, + im_B_path, + *args, + batched=False, + device = None, + ): + if isinstance(im_A_path, (str, os.PathLike)): + im_A, im_B = Image.open(im_A_path).convert("RGB"), Image.open(im_B_path).convert("RGB") + else: + # Assume its not a path + im_A, im_B = im_A_path, im_B_path + symmetric = self.symmetric + self.train(False) + with torch.no_grad(): + if not batched: + b = 1 + if isinstance(im_A, torch.Tensor): + h, w = im_A.shape[-2:] + h2, w2 = im_B.shape[-2:] + else: + w, h = im_A.size + w2, h2 = im_B.size + # Get images in good format + ws = self.w_resized + hs = self.h_resized + + test_transform = get_tuple_transform_ops( + resize=(hs, ws), normalize=True, clahe = False + ) + im_A, im_B = test_transform((im_A, im_B)) + if device is None: + batch = {"im_A": im_A[None], "im_B": im_B[None]} + else: + batch = {"im_A": im_A[None].to(device), "im_B": im_B[None].to(device)} + else: + b, c, h, w = im_A.shape + b, c, h2, w2 = im_B.shape + assert w == w2 and h == h2, "For batched images we assume same size" + if device is None: + batch = {"im_A": im_A, "im_B": im_B} + else: + batch = {"im_A": im_A.to(device), "im_B": im_B.to(device)} + if h != self.h_resized or self.w_resized != w: + warn("Model resolution and batch resolution differ, may produce unexpected results") + hs, ws = h, w + finest_scale = 1 + # Run matcher + if symmetric: + corresps = self.forward_symmetric(batch) + else: + corresps = self.forward(batch, batched = True) + + if self.upsample_preds: + hs, ws = self.upsample_res + + if self.attenuate_cert: + low_res_certainty = F.interpolate( + corresps[16]["certainty"], size=(hs, ws), align_corners=False, mode="bilinear" + ) + cert_clamp = 0 + factor = 0.5 + low_res_certainty = factor*low_res_certainty*(low_res_certainty < cert_clamp) + + if self.upsample_preds: + finest_corresps = corresps[finest_scale] + test_transform = get_tuple_transform_ops( + resize=(hs, ws), normalize=True + ) + if self.recrop_upsample: + certainty = corresps[finest_scale]["certainty"] + print(certainty.shape) + im_A = self.recrop(certainty[0,0], im_A_path) + im_B = self.recrop(certainty[1,0], im_B_path) + #TODO: need to adjust corresps when doing this + else: + if isinstance(im_A_path, (str, os.PathLike)): + im_A, im_B = Image.open(im_A_path).convert("RGB"), Image.open(im_B_path).convert("RGB") + else: + # Assume its not a path + im_A, im_B = im_A_path, im_B_path + + im_A, im_B = test_transform((im_A, im_B)) + if device is None: + im_A, im_B = im_A[None], im_B[None] + else: + im_A, im_B = im_A[None].to(device), im_B[None].to(device) + scale_factor = math.sqrt(self.upsample_res[0] * self.upsample_res[1] / (self.w_resized * self.h_resized)) + batch = {"im_A": im_A, "im_B": im_B, "corresps": finest_corresps} + if symmetric: + corresps = self.forward_symmetric(batch, upsample = True, batched=True, scale_factor = scale_factor) + else: + corresps = self.forward(batch, batched = True, upsample=True, scale_factor = scale_factor) + + im_A_to_im_B = corresps[finest_scale]["flow"] + certainty = corresps[finest_scale]["certainty"] - (low_res_certainty if self.attenuate_cert else 0) + if finest_scale != 1: + im_A_to_im_B = F.interpolate( + im_A_to_im_B, size=(hs, ws), align_corners=False, mode="bilinear" + ) + certainty = 
F.interpolate( + certainty, size=(hs, ws), align_corners=False, mode="bilinear" + ) + im_A_to_im_B = im_A_to_im_B.permute( + 0, 2, 3, 1 + ) + # Create im_A meshgrid + im_A_coords = torch.meshgrid( + ( + torch.linspace(-1 + 1 / hs, 1 - 1 / hs, hs, device=im_A.device), + torch.linspace(-1 + 1 / ws, 1 - 1 / ws, ws, device=im_A.device), + ) + ) + im_A_coords = torch.stack((im_A_coords[1], im_A_coords[0])) + im_A_coords = im_A_coords[None].expand(b, 2, hs, ws) + certainty = certainty.sigmoid() # logits -> probs + im_A_coords = im_A_coords.permute(0, 2, 3, 1) + if (im_A_to_im_B.abs() > 1).any() and True: + wrong = (im_A_to_im_B.abs() > 1).sum(dim=-1) > 0 + certainty[wrong[:,None]] = 0 + im_A_to_im_B = torch.clamp(im_A_to_im_B, -1, 1) + if symmetric: + A_to_B, B_to_A = im_A_to_im_B.chunk(2) + q_warp = torch.cat((im_A_coords, A_to_B), dim=-1) + im_B_coords = im_A_coords + s_warp = torch.cat((B_to_A, im_B_coords), dim=-1) + warp = torch.cat((q_warp, s_warp),dim=2) + certainty = torch.cat(certainty.chunk(2), dim=3) + else: + warp = torch.cat((im_A_coords, im_A_to_im_B), dim=-1) + if batched: + return ( + warp, + certainty[:, 0] + ) + else: + return ( + warp[0], + certainty[0, 0], + ) + + def visualize_warp(self, warp, certainty, im_A = None, im_B = None, im_A_path = None, im_B_path = None, device = "cuda", symmetric = True, save_path = None): + assert symmetric == True, "Currently assuming bidirectional warp, might update this if someone complains ;)" + H,W2,_ = warp.shape + W = W2//2 if symmetric else W2 + if im_A is None: + from PIL import Image + im_A, im_B = Image.open(im_A_path), Image.open(im_B_path) + im_A = im_A.resize((W,H)) + im_B = im_B.resize((W,H)) + + x_A = (torch.tensor(np.array(im_A)) / 255).to(device).permute(2, 0, 1) + x_B = (torch.tensor(np.array(im_B)) / 255).to(device).permute(2, 0, 1) + + im_A_transfer_rgb = F.grid_sample( + x_B[None], warp[:,:W, 2:][None], mode="bilinear", align_corners=False + )[0] + im_B_transfer_rgb = F.grid_sample( + x_A[None], warp[:, W:, :2][None], mode="bilinear", align_corners=False + )[0] + warp_im = torch.cat((im_A_transfer_rgb,im_B_transfer_rgb),dim=2) + white_im = torch.ones((H,2*W),device=device) + vis_im = certainty * warp_im + (1 - certainty) * white_im + if save_path is not None: + from roma.utils import tensor_to_pil + tensor_to_pil(vis_im, unnormalize=False).save(save_path) + return vis_im \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/model_zoo/__init__.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/model_zoo/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..20e8da481880da376c6d4653770bd4ca1e814034 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/model_zoo/__init__.py @@ -0,0 +1,53 @@ +from typing import Union +import torch +from .roma_models import roma_model + +weight_urls = { + "roma": { + "outdoor": "https://github.com/Parskatt/storage/releases/download/roma/roma_outdoor.pth", + "indoor": "https://github.com/Parskatt/storage/releases/download/roma/roma_indoor.pth", + }, + "dinov2": "https://dl.fbaipublicfiles.com/dinov2/dinov2_vitl14/dinov2_vitl14_pretrain.pth", #hopefully this doesnt change :D +} + +def roma_outdoor(device, weights=None, dinov2_weights=None, coarse_res = 560, upsample_res = 864, upsample_preds = True, symmetric=True, attenuate_cert=True): + if isinstance(coarse_res, int): + coarse_res = (coarse_res, coarse_res) + if isinstance(upsample_res, int): + upsample_res = (upsample_res, 
upsample_res)
+
+    assert coarse_res[0] % 14 == 0, "Needs to be multiple of 14 for backbone"
+    assert coarse_res[1] % 14 == 0, "Needs to be multiple of 14 for backbone"
+
+    if weights is None:
+        weights = torch.hub.load_state_dict_from_url(weight_urls["roma"]["outdoor"],
+                                                     map_location=device)
+    if dinov2_weights is None:
+        dinov2_weights = torch.hub.load_state_dict_from_url(weight_urls["dinov2"],
+                                                            map_location=device)
+    model = roma_model(resolution=coarse_res, upsample_preds=upsample_preds,
+               weights=weights,dinov2_weights = dinov2_weights,device=device, symmetric=symmetric, attenuate_cert=attenuate_cert)
+    model.upsample_res = upsample_res
+    print(f"Using coarse resolution {coarse_res}, and upsample res {model.upsample_res}")
+    return model
+
+def roma_indoor(device, weights=None, dinov2_weights=None, coarse_res = 560, upsample_res = 864):
+    if isinstance(coarse_res, int):
+        coarse_res = (coarse_res, coarse_res)
+    if isinstance(upsample_res, int):
+        upsample_res = (upsample_res, upsample_res)
+
+    assert coarse_res[0] % 14 == 0, "Needs to be multiple of 14 for backbone"
+    assert coarse_res[1] % 14 == 0, "Needs to be multiple of 14 for backbone"
+
+    if weights is None:
+        weights = torch.hub.load_state_dict_from_url(weight_urls["roma"]["indoor"],
+                                                     map_location=device)
+    if dinov2_weights is None:
+        dinov2_weights = torch.hub.load_state_dict_from_url(weight_urls["dinov2"],
+                                                            map_location=device)
+    model = roma_model(resolution=coarse_res, upsample_preds=True,
+               weights=weights,dinov2_weights = dinov2_weights,device=device)
+    model.upsample_res = upsample_res
+    print(f"Using coarse resolution {coarse_res}, and upsample res {model.upsample_res}")
+    return model
diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/model_zoo/roma_models.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/model_zoo/roma_models.py
new file mode 100644
index 0000000000000000000000000000000000000000..cb4e66952d58b78cba7c58ab84f91a3d0bd02c33
--- /dev/null
+++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/model_zoo/roma_models.py
@@ -0,0 +1,162 @@
+import warnings
+import torch.nn as nn
+
+from pathlib import Path
+import sys
+sys.path.append(str(Path(__file__).parent.parent.parent.parent.resolve()))
+from roma.models.matcher import *
+from roma.models.transformer import Block, TransformerDecoder, MemEffAttention
+from roma.models.encoders import *
+
+def roma_model(resolution, upsample_preds, device = None, weights=None, dinov2_weights=None, symmetric=True, attenuate_cert=True, **kwargs):
+    # roma weights and dinov2 weights are loaded separately, as dinov2 weights are not parameters
+    torch.backends.cuda.matmul.allow_tf32 = True  # allow tf32 on matmul
+    torch.backends.cudnn.allow_tf32 = True  # allow tf32 on cudnn
+    warnings.filterwarnings('ignore', category=UserWarning, message='TypedStorage is deprecated')
+    gp_dim = 512
+    feat_dim = 512
+    decoder_dim = gp_dim + feat_dim
+    cls_to_coord_res = 64
+    coordinate_decoder = TransformerDecoder(
+        nn.Sequential(*[Block(decoder_dim, 8, attn_class=MemEffAttention) for _ in range(5)]),
+        decoder_dim,
+        cls_to_coord_res**2 + 1,
+        is_classifier=True,
+        amp = True,
+        pos_enc = False,)
+    dw = True
+    hidden_blocks = 8
+    kernel_size = 5
+    displacement_emb = "linear"
+    disable_local_corr_grad = True
+
+    conv_refiner = nn.ModuleDict(
+        {
+            "16": ConvRefiner(
+                2 * 512+128+(2*7+1)**2,
+                2 * 512+128+(2*7+1)**2,
+                2 + 1,
+                kernel_size=kernel_size,
+                dw=dw,
+                hidden_blocks=hidden_blocks,
+                displacement_emb=displacement_emb,
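+                # in_dim here = 2*512 (projected feats from both images) + 128
+                # (displacement embedding) + (2*7+1)**2 (local correlation patch) +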
displacement_emb_dim=128, + local_corr_radius = 7, + corr_in_other = True, + amp = True, + disable_local_corr_grad = disable_local_corr_grad, + bn_momentum = 0.01, + ), + "8": ConvRefiner( + 2 * 512+64+(2*3+1)**2, + 2 * 512+64+(2*3+1)**2, + 2 + 1, + kernel_size=kernel_size, + dw=dw, + hidden_blocks=hidden_blocks, + displacement_emb=displacement_emb, + displacement_emb_dim=64, + local_corr_radius = 3, + corr_in_other = True, + amp = True, + disable_local_corr_grad = disable_local_corr_grad, + bn_momentum = 0.01, + ), + "4": ConvRefiner( + 2 * 256+32+(2*2+1)**2, + 2 * 256+32+(2*2+1)**2, + 2 + 1, + kernel_size=kernel_size, + dw=dw, + hidden_blocks=hidden_blocks, + displacement_emb=displacement_emb, + displacement_emb_dim=32, + local_corr_radius = 2, + corr_in_other = True, + amp = True, + disable_local_corr_grad = disable_local_corr_grad, + bn_momentum = 0.01, + ), + "2": ConvRefiner( + 2 * 64+16, + 128+16, + 2 + 1, + kernel_size=kernel_size, + dw=dw, + hidden_blocks=hidden_blocks, + displacement_emb=displacement_emb, + displacement_emb_dim=16, + amp = True, + disable_local_corr_grad = disable_local_corr_grad, + bn_momentum = 0.01, + ), + "1": ConvRefiner( + 2 * 9 + 6, + 24, + 2 + 1, + kernel_size=kernel_size, + dw=dw, + hidden_blocks = hidden_blocks, + displacement_emb = displacement_emb, + displacement_emb_dim = 6, + amp = True, + disable_local_corr_grad = disable_local_corr_grad, + bn_momentum = 0.01, + ), + } + ) + kernel_temperature = 0.2 + learn_temperature = False + no_cov = True + kernel = CosKernel + only_attention = False + basis = "fourier" + gp16 = GP( + kernel, + T=kernel_temperature, + learn_temperature=learn_temperature, + only_attention=only_attention, + gp_dim=gp_dim, + basis=basis, + no_cov=no_cov, + ) + gps = nn.ModuleDict({"16": gp16}) + proj16 = nn.Sequential(nn.Conv2d(1024, 512, 1, 1), nn.BatchNorm2d(512)) + proj8 = nn.Sequential(nn.Conv2d(512, 512, 1, 1), nn.BatchNorm2d(512)) + proj4 = nn.Sequential(nn.Conv2d(256, 256, 1, 1), nn.BatchNorm2d(256)) + proj2 = nn.Sequential(nn.Conv2d(128, 64, 1, 1), nn.BatchNorm2d(64)) + proj1 = nn.Sequential(nn.Conv2d(64, 9, 1, 1), nn.BatchNorm2d(9)) + proj = nn.ModuleDict({ + "16": proj16, + "8": proj8, + "4": proj4, + "2": proj2, + "1": proj1, + }) + displacement_dropout_p = 0.0 + gm_warp_dropout_p = 0.0 + decoder = Decoder(coordinate_decoder, + gps, + proj, + conv_refiner, + amp=True, + detach=True, + scales=["16", "8", "4", "2", "1"], + displacement_dropout_p = displacement_dropout_p, + gm_warp_dropout_p = gm_warp_dropout_p) + + encoder = CNNandDinov2( + cnn_kwargs = dict( + pretrained=False, + amp = True), + amp = True, + use_vgg = True, + dinov2_weights = dinov2_weights + ) + h,w = resolution + symmetric = symmetric + attenuate_cert = attenuate_cert + matcher = RegressionMatcher(encoder, decoder, h=h, w=w, upsample_preds=upsample_preds, + symmetric = symmetric, attenuate_cert=attenuate_cert, **kwargs).to(device) + matcher.load_state_dict(weights) + return matcher diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/__init__.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3f7a3ccf92e70755817c98a3282f6d8769e32e63 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/__init__.py @@ -0,0 +1,47 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +from roma.utils.utils import get_grid +from .layers.block import Block +from 
.layers.attention import MemEffAttention +from .dinov2 import vit_large, vit_base, vit_small + +class TransformerDecoder(nn.Module): + def __init__(self, blocks, hidden_dim, out_dim, is_classifier = False, *args, + amp = False, pos_enc = True, learned_embeddings = False, embedding_dim = None, amp_dtype = torch.float16, **kwargs) -> None: + super().__init__(*args, **kwargs) + self.blocks = blocks + self.to_out = nn.Linear(hidden_dim, out_dim) + self.hidden_dim = hidden_dim + self.out_dim = out_dim + self._scales = [16] + self.is_classifier = is_classifier + self.amp = amp + self.amp_dtype = amp_dtype + self.pos_enc = pos_enc + self.learned_embeddings = learned_embeddings + if self.learned_embeddings: + self.learned_pos_embeddings = nn.Parameter(nn.init.kaiming_normal_(torch.empty((1, hidden_dim, embedding_dim, embedding_dim)))) + + def scales(self): + return self._scales.copy() + + def forward(self, gp_posterior, features, old_stuff, new_scale): + with torch.autocast("cuda", dtype=self.amp_dtype, enabled=self.amp): + B,C,H,W = gp_posterior.shape + x = torch.cat((gp_posterior, features), dim = 1) + B,C,H,W = x.shape + grid = get_grid(B, H, W, x.device).reshape(B,H*W,2) + if self.learned_embeddings: + pos_enc = F.interpolate(self.learned_pos_embeddings, size = (H,W), mode = 'bilinear', align_corners = False).permute(0,2,3,1).reshape(1,H*W,C) + else: + pos_enc = 0 + tokens = x.reshape(B,C,H*W).permute(0,2,1) + pos_enc + z = self.blocks(tokens) + out = self.to_out(z) + out = out.permute(0,2,1).reshape(B, self.out_dim, H, W) + warp, certainty = out[:, :-1], out[:, -1:] + return warp, certainty, None + + diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/dinov2.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/dinov2.py new file mode 100644 index 0000000000000000000000000000000000000000..b556c63096d17239c8603d5fe626c331963099fd --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/dinov2.py @@ -0,0 +1,359 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. 
+ +# References: +# https://github.com/facebookresearch/dino/blob/main/vision_transformer.py +# https://github.com/rwightman/pytorch-image-models/tree/master/timm/models/vision_transformer.py + +from functools import partial +import math +import logging +from typing import Sequence, Tuple, Union, Callable + +import torch +import torch.nn as nn +import torch.utils.checkpoint +from torch.nn.init import trunc_normal_ + +from .layers import Mlp, PatchEmbed, SwiGLUFFNFused, MemEffAttention, NestedTensorBlock as Block + + + +def named_apply(fn: Callable, module: nn.Module, name="", depth_first=True, include_root=False) -> nn.Module: + if not depth_first and include_root: + fn(module=module, name=name) + for child_name, child_module in module.named_children(): + child_name = ".".join((name, child_name)) if name else child_name + named_apply(fn=fn, module=child_module, name=child_name, depth_first=depth_first, include_root=True) + if depth_first and include_root: + fn(module=module, name=name) + return module + + +class BlockChunk(nn.ModuleList): + def forward(self, x): + for b in self: + x = b(x) + return x + + +class DinoVisionTransformer(nn.Module): + def __init__( + self, + img_size=224, + patch_size=16, + in_chans=3, + embed_dim=768, + depth=12, + num_heads=12, + mlp_ratio=4.0, + qkv_bias=True, + ffn_bias=True, + proj_bias=True, + drop_path_rate=0.0, + drop_path_uniform=False, + init_values=None, # for layerscale: None or 0 => no layerscale + embed_layer=PatchEmbed, + act_layer=nn.GELU, + block_fn=Block, + ffn_layer="mlp", + block_chunks=1, + ): + """ + Args: + img_size (int, tuple): input image size + patch_size (int, tuple): patch size + in_chans (int): number of input channels + embed_dim (int): embedding dimension + depth (int): depth of transformer + num_heads (int): number of attention heads + mlp_ratio (int): ratio of mlp hidden dim to embedding dim + qkv_bias (bool): enable bias for qkv if True + proj_bias (bool): enable bias for proj in attn if True + ffn_bias (bool): enable bias for ffn if True + drop_path_rate (float): stochastic depth rate + drop_path_uniform (bool): apply uniform drop rate across blocks + weight_init (str): weight init scheme + init_values (float): layer-scale init values + embed_layer (nn.Module): patch embedding layer + act_layer (nn.Module): MLP activation layer + block_fn (nn.Module): transformer block class + ffn_layer (str): "mlp", "swiglu", "swiglufused" or "identity" + block_chunks: (int) split block sequence into block_chunks units for FSDP wrap + """ + super().__init__() + norm_layer = partial(nn.LayerNorm, eps=1e-6) + + self.num_features = self.embed_dim = embed_dim # num_features for consistency with other models + self.num_tokens = 1 + self.n_blocks = depth + self.num_heads = num_heads + self.patch_size = patch_size + + self.patch_embed = embed_layer(img_size=img_size, patch_size=patch_size, in_chans=in_chans, embed_dim=embed_dim) + num_patches = self.patch_embed.num_patches + + self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim)) + self.pos_embed = nn.Parameter(torch.zeros(1, num_patches + self.num_tokens, embed_dim)) + + if drop_path_uniform is True: + dpr = [drop_path_rate] * depth + else: + dpr = [x.item() for x in torch.linspace(0, drop_path_rate, depth)] # stochastic depth decay rule + + if ffn_layer == "mlp": + ffn_layer = Mlp + elif ffn_layer == "swiglufused" or ffn_layer == "swiglu": + ffn_layer = SwiGLUFFNFused + elif ffn_layer == "identity": + + def f(*args, **kwargs): + return nn.Identity() + + ffn_layer = f + else: + raise 
NotImplementedError + + blocks_list = [ + block_fn( + dim=embed_dim, + num_heads=num_heads, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + proj_bias=proj_bias, + ffn_bias=ffn_bias, + drop_path=dpr[i], + norm_layer=norm_layer, + act_layer=act_layer, + ffn_layer=ffn_layer, + init_values=init_values, + ) + for i in range(depth) + ] + if block_chunks > 0: + self.chunked_blocks = True + chunked_blocks = [] + chunksize = depth // block_chunks + for i in range(0, depth, chunksize): + # this is to keep the block index consistent if we chunk the block list + chunked_blocks.append([nn.Identity()] * i + blocks_list[i : i + chunksize]) + self.blocks = nn.ModuleList([BlockChunk(p) for p in chunked_blocks]) + else: + self.chunked_blocks = False + self.blocks = nn.ModuleList(blocks_list) + + self.norm = norm_layer(embed_dim) + self.head = nn.Identity() + + self.mask_token = nn.Parameter(torch.zeros(1, embed_dim)) + + self.init_weights() + for param in self.parameters(): + param.requires_grad = False + + @property + def device(self): + return self.cls_token.device + + def init_weights(self): + trunc_normal_(self.pos_embed, std=0.02) + nn.init.normal_(self.cls_token, std=1e-6) + named_apply(init_weights_vit_timm, self) + + def interpolate_pos_encoding(self, x, w, h): + previous_dtype = x.dtype + npatch = x.shape[1] - 1 + N = self.pos_embed.shape[1] - 1 + if npatch == N and w == h: + return self.pos_embed + pos_embed = self.pos_embed.float() + class_pos_embed = pos_embed[:, 0] + patch_pos_embed = pos_embed[:, 1:] + dim = x.shape[-1] + w0 = w // self.patch_size + h0 = h // self.patch_size + # we add a small number to avoid floating point error in the interpolation + # see discussion at https://github.com/facebookresearch/dino/issues/8 + w0, h0 = w0 + 0.1, h0 + 0.1 + + patch_pos_embed = nn.functional.interpolate( + patch_pos_embed.reshape(1, int(math.sqrt(N)), int(math.sqrt(N)), dim).permute(0, 3, 1, 2), + scale_factor=(w0 / math.sqrt(N), h0 / math.sqrt(N)), + mode="bicubic", + ) + + assert int(w0) == patch_pos_embed.shape[-2] and int(h0) == patch_pos_embed.shape[-1] + patch_pos_embed = patch_pos_embed.permute(0, 2, 3, 1).view(1, -1, dim) + return torch.cat((class_pos_embed.unsqueeze(0), patch_pos_embed), dim=1).to(previous_dtype) + + def prepare_tokens_with_masks(self, x, masks=None): + B, nc, w, h = x.shape + x = self.patch_embed(x) + if masks is not None: + x = torch.where(masks.unsqueeze(-1), self.mask_token.to(x.dtype).unsqueeze(0), x) + + x = torch.cat((self.cls_token.expand(x.shape[0], -1, -1), x), dim=1) + x = x + self.interpolate_pos_encoding(x, w, h) + + return x + + def forward_features_list(self, x_list, masks_list): + x = [self.prepare_tokens_with_masks(x, masks) for x, masks in zip(x_list, masks_list)] + for blk in self.blocks: + x = blk(x) + + all_x = x + output = [] + for x, masks in zip(all_x, masks_list): + x_norm = self.norm(x) + output.append( + { + "x_norm_clstoken": x_norm[:, 0], + "x_norm_patchtokens": x_norm[:, 1:], + "x_prenorm": x, + "masks": masks, + } + ) + return output + + def forward_features(self, x, masks=None): + if isinstance(x, list): + return self.forward_features_list(x, masks) + + x = self.prepare_tokens_with_masks(x, masks) + + for blk in self.blocks: + x = blk(x) + + x_norm = self.norm(x) + return { + "x_norm_clstoken": x_norm[:, 0], + "x_norm_patchtokens": x_norm[:, 1:], + "x_prenorm": x, + "masks": masks, + } + + def _get_intermediate_layers_not_chunked(self, x, n=1): + x = self.prepare_tokens_with_masks(x) + # If n is an int, take the n last blocks. 
If it's a list, take them + output, total_block_len = [], len(self.blocks) + blocks_to_take = range(total_block_len - n, total_block_len) if isinstance(n, int) else n + for i, blk in enumerate(self.blocks): + x = blk(x) + if i in blocks_to_take: + output.append(x) + assert len(output) == len(blocks_to_take), f"only {len(output)} / {len(blocks_to_take)} blocks found" + return output + + def _get_intermediate_layers_chunked(self, x, n=1): + x = self.prepare_tokens_with_masks(x) + output, i, total_block_len = [], 0, len(self.blocks[-1]) + # If n is an int, take the n last blocks. If it's a list, take them + blocks_to_take = range(total_block_len - n, total_block_len) if isinstance(n, int) else n + for block_chunk in self.blocks: + for blk in block_chunk[i:]: # Passing the nn.Identity() + x = blk(x) + if i in blocks_to_take: + output.append(x) + i += 1 + assert len(output) == len(blocks_to_take), f"only {len(output)} / {len(blocks_to_take)} blocks found" + return output + + def get_intermediate_layers( + self, + x: torch.Tensor, + n: Union[int, Sequence] = 1, # Layers or n last layers to take + reshape: bool = False, + return_class_token: bool = False, + norm=True, + ) -> Tuple[Union[torch.Tensor, Tuple[torch.Tensor]]]: + if self.chunked_blocks: + outputs = self._get_intermediate_layers_chunked(x, n) + else: + outputs = self._get_intermediate_layers_not_chunked(x, n) + if norm: + outputs = [self.norm(out) for out in outputs] + class_tokens = [out[:, 0] for out in outputs] + outputs = [out[:, 1:] for out in outputs] + if reshape: + B, _, w, h = x.shape + outputs = [ + out.reshape(B, w // self.patch_size, h // self.patch_size, -1).permute(0, 3, 1, 2).contiguous() + for out in outputs + ] + if return_class_token: + return tuple(zip(outputs, class_tokens)) + return tuple(outputs) + + def forward(self, *args, is_training=False, **kwargs): + ret = self.forward_features(*args, **kwargs) + if is_training: + return ret + else: + return self.head(ret["x_norm_clstoken"]) + + +def init_weights_vit_timm(module: nn.Module, name: str = ""): + """ViT weight initialization, original timm impl (for reproducibility)""" + if isinstance(module, nn.Linear): + trunc_normal_(module.weight, std=0.02) + if module.bias is not None: + nn.init.zeros_(module.bias) + + +def vit_small(patch_size=16, **kwargs): + model = DinoVisionTransformer( + patch_size=patch_size, + embed_dim=384, + depth=12, + num_heads=6, + mlp_ratio=4, + block_fn=partial(Block, attn_class=MemEffAttention), + **kwargs, + ) + return model + + +def vit_base(patch_size=16, **kwargs): + model = DinoVisionTransformer( + patch_size=patch_size, + embed_dim=768, + depth=12, + num_heads=12, + mlp_ratio=4, + block_fn=partial(Block, attn_class=MemEffAttention), + **kwargs, + ) + return model + + +def vit_large(patch_size=16, **kwargs): + model = DinoVisionTransformer( + patch_size=patch_size, + embed_dim=1024, + depth=24, + num_heads=16, + mlp_ratio=4, + block_fn=partial(Block, attn_class=MemEffAttention), + **kwargs, + ) + return model + + +def vit_giant2(patch_size=16, **kwargs): + """ + Close to ViT-giant, with embed-dim 1536 and 24 heads => embed-dim per head 64 + """ + model = DinoVisionTransformer( + patch_size=patch_size, + embed_dim=1536, + depth=40, + num_heads=24, + mlp_ratio=4, + block_fn=partial(Block, attn_class=MemEffAttention), + **kwargs, + ) + return model \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/__init__.py 
b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..31f196aacac5be8a7c537a3dfa8f97084671b466 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/__init__.py @@ -0,0 +1,12 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +from .dino_head import DINOHead +from .mlp import Mlp +from .patch_embed import PatchEmbed +from .swiglu_ffn import SwiGLUFFN, SwiGLUFFNFused +from .block import NestedTensorBlock +from .attention import MemEffAttention diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/attention.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/attention.py new file mode 100644 index 0000000000000000000000000000000000000000..1f9b0c94b40967dfdff4f261c127cbd21328c905 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/attention.py @@ -0,0 +1,81 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +# References: +# https://github.com/facebookresearch/dino/blob/master/vision_transformer.py +# https://github.com/rwightman/pytorch-image-models/tree/master/timm/models/vision_transformer.py + +import logging + +from torch import Tensor +from torch import nn + + +logger = logging.getLogger("dinov2") + + +try: + from xformers.ops import memory_efficient_attention, unbind, fmha + + XFORMERS_AVAILABLE = True +except ImportError: + logger.warning("xFormers not available") + XFORMERS_AVAILABLE = False + + +class Attention(nn.Module): + def __init__( + self, + dim: int, + num_heads: int = 8, + qkv_bias: bool = False, + proj_bias: bool = True, + attn_drop: float = 0.0, + proj_drop: float = 0.0, + ) -> None: + super().__init__() + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = head_dim**-0.5 + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim, bias=proj_bias) + self.proj_drop = nn.Dropout(proj_drop) + + def forward(self, x: Tensor) -> Tensor: + B, N, C = x.shape + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) + + q, k, v = qkv[0] * self.scale, qkv[1], qkv[2] + attn = q @ k.transpose(-2, -1) + + attn = attn.softmax(dim=-1) + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +class MemEffAttention(Attention): + def forward(self, x: Tensor, attn_bias=None) -> Tensor: + if not XFORMERS_AVAILABLE: + assert attn_bias is None, "xFormers is required for nested tensors usage" + return super().forward(x) + + B, N, C = x.shape + qkv = self.qkv(x).reshape(B, N, 3, self.num_heads, C // self.num_heads) + + q, k, v = unbind(qkv, 2) + + x = memory_efficient_attention(q, k, v, attn_bias=attn_bias) + x = x.reshape([B, N, C]) + + x = self.proj(x) + x = self.proj_drop(x) + return x diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/block.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/block.py new file mode 100644 index 
0000000000000000000000000000000000000000..25488f57cc0ad3c692f86b62555f6668e2a66db1 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/block.py @@ -0,0 +1,252 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +# References: +# https://github.com/facebookresearch/dino/blob/master/vision_transformer.py +# https://github.com/rwightman/pytorch-image-models/tree/master/timm/layers/patch_embed.py + +import logging +from typing import Callable, List, Any, Tuple, Dict + +import torch +from torch import nn, Tensor + +from .attention import Attention, MemEffAttention +from .drop_path import DropPath +from .layer_scale import LayerScale +from .mlp import Mlp + + +logger = logging.getLogger("dinov2") + + +try: + from xformers.ops import fmha + from xformers.ops import scaled_index_add, index_select_cat + + XFORMERS_AVAILABLE = True +except ImportError: + logger.warning("xFormers not available") + XFORMERS_AVAILABLE = False + + +class Block(nn.Module): + def __init__( + self, + dim: int, + num_heads: int, + mlp_ratio: float = 4.0, + qkv_bias: bool = False, + proj_bias: bool = True, + ffn_bias: bool = True, + drop: float = 0.0, + attn_drop: float = 0.0, + init_values=None, + drop_path: float = 0.0, + act_layer: Callable[..., nn.Module] = nn.GELU, + norm_layer: Callable[..., nn.Module] = nn.LayerNorm, + attn_class: Callable[..., nn.Module] = Attention, + ffn_layer: Callable[..., nn.Module] = Mlp, + ) -> None: + super().__init__() + # print(f"biases: qkv: {qkv_bias}, proj: {proj_bias}, ffn: {ffn_bias}") + self.norm1 = norm_layer(dim) + self.attn = attn_class( + dim, + num_heads=num_heads, + qkv_bias=qkv_bias, + proj_bias=proj_bias, + attn_drop=attn_drop, + proj_drop=drop, + ) + self.ls1 = LayerScale(dim, init_values=init_values) if init_values else nn.Identity() + self.drop_path1 = DropPath(drop_path) if drop_path > 0.0 else nn.Identity() + + self.norm2 = norm_layer(dim) + mlp_hidden_dim = int(dim * mlp_ratio) + self.mlp = ffn_layer( + in_features=dim, + hidden_features=mlp_hidden_dim, + act_layer=act_layer, + drop=drop, + bias=ffn_bias, + ) + self.ls2 = LayerScale(dim, init_values=init_values) if init_values else nn.Identity() + self.drop_path2 = DropPath(drop_path) if drop_path > 0.0 else nn.Identity() + + self.sample_drop_ratio = drop_path + + def forward(self, x: Tensor) -> Tensor: + def attn_residual_func(x: Tensor) -> Tensor: + return self.ls1(self.attn(self.norm1(x))) + + def ffn_residual_func(x: Tensor) -> Tensor: + return self.ls2(self.mlp(self.norm2(x))) + + if self.training and self.sample_drop_ratio > 0.1: + # the overhead is compensated only for a drop path rate larger than 0.1 + x = drop_add_residual_stochastic_depth( + x, + residual_func=attn_residual_func, + sample_drop_ratio=self.sample_drop_ratio, + ) + x = drop_add_residual_stochastic_depth( + x, + residual_func=ffn_residual_func, + sample_drop_ratio=self.sample_drop_ratio, + ) + elif self.training and self.sample_drop_ratio > 0.0: + x = x + self.drop_path1(attn_residual_func(x)) + x = x + self.drop_path1(ffn_residual_func(x)) # FIXME: drop_path2 + else: + x = x + attn_residual_func(x) + x = x + ffn_residual_func(x) + return x + + +def drop_add_residual_stochastic_depth( + x: Tensor, + residual_func: Callable[[Tensor], Tensor], + sample_drop_ratio: float = 0.0, +) -> Tensor: + # 1) extract subset using permutation + b, n, d = x.shape 
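+    # Batch-level stochastic depth: e.g. with b=8 and sample_drop_ratio=0.25,
+    # only 6 random samples run the residual branch, and the added residual is
+    # rescaled by b/subset = 8/6 so its expected magnitude matches the full batch.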
+ sample_subset_size = max(int(b * (1 - sample_drop_ratio)), 1) + brange = (torch.randperm(b, device=x.device))[:sample_subset_size] + x_subset = x[brange] + + # 2) apply residual_func to get residual + residual = residual_func(x_subset) + + x_flat = x.flatten(1) + residual = residual.flatten(1) + + residual_scale_factor = b / sample_subset_size + + # 3) add the residual + x_plus_residual = torch.index_add(x_flat, 0, brange, residual.to(dtype=x.dtype), alpha=residual_scale_factor) + return x_plus_residual.view_as(x) + + +def get_branges_scales(x, sample_drop_ratio=0.0): + b, n, d = x.shape + sample_subset_size = max(int(b * (1 - sample_drop_ratio)), 1) + brange = (torch.randperm(b, device=x.device))[:sample_subset_size] + residual_scale_factor = b / sample_subset_size + return brange, residual_scale_factor + + +def add_residual(x, brange, residual, residual_scale_factor, scaling_vector=None): + if scaling_vector is None: + x_flat = x.flatten(1) + residual = residual.flatten(1) + x_plus_residual = torch.index_add(x_flat, 0, brange, residual.to(dtype=x.dtype), alpha=residual_scale_factor) + else: + x_plus_residual = scaled_index_add( + x, brange, residual.to(dtype=x.dtype), scaling=scaling_vector, alpha=residual_scale_factor + ) + return x_plus_residual + + +attn_bias_cache: Dict[Tuple, Any] = {} + + +def get_attn_bias_and_cat(x_list, branges=None): + """ + this will perform the index select, cat the tensors, and provide the attn_bias from cache + """ + batch_sizes = [b.shape[0] for b in branges] if branges is not None else [x.shape[0] for x in x_list] + all_shapes = tuple((b, x.shape[1]) for b, x in zip(batch_sizes, x_list)) + if all_shapes not in attn_bias_cache.keys(): + seqlens = [] + for b, x in zip(batch_sizes, x_list): + for _ in range(b): + seqlens.append(x.shape[1]) + attn_bias = fmha.BlockDiagonalMask.from_seqlens(seqlens) + attn_bias._batch_sizes = batch_sizes + attn_bias_cache[all_shapes] = attn_bias + + if branges is not None: + cat_tensors = index_select_cat([x.flatten(1) for x in x_list], branges).view(1, -1, x_list[0].shape[-1]) + else: + tensors_bs1 = tuple(x.reshape([1, -1, *x.shape[2:]]) for x in x_list) + cat_tensors = torch.cat(tensors_bs1, dim=1) + + return attn_bias_cache[all_shapes], cat_tensors + + +def drop_add_residual_stochastic_depth_list( + x_list: List[Tensor], + residual_func: Callable[[Tensor, Any], Tensor], + sample_drop_ratio: float = 0.0, + scaling_vector=None, +) -> Tensor: + # 1) generate random set of indices for dropping samples in the batch + branges_scales = [get_branges_scales(x, sample_drop_ratio=sample_drop_ratio) for x in x_list] + branges = [s[0] for s in branges_scales] + residual_scale_factors = [s[1] for s in branges_scales] + + # 2) get attention bias and index+concat the tensors + attn_bias, x_cat = get_attn_bias_and_cat(x_list, branges) + + # 3) apply residual_func to get residual, and split the result + residual_list = attn_bias.split(residual_func(x_cat, attn_bias=attn_bias)) # type: ignore + + outputs = [] + for x, brange, residual, residual_scale_factor in zip(x_list, branges, residual_list, residual_scale_factors): + outputs.append(add_residual(x, brange, residual, residual_scale_factor, scaling_vector).view_as(x)) + return outputs + + +class NestedTensorBlock(Block): + def forward_nested(self, x_list: List[Tensor]) -> List[Tensor]: + """ + x_list contains a list of tensors to nest together and run + """ + assert isinstance(self.attn, MemEffAttention) + + if self.training and self.sample_drop_ratio > 0.0: + + def attn_residual_func(x: 
Tensor, attn_bias=None) -> Tensor:
+                return self.attn(self.norm1(x), attn_bias=attn_bias)
+
+            def ffn_residual_func(x: Tensor, attn_bias=None) -> Tensor:
+                return self.mlp(self.norm2(x))
+
+            x_list = drop_add_residual_stochastic_depth_list(
+                x_list,
+                residual_func=attn_residual_func,
+                sample_drop_ratio=self.sample_drop_ratio,
+                scaling_vector=self.ls1.gamma if isinstance(self.ls1, LayerScale) else None,
+            )
+            x_list = drop_add_residual_stochastic_depth_list(
+                x_list,
+                residual_func=ffn_residual_func,
+                sample_drop_ratio=self.sample_drop_ratio,
+                scaling_vector=self.ls2.gamma if isinstance(self.ls2, LayerScale) else None,
+            )
+            return x_list
+        else:
+
+            def attn_residual_func(x: Tensor, attn_bias=None) -> Tensor:
+                return self.ls1(self.attn(self.norm1(x), attn_bias=attn_bias))
+
+            def ffn_residual_func(x: Tensor, attn_bias=None) -> Tensor:
+                return self.ls2(self.mlp(self.norm2(x)))
+
+            attn_bias, x = get_attn_bias_and_cat(x_list)
+            x = x + attn_residual_func(x, attn_bias=attn_bias)
+            x = x + ffn_residual_func(x)
+            return attn_bias.split(x)
+
+    def forward(self, x_or_x_list):
+        if isinstance(x_or_x_list, Tensor):
+            return super().forward(x_or_x_list)
+        elif isinstance(x_or_x_list, list):
+            assert XFORMERS_AVAILABLE, "Please install xFormers for nested tensors usage"
+            return self.forward_nested(x_or_x_list)
+        else:
+            raise AssertionError(f"Unsupported input type: {type(x_or_x_list)}")
diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/dino_head.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/dino_head.py
new file mode 100644
index 0000000000000000000000000000000000000000..7212db92a4fd8d4c7230e284e551a0234e9d8623
--- /dev/null
+++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/dino_head.py
@@ -0,0 +1,59 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the license found in the
+# LICENSE file in the root directory of this source tree.
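+
+# DINO-style projection head: an MLP bottleneck whose L2-normalized output is
+# passed through a weight-normalized linear layer (weight_g initialized to 1).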
+ +import torch +import torch.nn as nn +from torch.nn.init import trunc_normal_ +from torch.nn.utils import weight_norm + + +class DINOHead(nn.Module): + def __init__( + self, + in_dim, + out_dim, + use_bn=False, + nlayers=3, + hidden_dim=2048, + bottleneck_dim=256, + mlp_bias=True, + ): + super().__init__() + nlayers = max(nlayers, 1) + self.mlp = _build_mlp(nlayers, in_dim, bottleneck_dim, hidden_dim=hidden_dim, use_bn=use_bn, bias=mlp_bias) + self.apply(self._init_weights) + self.last_layer = weight_norm(nn.Linear(bottleneck_dim, out_dim, bias=False)) + self.last_layer.weight_g.data.fill_(1) + + def _init_weights(self, m): + if isinstance(m, nn.Linear): + trunc_normal_(m.weight, std=0.02) + if isinstance(m, nn.Linear) and m.bias is not None: + nn.init.constant_(m.bias, 0) + + def forward(self, x): + x = self.mlp(x) + eps = 1e-6 if x.dtype == torch.float16 else 1e-12 + x = nn.functional.normalize(x, dim=-1, p=2, eps=eps) + x = self.last_layer(x) + return x + + +def _build_mlp(nlayers, in_dim, bottleneck_dim, hidden_dim=None, use_bn=False, bias=True): + if nlayers == 1: + return nn.Linear(in_dim, bottleneck_dim, bias=bias) + else: + layers = [nn.Linear(in_dim, hidden_dim, bias=bias)] + if use_bn: + layers.append(nn.BatchNorm1d(hidden_dim)) + layers.append(nn.GELU()) + for _ in range(nlayers - 2): + layers.append(nn.Linear(hidden_dim, hidden_dim, bias=bias)) + if use_bn: + layers.append(nn.BatchNorm1d(hidden_dim)) + layers.append(nn.GELU()) + layers.append(nn.Linear(hidden_dim, bottleneck_dim, bias=bias)) + return nn.Sequential(*layers) diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/drop_path.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/drop_path.py new file mode 100644 index 0000000000000000000000000000000000000000..af05625984dd14682cc96a63bf0c97bab1f123b1 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/drop_path.py @@ -0,0 +1,35 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. 
+ +# References: +# https://github.com/facebookresearch/dino/blob/master/vision_transformer.py +# https://github.com/rwightman/pytorch-image-models/tree/master/timm/layers/drop.py + + +from torch import nn + + +def drop_path(x, drop_prob: float = 0.0, training: bool = False): + if drop_prob == 0.0 or not training: + return x + keep_prob = 1 - drop_prob + shape = (x.shape[0],) + (1,) * (x.ndim - 1) # work with diff dim tensors, not just 2D ConvNets + random_tensor = x.new_empty(shape).bernoulli_(keep_prob) + if keep_prob > 0.0: + random_tensor.div_(keep_prob) + output = x * random_tensor + return output + + +class DropPath(nn.Module): + """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).""" + + def __init__(self, drop_prob=None): + super(DropPath, self).__init__() + self.drop_prob = drop_prob + + def forward(self, x): + return drop_path(x, self.drop_prob, self.training) diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/layer_scale.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/layer_scale.py new file mode 100644 index 0000000000000000000000000000000000000000..ca5daa52bd81d3581adeb2198ea5b7dba2a3aea1 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/layer_scale.py @@ -0,0 +1,28 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +# Modified from: https://github.com/huggingface/pytorch-image-models/blob/main/timm/models/vision_transformer.py#L103-L110 + +from typing import Union + +import torch +from torch import Tensor +from torch import nn + + +class LayerScale(nn.Module): + def __init__( + self, + dim: int, + init_values: Union[float, Tensor] = 1e-5, + inplace: bool = False, + ) -> None: + super().__init__() + self.inplace = inplace + self.gamma = nn.Parameter(init_values * torch.ones(dim)) + + def forward(self, x: Tensor) -> Tensor: + return x.mul_(self.gamma) if self.inplace else x * self.gamma diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/mlp.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/mlp.py new file mode 100644 index 0000000000000000000000000000000000000000..5e4b315f972f9a9f54aef1e4ef4e81b52976f018 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/mlp.py @@ -0,0 +1,41 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. 
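+
+# Standard transformer feed-forward block: Linear -> activation (GELU by
+# default) -> Dropout -> Linear -> Dropout.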
+ +# References: +# https://github.com/facebookresearch/dino/blob/master/vision_transformer.py +# https://github.com/rwightman/pytorch-image-models/tree/master/timm/layers/mlp.py + + +from typing import Callable, Optional + +from torch import Tensor, nn + + +class Mlp(nn.Module): + def __init__( + self, + in_features: int, + hidden_features: Optional[int] = None, + out_features: Optional[int] = None, + act_layer: Callable[..., nn.Module] = nn.GELU, + drop: float = 0.0, + bias: bool = True, + ) -> None: + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Linear(in_features, hidden_features, bias=bias) + self.act = act_layer() + self.fc2 = nn.Linear(hidden_features, out_features, bias=bias) + self.drop = nn.Dropout(drop) + + def forward(self, x: Tensor) -> Tensor: + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/patch_embed.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/patch_embed.py new file mode 100644 index 0000000000000000000000000000000000000000..574abe41175568d700a389b8b96d1ba554914779 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/patch_embed.py @@ -0,0 +1,89 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +# References: +# https://github.com/facebookresearch/dino/blob/master/vision_transformer.py +# https://github.com/rwightman/pytorch-image-models/tree/master/timm/layers/patch_embed.py + +from typing import Callable, Optional, Tuple, Union + +from torch import Tensor +import torch.nn as nn + + +def make_2tuple(x): + if isinstance(x, tuple): + assert len(x) == 2 + return x + + assert isinstance(x, int) + return (x, x) + + +class PatchEmbed(nn.Module): + """ + 2D image to patch embedding: (B,C,H,W) -> (B,N,D) + + Args: + img_size: Image size. + patch_size: Patch token size. + in_chans: Number of input image channels. + embed_dim: Number of linear projection output channels. + norm_layer: Normalization layer. 
+ """ + + def __init__( + self, + img_size: Union[int, Tuple[int, int]] = 224, + patch_size: Union[int, Tuple[int, int]] = 16, + in_chans: int = 3, + embed_dim: int = 768, + norm_layer: Optional[Callable] = None, + flatten_embedding: bool = True, + ) -> None: + super().__init__() + + image_HW = make_2tuple(img_size) + patch_HW = make_2tuple(patch_size) + patch_grid_size = ( + image_HW[0] // patch_HW[0], + image_HW[1] // patch_HW[1], + ) + + self.img_size = image_HW + self.patch_size = patch_HW + self.patches_resolution = patch_grid_size + self.num_patches = patch_grid_size[0] * patch_grid_size[1] + + self.in_chans = in_chans + self.embed_dim = embed_dim + + self.flatten_embedding = flatten_embedding + + self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_HW, stride=patch_HW) + self.norm = norm_layer(embed_dim) if norm_layer else nn.Identity() + + def forward(self, x: Tensor) -> Tensor: + _, _, H, W = x.shape + patch_H, patch_W = self.patch_size + + assert H % patch_H == 0, f"Input image height {H} is not a multiple of patch height {patch_H}" + assert W % patch_W == 0, f"Input image width {W} is not a multiple of patch width: {patch_W}" + + x = self.proj(x) # B C H W + H, W = x.size(2), x.size(3) + x = x.flatten(2).transpose(1, 2) # B HW C + x = self.norm(x) + if not self.flatten_embedding: + x = x.reshape(-1, H, W, self.embed_dim) # B H W C + return x + + def flops(self) -> float: + Ho, Wo = self.patches_resolution + flops = Ho * Wo * self.embed_dim * self.in_chans * (self.patch_size[0] * self.patch_size[1]) + if self.norm is not None: + flops += Ho * Wo * self.embed_dim + return flops diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/swiglu_ffn.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/swiglu_ffn.py new file mode 100644 index 0000000000000000000000000000000000000000..b3324b266fb0a50ccf8c3a0ede2ae10ac4dfa03e --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/models/transformer/layers/swiglu_ffn.py @@ -0,0 +1,63 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. 
+ +from typing import Callable, Optional + +from torch import Tensor, nn +import torch.nn.functional as F + + +class SwiGLUFFN(nn.Module): + def __init__( + self, + in_features: int, + hidden_features: Optional[int] = None, + out_features: Optional[int] = None, + act_layer: Callable[..., nn.Module] = None, + drop: float = 0.0, + bias: bool = True, + ) -> None: + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.w12 = nn.Linear(in_features, 2 * hidden_features, bias=bias) + self.w3 = nn.Linear(hidden_features, out_features, bias=bias) + + def forward(self, x: Tensor) -> Tensor: + x12 = self.w12(x) + x1, x2 = x12.chunk(2, dim=-1) + hidden = F.silu(x1) * x2 + return self.w3(hidden) + + +try: + from xformers.ops import SwiGLU + + XFORMERS_AVAILABLE = True +except ImportError: + SwiGLU = SwiGLUFFN + XFORMERS_AVAILABLE = False + + +class SwiGLUFFNFused(SwiGLU): + def __init__( + self, + in_features: int, + hidden_features: Optional[int] = None, + out_features: Optional[int] = None, + act_layer: Callable[..., nn.Module] = None, + drop: float = 0.0, + bias: bool = True, + ) -> None: + out_features = out_features or in_features + hidden_features = hidden_features or in_features + hidden_features = (int(hidden_features * 2 / 3) + 7) // 8 * 8 + super().__init__( + in_features=in_features, + hidden_features=hidden_features, + out_features=out_features, + bias=bias, + ) diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/roma_adpat_model.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/roma_adpat_model.py new file mode 100644 index 0000000000000000000000000000000000000000..3dbc9e1c16fa8b7f42b9df7b29b114c99799c9ab --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/roma_adpat_model.py @@ -0,0 +1,32 @@ +import torch +import torch.nn as nn +from PIL import Image +import numpy as np +from pathlib import Path +import sys +sys.path.append(str(Path(__file__).parent.parent.resolve())) + +from .models import roma_outdoor + +class ROMA_Model(nn.Module): + def __init__(self, MAX_MATCHES=5000, SAMPLE_THRESH=0.8, MATCH_THRESH=0.3) -> None: + super().__init__() + self.model = roma_outdoor(device=torch.device("cpu")) + self.MAX_MATCHES = MAX_MATCHES + self.MATCH_THRESH = MATCH_THRESH + self.model.sample_thresh = SAMPLE_THRESH # Inner matcher + + def forward(self, data): + img0, img1 = data['image0_rgb'][0], data['image1_rgb'][0] # unbatch, 3 * H * W + + H_A, W_A = img0.shape[-2:] + H_B, W_B = img1.shape[-2:] + warp, certainty = self.model.match(img0, img1) # 3 * H * W + # Sample matches for estimation + matches, certainty = self.model.sample(warp, certainty, num=self.MAX_MATCHES) + + mask = certainty > self.MATCH_THRESH + kpts0, kpts1 = self.model.to_pixel_coordinates(matches, H_A, W_A, H_B, W_B) + kpts0, kpts1, certainty = map(lambda x:x[mask], [kpts0, kpts1, certainty]) + data.update({'m_bids': torch.zeros_like(kpts0[:, 0]), "mkpts0_f": kpts0, "mkpts1_f": kpts1, "mconf": certainty}) + return data \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/train/__init__.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/train/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..90269dc0f345a575e0ba21f5afa34202c7e6b433 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/train/__init__.py @@ -0,0 +1 @@ +from .train import train_k_epochs diff --git 
a/imcui/third_party/MatchAnything/third_party/ROMA/roma/train/train.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/train/train.py
new file mode 100644
index 0000000000000000000000000000000000000000..d51909772ec464be2428796c5ef936d2bf4e1ef4
--- /dev/null
+++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/train/train.py
@@ -0,0 +1,102 @@
+from tqdm import tqdm
+from roma.utils.utils import to_cuda
+import roma
+import torch
+import wandb  # required by the wandb.log calls throughout this module
+
+def log_param_statistics(named_parameters, norm_type = 2):
+    named_parameters = list(named_parameters)
+    grads = [p.grad for n, p in named_parameters if p.grad is not None]
+    weight_norms = [p.norm(p=norm_type) for n, p in named_parameters if p.grad is not None]
+    names = [n for n,p in named_parameters if p.grad is not None]
+    param_norm = torch.stack(weight_norms).norm(p=norm_type)
+    device = grads[0].device
+    grad_norms = torch.stack([torch.norm(g.detach(), norm_type).to(device) for g in grads])
+    nans_or_infs = torch.isinf(grad_norms) | torch.isnan(grad_norms)
+    nan_inf_names = [name for name, naninf in zip(names, nans_or_infs) if naninf]
+    total_grad_norm = torch.norm(grad_norms, norm_type)
+    if torch.any(nans_or_infs):
+        print(f"These params have nan or inf grads: {nan_inf_names}")
+    wandb.log({"grad_norm": total_grad_norm.item()}, step = roma.GLOBAL_STEP)
+    wandb.log({"param_norm": param_norm.item()}, step = roma.GLOBAL_STEP)
+
+def train_step(train_batch, model, objective, optimizer, grad_scaler, grad_clip_norm = 1.,**kwargs):
+    optimizer.zero_grad()
+    out = model(train_batch)
+    l = objective(out, train_batch)
+    grad_scaler.scale(l).backward()
+    grad_scaler.unscale_(optimizer)
+    log_param_statistics(model.named_parameters())
+    torch.nn.utils.clip_grad_norm_(model.parameters(), grad_clip_norm)  # what should max norm be?
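+    # Gradients are unscaled (grad_scaler.unscale_ above) before clipping, so the
+    # max-norm applies to true gradient magnitudes; grad_scaler.step() below
+    # skips the optimizer update when non-finite gradients are detected.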
+ grad_scaler.step(optimizer) + grad_scaler.update() + wandb.log({"grad_scale": grad_scaler._scale.item()}, step = roma.GLOBAL_STEP) + if grad_scaler._scale < 1.: + grad_scaler._scale = torch.tensor(1.).to(grad_scaler._scale) + roma.GLOBAL_STEP = roma.GLOBAL_STEP + roma.STEP_SIZE # increment global step + return {"train_out": out, "train_loss": l.item()} + + +def train_k_steps( + n_0, k, dataloader, model, objective, optimizer, lr_scheduler, grad_scaler, progress_bar=True, grad_clip_norm = 1., warmup = None, ema_model = None, +): + for n in tqdm(range(n_0, n_0 + k), disable=(not progress_bar) or roma.RANK > 0): + batch = next(dataloader) + model.train(True) + batch = to_cuda(batch) + train_step( + train_batch=batch, + model=model, + objective=objective, + optimizer=optimizer, + lr_scheduler=lr_scheduler, + grad_scaler=grad_scaler, + n=n, + grad_clip_norm = grad_clip_norm, + ) + if ema_model is not None: + ema_model.update() + if warmup is not None: + with warmup.dampening(): + lr_scheduler.step() + else: + lr_scheduler.step() + [wandb.log({f"lr_group_{grp}": lr}) for grp, lr in enumerate(lr_scheduler.get_last_lr())] + + +def train_epoch( + dataloader=None, + model=None, + objective=None, + optimizer=None, + lr_scheduler=None, + epoch=None, +): + model.train(True) + print(f"At epoch {epoch}") + for batch in tqdm(dataloader, mininterval=5.0): + batch = to_cuda(batch) + train_step( + train_batch=batch, model=model, objective=objective, optimizer=optimizer + ) + lr_scheduler.step() + return { + "model": model, + "optimizer": optimizer, + "lr_scheduler": lr_scheduler, + "epoch": epoch, + } + + +def train_k_epochs( + start_epoch, end_epoch, dataloader, model, objective, optimizer, lr_scheduler +): + for epoch in range(start_epoch, end_epoch + 1): + train_epoch( + dataloader=dataloader, + model=model, + objective=objective, + optimizer=optimizer, + lr_scheduler=lr_scheduler, + epoch=epoch, + ) diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/utils/__init__.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ce65460cfe65694fcef49d8aec3130672d9d7b8f --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/utils/__init__.py @@ -0,0 +1,18 @@ +from .utils import ( + pose_auc, + get_pose, + compute_relative_pose, + compute_pose_error, + estimate_pose, + estimate_pose_uncalibrated, + rotate_intrinsic, + get_tuple_transform_ops, + get_depth_tuple_transform_ops, + warp_kpts, + numpy_to_pil, + tensor_to_pil, + recover_pose, + signed_left_to_right_epipolar_distance, + resize_by_longest_edge_and_padding, + resize_by_longest_edge_and_stretch +) diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/utils/kde.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/utils/kde.py new file mode 100644 index 0000000000000000000000000000000000000000..c858b0734854d4ac3186c0fc507fdcf824a8275e --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/utils/kde.py @@ -0,0 +1,8 @@ +import torch + +def kde(x, std = 0.1): + # use a gaussian kernel to estimate density + # x = x.half() # Do it in half precision TODO: remove hardcoding + scores = (-torch.cdist(x,x)**2/(2*std**2)).exp() + density = scores.sum(dim=-1) + return density \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/utils/local_correlation.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/utils/local_correlation.py new file mode 100644 index 
0000000000000000000000000000000000000000..e180c189c2801d528cfe7de9943baecf11851135 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/utils/local_correlation.py @@ -0,0 +1,44 @@ +import torch +import torch.nn.functional as F + +def local_correlation( + feature0, + feature1, + local_radius, + padding_mode="zeros", + flow = None, + sample_mode = "bilinear", +): + r = local_radius + K = (2*r+1)**2 + B, c, h, w = feature0.size() + corr = torch.empty((B,K,h,w), device = feature0.device, dtype=feature0.dtype) + if flow is None: + # If flow is None, assume feature0 and feature1 are aligned + coords = torch.meshgrid( + ( + torch.linspace(-1 + 1 / h, 1 - 1 / h, h, device=feature0.device), + torch.linspace(-1 + 1 / w, 1 - 1 / w, w, device=feature0.device), + )) + coords = torch.stack((coords[1], coords[0]), dim=-1)[ + None + ].expand(B, h, w, 2) + else: + coords = flow.permute(0,2,3,1) # If using flow, sample around flow target. + local_window = torch.meshgrid( + ( + torch.linspace(-2*local_radius/h, 2*local_radius/h, 2*r+1, device=feature0.device), + torch.linspace(-2*local_radius/w, 2*local_radius/w, 2*r+1, device=feature0.device), + )) + local_window = torch.stack((local_window[1], local_window[0]), dim=-1)[ + None + ].expand(1, 2*r+1, 2*r+1, 2).reshape(1, (2*r+1)**2, 2) + for _ in range(B): + with torch.no_grad(): + local_window_coords = (coords[_,:,:,None]+local_window[:,None,None]).reshape(1,h,w*(2*r+1)**2,2) + window_feature = F.grid_sample( + feature1[_:_+1], local_window_coords, padding_mode=padding_mode, align_corners=False, mode = sample_mode, # + ) + window_feature = window_feature.reshape(c,h,w,(2*r+1)**2) + corr[_] = (feature0[_,...,None]/(c**.5)*window_feature).sum(dim=0).permute(2,0,1) + return corr \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/utils/transforms.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/utils/transforms.py new file mode 100644 index 0000000000000000000000000000000000000000..ea6476bd816a31df36f7d1b5417853637b65474b --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/utils/transforms.py @@ -0,0 +1,118 @@ +from typing import Dict +import numpy as np +import torch +import kornia.augmentation as K +from kornia.geometry.transform import warp_perspective + +# Adapted from Kornia +class GeometricSequential: + def __init__(self, *transforms, align_corners=True) -> None: + self.transforms = transforms + self.align_corners = align_corners + + def __call__(self, x, mode="bilinear"): + b, c, h, w = x.shape + M = torch.eye(3, device=x.device)[None].expand(b, 3, 3) + for t in self.transforms: + if np.random.rand() < t.p: + M = M.matmul( + t.compute_transformation(x, t.generate_parameters((b, c, h, w)), None) + ) + return ( + warp_perspective( + x, M, dsize=(h, w), mode=mode, align_corners=self.align_corners + ), + M, + ) + + def apply_transform(self, x, M, mode="bilinear"): + b, c, h, w = x.shape + return warp_perspective( + x, M, dsize=(h, w), align_corners=self.align_corners, mode=mode + ) + + +class RandomPerspective(K.RandomPerspective): + def generate_parameters(self, batch_shape: torch.Size) -> Dict[str, torch.Tensor]: + distortion_scale = torch.as_tensor( + self.distortion_scale, device=self._device, dtype=self._dtype + ) + return self.random_perspective_generator( + batch_shape[0], + batch_shape[-2], + batch_shape[-1], + distortion_scale, + self.same_on_batch, + self.device, + self.dtype, + ) + + def random_perspective_generator( + self, + batch_size: int, + height: 
int, + width: int, + distortion_scale: torch.Tensor, + same_on_batch: bool = False, + device: torch.device = torch.device("cpu"), + dtype: torch.dtype = torch.float32, + ) -> Dict[str, torch.Tensor]: + r"""Get parameters for ``perspective`` for a random perspective transform. + + Args: + batch_size (int): the tensor batch size. + height (int) : height of the image. + width (int): width of the image. + distortion_scale (torch.Tensor): it controls the degree of distortion and ranges from 0 to 1. + same_on_batch (bool): apply the same transformation across the batch. Default: False. + device (torch.device): the device on which the random numbers will be generated. Default: cpu. + dtype (torch.dtype): the data type of the generated random numbers. Default: float32. + + Returns: + params Dict[str, torch.Tensor]: parameters to be passed for transformation. + - start_points (torch.Tensor): element-wise perspective source areas with a shape of (B, 4, 2). + - end_points (torch.Tensor): element-wise perspective target areas with a shape of (B, 4, 2). + + Note: + The generated random numbers are not reproducible across different devices and dtypes. + """ + if not (distortion_scale.dim() == 0 and 0 <= distortion_scale <= 1): + raise AssertionError( + f"'distortion_scale' must be a scalar within [0, 1]. Got {distortion_scale}." + ) + if not ( + type(height) is int and height > 0 and type(width) is int and width > 0 + ): + raise AssertionError( + f"'height' and 'width' must be integers. Got {height}, {width}." + ) + + start_points: torch.Tensor = torch.tensor( + [[[0.0, 0], [width - 1, 0], [width - 1, height - 1], [0, height - 1]]], + device=distortion_scale.device, + dtype=distortion_scale.dtype, + ).expand(batch_size, -1, -1) + + # generate random offset not larger than half of the image + fx = distortion_scale * width / 2 + fy = distortion_scale * height / 2 + + factor = torch.stack([fx, fy], dim=0).view(-1, 1, 2) + offset = (torch.rand_like(start_points) - 0.5) * 2 + end_points = start_points + factor * offset + + return dict(start_points=start_points, end_points=end_points) + + + +class RandomErasing: + def __init__(self, p = 0., scale = 0.) 
-> None: + self.p = p + self.scale = scale + self.random_eraser = K.RandomErasing(scale = (0.02, scale), p = p) + def __call__(self, image, depth): + if self.p > 0: + image = self.random_eraser(image) + depth = self.random_eraser(depth, params=self.random_eraser._params) + return image, depth + \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/roma/utils/utils.py b/imcui/third_party/MatchAnything/third_party/ROMA/roma/utils/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..a6691458ea801390c89ba4db47a2b53283c97691 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/roma/utils/utils.py @@ -0,0 +1,661 @@ +import warnings +import numpy as np +import cv2 +import math +import torch +from torchvision import transforms +from torchvision.transforms.functional import InterpolationMode +import torch.nn.functional as F +from PIL import Image + +def resize_by_longest_edge_and_stretch(img, aim_long_edge_size): + """ + img: C * H * W, torch.tensor + aim_long_edge_size: int + """ + c, h, w = img.shape + + hs, ws = aim_long_edge_size, aim_long_edge_size + return resize_and_padding(img, (hs, ws), padding=False) + +def resize_by_longest_edge_and_padding(img, aim_long_edge_size): + """ + img: C * H * W, torch.tensor + aim_long_edge_size: int + """ + c, h, w = img.shape + + scale = aim_long_edge_size / max(h, w) + hs, ws = round(h * scale), round(w * scale) + return resize_and_padding(img, (hs, ws)) + +def resize_and_padding(img, resize, padding=True): + """ + img: C * H * W, torch.tensor + resize: aim (h, w) + """ + c, h_org, w_org = img.shape + # img_resized = transforms.Resize(resize, InterpolationMode.BILINEAR)(img) + img_resized = transforms.Resize(resize, InterpolationMode.BICUBIC)(img) + + if padding: + img_padded = torch.zeros((c, max(resize), max(resize)), device=img.device) + img_padded[:, :resize[0], :resize[1]] = img_resized + return img_padded + else: + return img_resized + +def recover_pose(E, kpts0, kpts1, K0, K1, mask): + best_num_inliers = 0 + K0inv = np.linalg.inv(K0[:2,:2]) + K1inv = np.linalg.inv(K1[:2,:2]) + + kpts0_n = (K0inv @ (kpts0-K0[None,:2,2]).T).T + kpts1_n = (K1inv @ (kpts1-K1[None,:2,2]).T).T + + for _E in np.split(E, len(E) / 3): + n, R, t, _ = cv2.recoverPose(_E, kpts0_n, kpts1_n, np.eye(3), 1e9, mask=mask) + if n > best_num_inliers: + best_num_inliers = n + ret = (R, t, mask.ravel() > 0) + return ret + + + +# Code taken from https://github.com/PruneTruong/DenseMatching/blob/40c29a6b5c35e86b9509e65ab0cd12553d998e5f/validation/utils_pose_estimation.py +# --- GEOMETRY --- +def estimate_pose(kpts0, kpts1, K0, K1, norm_thresh, conf=0.99999): + if len(kpts0) < 5: + return None + K0inv = np.linalg.inv(K0[:2,:2]) + K1inv = np.linalg.inv(K1[:2,:2]) + + kpts0 = (K0inv @ (kpts0-K0[None,:2,2]).T).T + kpts1 = (K1inv @ (kpts1-K1[None,:2,2]).T).T + E, mask = cv2.findEssentialMat( + kpts0, kpts1, np.eye(3), threshold=norm_thresh, prob=conf + ) + + ret = None + if E is not None: + best_num_inliers = 0 + + for _E in np.split(E, len(E) / 3): + n, R, t, _ = cv2.recoverPose(_E, kpts0, kpts1, np.eye(3), 1e9, mask=mask) + if n > best_num_inliers: + best_num_inliers = n + ret = (R, t, mask.ravel() > 0) + return ret + +def estimate_pose_uncalibrated(kpts0, kpts1, K0, K1, norm_thresh, conf=0.99999): + if len(kpts0) < 5: + return None + method = cv2.USAC_ACCURATE + F, mask = cv2.findFundamentalMat( + kpts0, kpts1, ransacReprojThreshold=norm_thresh, confidence=conf, method=method, maxIters=10000 + ) + E = K1.T@F@K0 + ret 
= None
+    if E is not None:
+        best_num_inliers = 0
+        K0inv = np.linalg.inv(K0[:2,:2])
+        K1inv = np.linalg.inv(K1[:2,:2])
+
+        kpts0_n = (K0inv @ (kpts0-K0[None,:2,2]).T).T
+        kpts1_n = (K1inv @ (kpts1-K1[None,:2,2]).T).T
+
+        for _E in np.split(E, len(E) / 3):
+            n, R, t, _ = cv2.recoverPose(_E, kpts0_n, kpts1_n, np.eye(3), 1e9, mask=mask)
+            if n > best_num_inliers:
+                best_num_inliers = n
+                ret = (R, t, mask.ravel() > 0)
+    return ret
+
+def unnormalize_coords(x_n,h,w):
+    x = torch.stack(
+        (w * (x_n[..., 0] + 1) / 2, h * (x_n[..., 1] + 1) / 2), dim=-1
+    )  # [-1+1/h, 1-1/h] -> [0.5, h-0.5]
+    return x
+
+
+def rotate_intrinsic(K, n):
+    base_rot = np.array([[0, 1, 0], [-1, 0, 0], [0, 0, 1]])
+    rot = np.linalg.matrix_power(base_rot, n)
+    return rot @ K
+
+
+def rotate_pose_inplane(i_T_w, rot):
+    rotation_matrices = [
+        np.array(
+            [
+                [np.cos(r), -np.sin(r), 0.0, 0.0],
+                [np.sin(r), np.cos(r), 0.0, 0.0],
+                [0.0, 0.0, 1.0, 0.0],
+                [0.0, 0.0, 0.0, 1.0],
+            ],
+            dtype=np.float32,
+        )
+        for r in [np.deg2rad(d) for d in (0, 270, 180, 90)]
+    ]
+    return np.dot(rotation_matrices[rot], i_T_w)
+
+
+def scale_intrinsics(K, scales):
+    scales = np.diag([1.0 / scales[0], 1.0 / scales[1], 1.0])
+    return np.dot(scales, K)
+
+
+def to_homogeneous(points):
+    return np.concatenate([points, np.ones_like(points[:, :1])], axis=-1)
+
+
+def angle_error_mat(R1, R2):
+    cos = (np.trace(np.dot(R1.T, R2)) - 1) / 2
+    cos = np.clip(cos, -1.0, 1.0)  # numerical errors can make it out of bounds
+    return np.rad2deg(np.abs(np.arccos(cos)))
+
+
+def angle_error_vec(v1, v2):
+    n = np.linalg.norm(v1) * np.linalg.norm(v2)
+    return np.rad2deg(np.arccos(np.clip(np.dot(v1, v2) / n, -1.0, 1.0)))
+
+
+def compute_pose_error(T_0to1, R, t):
+    R_gt = T_0to1[:3, :3]
+    t_gt = T_0to1[:3, 3]
+    error_t = angle_error_vec(t.squeeze(), t_gt)
+    error_t = np.minimum(error_t, 180 - error_t)  # ambiguity of E estimation
+    error_R = angle_error_mat(R, R_gt)
+    return error_t, error_R
+
+
+def pose_auc(errors, thresholds):
+    sort_idx = np.argsort(errors)
+    errors = np.array(errors.copy())[sort_idx]
+    recall = (np.arange(len(errors)) + 1) / len(errors)
+    errors = np.r_[0.0, errors]
+    recall = np.r_[0.0, recall]
+    aucs = []
+    for t in thresholds:
+        last_index = np.searchsorted(errors, t)
+        r = np.r_[recall[:last_index], recall[last_index - 1]]
+        e = np.r_[errors[:last_index], t]
+        aucs.append(np.trapz(r, x=e) / t)
+    return aucs
+
+
+# From Patch2Pix https://github.com/GrumpyZhou/patch2pix
+def get_depth_tuple_transform_ops_nearest_exact(resize=None):
+    ops = []
+    if resize:
+        ops.append(TupleResizeNearestExact(resize))
+    return TupleCompose(ops)
+
+def get_depth_tuple_transform_ops(resize=None, normalize=True, unscale=False):
+    ops = []
+    if resize:
+        ops.append(TupleResize(resize, mode=InterpolationMode.BILINEAR))
+    return TupleCompose(ops)
+
+
+def get_tuple_transform_ops(resize=None, normalize=True, unscale=False, clahe = False, colorjiggle_params = None):
+    ops = []
+    if resize:
+        ops.append(TupleResize(resize))
+    ops.append(TupleToTensorScaled())
+    if normalize:
+        ops.append(
+            TupleNormalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
+        )  # Imagenet mean/std
+    return TupleCompose(ops)
+
+class ToTensorScaled(object):
+    """Convert a RGB PIL Image to a CHW ordered Tensor, scale the range to [0, 1]"""
+
+    def __call__(self, im):
+        if not isinstance(im, torch.Tensor):
+            im = np.array(im, dtype=np.float32).transpose((2, 0, 1))
+            im /= 255.0
+            return torch.from_numpy(im)
+        else:
+            return im
+
+    def __repr__(self):
+        return "ToTensorScaled(./255)"
+
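+# Usage sketch (hypothetical file names): the tuple ops above are applied to
+# pairs of PIL images, e.g.
+#   ops = get_tuple_transform_ops(resize=(560, 560), normalize=True)
+#   im_A, im_B = ops([Image.open("im_A.png"), Image.open("im_B.png")])
+# yielding CHW float tensors scaled to [0, 1] and then ImageNet-normalized.
+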
+class TupleToTensorScaled(object): + def __init__(self): + self.to_tensor = ToTensorScaled() + + def __call__(self, im_tuple): + return [self.to_tensor(im) for im in im_tuple] + + def __repr__(self): + return "TupleToTensorScaled(./255)" + + +class ToTensorUnscaled(object): + """Convert a RGB PIL Image to a CHW ordered Tensor""" + + def __call__(self, im): + return torch.from_numpy(np.array(im, dtype=np.float32).transpose((2, 0, 1))) + + def __repr__(self): + return "ToTensorUnscaled()" + + +class TupleToTensorUnscaled(object): + """Convert a RGB PIL Image to a CHW ordered Tensor""" + + def __init__(self): + self.to_tensor = ToTensorUnscaled() + + def __call__(self, im_tuple): + return [self.to_tensor(im) for im in im_tuple] + + def __repr__(self): + return "TupleToTensorUnscaled()" + +class TupleResizeNearestExact: + def __init__(self, size): + self.size = size + def __call__(self, im_tuple): + return [F.interpolate(im, size = self.size, mode = 'nearest-exact') for im in im_tuple] + + def __repr__(self): + return "TupleResizeNearestExact(size={})".format(self.size) + + +class TupleResize(object): + def __init__(self, size, mode=InterpolationMode.BICUBIC): + self.size = size + self.resize = transforms.Resize(size, mode) + def __call__(self, im_tuple): + return [self.resize(im) for im in im_tuple] + + def __repr__(self): + return "TupleResize(size={})".format(self.size) + +class Normalize: + def __call__(self,im): + mean = im.mean(dim=(1,2), keepdims=True) + std = im.std(dim=(1,2), keepdims=True) + return (im-mean)/std + + +class TupleNormalize(object): + def __init__(self, mean, std): + self.mean = mean + self.std = std + self.normalize = transforms.Normalize(mean=mean, std=std) + + def __call__(self, im_tuple): + c,h,w = im_tuple[0].shape + if c > 3: + warnings.warn(f"Number of channels c={c} > 3, assuming first 3 are rgb") + return [self.normalize(im[:3]) for im in im_tuple] + + def __repr__(self): + return "TupleNormalize(mean={}, std={})".format(self.mean, self.std) + + +class TupleCompose(object): + def __init__(self, transforms): + self.transforms = transforms + + def __call__(self, im_tuple): + for t in self.transforms: + im_tuple = t(im_tuple) + return im_tuple + + def __repr__(self): + format_string = self.__class__.__name__ + "(" + for t in self.transforms: + format_string += "\n" + format_string += " {0}".format(t) + format_string += "\n)" + return format_string + +@torch.no_grad() +def cls_to_flow(cls, deterministic_sampling = True): + B,C,H,W = cls.shape + device = cls.device + res = round(math.sqrt(C)) + G = torch.meshgrid(*[torch.linspace(-1+1/res, 1-1/res, steps = res, device = device) for _ in range(2)]) + G = torch.stack([G[1],G[0]],dim=-1).reshape(C,2) + if deterministic_sampling: + sampled_cls = cls.max(dim=1).indices + else: + sampled_cls = torch.multinomial(cls.permute(0,2,3,1).reshape(B*H*W,C).softmax(dim=-1), 1).reshape(B,H,W) + flow = G[sampled_cls] + return flow + +@torch.no_grad() +def cls_to_flow_refine(cls): + B,C,H,W = cls.shape + device = cls.device + res = round(math.sqrt(C)) + G = torch.meshgrid(*[torch.linspace(-1+1/res, 1-1/res, steps = res, device = device) for _ in range(2)]) + G = torch.stack([G[1],G[0]],dim=-1).reshape(C,2) + cls = cls.softmax(dim=1) + mode = cls.max(dim=1).indices + + index = torch.stack((mode-1, mode, mode+1, mode - res, mode + res), dim = 1).clamp(0,C - 1).long() + neighbours = torch.gather(cls, dim = 1, index = index)[...,None] + flow = neighbours[:,0] * G[index[:,0]] + neighbours[:,1] * G[index[:,1]] + neighbours[:,2] * 
G[index[:,2]] + neighbours[:,3] * G[index[:,3]] + neighbours[:,4] * G[index[:,4]] + tot_prob = neighbours.sum(dim=1) + flow = flow / tot_prob + return flow + + +def get_gt_warp(depth1, depth2, T_1to2, K1, K2, depth_interpolation_mode = 'bilinear', relative_depth_error_threshold = 0.05, H = None, W = None): + + if H is None: + B,H,W = depth1.shape + else: + B = depth1.shape[0] + with torch.no_grad(): + x1_n = torch.meshgrid( + *[ + torch.linspace( + -1 + 1 / n, 1 - 1 / n, n, device=depth1.device + ) + for n in (B, H, W) + ] + ) + x1_n = torch.stack((x1_n[2], x1_n[1]), dim=-1).reshape(B, H * W, 2) + mask, x2 = warp_kpts( + x1_n.double(), + depth1.double(), + depth2.double(), + T_1to2.double(), + K1.double(), + K2.double(), + depth_interpolation_mode = depth_interpolation_mode, + relative_depth_error_threshold = relative_depth_error_threshold, + ) + prob = mask.float().reshape(B, H, W) + x2 = x2.reshape(B, H, W, 2) + return x2, prob + +@torch.no_grad() +def warp_kpts(kpts0, depth0, depth1, T_0to1, K0, K1, smooth_mask = False, return_relative_depth_error = False, depth_interpolation_mode = "bilinear", relative_depth_error_threshold = 0.05): + """Warp kpts0 from I0 to I1 with depth, K and Rt + Also check covisibility and depth consistency. + Depth is consistent if relative error < 0.2 (hard-coded). + # https://github.com/zju3dv/LoFTR/blob/94e98b695be18acb43d5d3250f52226a8e36f839/src/loftr/utils/geometry.py adapted from here + Args: + kpts0 (torch.Tensor): [N, L, 2] - , should be normalized in (-1,1) + depth0 (torch.Tensor): [N, H, W], + depth1 (torch.Tensor): [N, H, W], + T_0to1 (torch.Tensor): [N, 3, 4], + K0 (torch.Tensor): [N, 3, 3], + K1 (torch.Tensor): [N, 3, 3], + Returns: + calculable_mask (torch.Tensor): [N, L] + warped_keypoints0 (torch.Tensor): [N, L, 2] + """ + ( + n, + h, + w, + ) = depth0.shape + if depth_interpolation_mode == "combined": + # Inspired by approach in inloc, try to fill holes from bilinear interpolation by nearest neighbour interpolation + if smooth_mask: + raise NotImplementedError("Combined bilinear and NN warp not implemented") + valid_bilinear, warp_bilinear = warp_kpts(kpts0, depth0, depth1, T_0to1, K0, K1, + smooth_mask = smooth_mask, + return_relative_depth_error = return_relative_depth_error, + depth_interpolation_mode = "bilinear", + relative_depth_error_threshold = relative_depth_error_threshold) + valid_nearest, warp_nearest = warp_kpts(kpts0, depth0, depth1, T_0to1, K0, K1, + smooth_mask = smooth_mask, + return_relative_depth_error = return_relative_depth_error, + depth_interpolation_mode = "nearest-exact", + relative_depth_error_threshold = relative_depth_error_threshold) + nearest_valid_bilinear_invalid = (~valid_bilinear).logical_and(valid_nearest) + warp = warp_bilinear.clone() + warp[nearest_valid_bilinear_invalid] = warp_nearest[nearest_valid_bilinear_invalid] + valid = valid_bilinear | valid_nearest + return valid, warp + + + kpts0_depth = F.grid_sample(depth0[:, None], kpts0[:, :, None], mode = depth_interpolation_mode, align_corners=False)[ + :, 0, :, 0 + ] + kpts0 = torch.stack( + (w * (kpts0[..., 0] + 1) / 2, h * (kpts0[..., 1] + 1) / 2), dim=-1 + ) # [-1+1/h, 1-1/h] -> [0.5, h-0.5] + # Sample depth, get calculable_mask on depth != 0 + nonzero_mask = kpts0_depth != 0 + + # Unproject + kpts0_h = ( + torch.cat([kpts0, torch.ones_like(kpts0[:, :, [0]])], dim=-1) + * kpts0_depth[..., None] + ) # (N, L, 3) + kpts0_n = K0.inverse() @ kpts0_h.transpose(2, 1) # (N, 3, L) + kpts0_cam = kpts0_n + + # Rigid Transform + w_kpts0_cam = T_0to1[:, :3, :3] @ 
kpts0_cam + T_0to1[:, :3, [3]] # (N, 3, L) + w_kpts0_depth_computed = w_kpts0_cam[:, 2, :] + + # Project + w_kpts0_h = (K1 @ w_kpts0_cam).transpose(2, 1) # (N, L, 3) + w_kpts0 = w_kpts0_h[:, :, :2] / ( + w_kpts0_h[:, :, [2]] + 1e-4 + ) # (N, L, 2), +1e-4 to avoid zero depth + + # Covisible Check + h, w = depth1.shape[1:3] + covisible_mask = ( + (w_kpts0[:, :, 0] > 0) + * (w_kpts0[:, :, 0] < w - 1) + * (w_kpts0[:, :, 1] > 0) + * (w_kpts0[:, :, 1] < h - 1) + ) + w_kpts0 = torch.stack( + (2 * w_kpts0[..., 0] / w - 1, 2 * w_kpts0[..., 1] / h - 1), dim=-1 + ) # from [0.5,h-0.5] -> [-1+1/h, 1-1/h] + # w_kpts0[~covisible_mask, :] = -5 # xd + + w_kpts0_depth = F.grid_sample( + depth1[:, None], w_kpts0[:, :, None], mode=depth_interpolation_mode, align_corners=False + )[:, 0, :, 0] + + relative_depth_error = ( + (w_kpts0_depth - w_kpts0_depth_computed) / w_kpts0_depth + ).abs() + if not smooth_mask: + consistent_mask = relative_depth_error < relative_depth_error_threshold + else: + consistent_mask = (-relative_depth_error/smooth_mask).exp() + valid_mask = nonzero_mask * covisible_mask * consistent_mask + if return_relative_depth_error: + return relative_depth_error, w_kpts0 + else: + return valid_mask, w_kpts0 + +imagenet_mean = torch.tensor([0.485, 0.456, 0.406]) +imagenet_std = torch.tensor([0.229, 0.224, 0.225]) + + +def numpy_to_pil(x: np.ndarray): + """ + Args: + x: Assumed to be of shape (h,w,c) + """ + if isinstance(x, torch.Tensor): + x = x.detach().cpu().numpy() + if x.max() <= 1.01: + x *= 255 + x = x.astype(np.uint8) + return Image.fromarray(x) + + +def tensor_to_pil(x, unnormalize=False): + if unnormalize: + x = x * (imagenet_std[:, None, None].to(x.device)) + (imagenet_mean[:, None, None].to(x.device)) + x = x.detach().permute(1, 2, 0).cpu().numpy() + x = np.clip(x, 0.0, 1.0) + return numpy_to_pil(x) + + +def to_cuda(batch): + for key, value in batch.items(): + if isinstance(value, torch.Tensor): + batch[key] = value.cuda() + return batch + + +def to_cpu(batch): + for key, value in batch.items(): + if isinstance(value, torch.Tensor): + batch[key] = value.cpu() + return batch + + +def get_pose(calib): + w, h = np.array(calib["imsize"])[0] + return np.array(calib["K"]), np.array(calib["R"]), np.array(calib["T"]).T, h, w + + +def compute_relative_pose(R1, t1, R2, t2): + rots = R2 @ (R1.T) + trans = -rots @ t1 + t2 + return rots, trans + +@torch.no_grad() +def reset_opt(opt): + for group in opt.param_groups: + for p in group['params']: + if p.requires_grad: + state = opt.state[p] + # State initialization + + # Exponential moving average of gradient values + state['exp_avg'] = torch.zeros_like(p) + # Exponential moving average of squared gradient values + state['exp_avg_sq'] = torch.zeros_like(p) + # Exponential moving average of gradient difference + state['exp_avg_diff'] = torch.zeros_like(p) + + +def flow_to_pixel_coords(flow, h1, w1): + flow = ( + torch.stack( + ( + w1 * (flow[..., 0] + 1) / 2, + h1 * (flow[..., 1] + 1) / 2, + ), + axis=-1, + ) + ) + return flow + +to_pixel_coords = flow_to_pixel_coords # just an alias + +def flow_to_normalized_coords(flow, h1, w1): + flow = ( + torch.stack( + ( + 2 * (flow[..., 0]) / w1 - 1, + 2 * (flow[..., 1]) / h1 - 1, + ), + axis=-1, + ) + ) + return flow + +to_normalized_coords = flow_to_normalized_coords # just an alias + +def warp_to_pixel_coords(warp, h1, w1, h2, w2): + warp1 = warp[..., :2] + warp1 = ( + torch.stack( + ( + w1 * (warp1[..., 0] + 1) / 2, + h1 * (warp1[..., 1] + 1) / 2, + ), + axis=-1, + ) + ) + warp2 = warp[..., 2:] + warp2 = ( + 
torch.stack( + ( + w2 * (warp2[..., 0] + 1) / 2, + h2 * (warp2[..., 1] + 1) / 2, + ), + axis=-1, + ) + ) + return torch.cat((warp1,warp2), dim=-1) + + + +def signed_point_line_distance(point, line, eps: float = 1e-9): + r"""Return the distance from points to lines. + + Args: + point: (possibly homogeneous) points :math:`(*, N, 2 or 3)`. + line: lines coefficients :math:`(a, b, c)` with shape :math:`(*, N, 3)`, where :math:`ax + by + c = 0`. + eps: Small constant for safe sqrt. + + Returns: + the computed distance with shape :math:`(*, N)`. + """ + + if not point.shape[-1] in (2, 3): + raise ValueError(f"pts must be a (*, 2 or 3) tensor. Got {point.shape}") + + if not line.shape[-1] == 3: + raise ValueError(f"lines must be a (*, 3) tensor. Got {line.shape}") + + numerator = (line[..., 0] * point[..., 0] + line[..., 1] * point[..., 1] + line[..., 2]) + denominator = line[..., :2].norm(dim=-1) + + return numerator / (denominator + eps) + + +def signed_left_to_right_epipolar_distance(pts1, pts2, Fm): + r"""Return one-sided epipolar distance for correspondences given the fundamental matrix. + + This method measures the distance from points in the right images to the epilines + of the corresponding points in the left images as they reflect in the right images. + + Args: + pts1: correspondences from the left images with shape + :math:`(*, N, 2 or 3)`. If they are not homogeneous, converted automatically. + pts2: correspondences from the right images with shape + :math:`(*, N, 2 or 3)`. If they are not homogeneous, converted automatically. + Fm: Fundamental matrices with shape :math:`(*, 3, 3)`. Called Fm to + avoid ambiguity with torch.nn.functional. + + Returns: + the computed Symmetrical distance with shape :math:`(*, N)`. + """ + import kornia + if (len(Fm.shape) < 3) or not Fm.shape[-2:] == (3, 3): + raise ValueError(f"Fm must be a (*, 3, 3) tensor. 
Got {Fm.shape}") + + if pts1.shape[-1] == 2: + pts1 = kornia.geometry.convert_points_to_homogeneous(pts1) + + F_t = Fm.transpose(dim0=-2, dim1=-1) + line1_in_2 = pts1 @ F_t + + return signed_point_line_distance(pts2, line1_in_2) + +def get_grid(b, h, w, device): + grid = torch.meshgrid( + *[ + torch.linspace(-1 + 1 / n, 1 - 1 / n, n, device=device) + for n in (b, h, w) + ] + ) + grid = torch.stack((grid[2], grid[1]), dim=-1).reshape(b, h, w, 2) + return grid diff --git a/imcui/third_party/MatchAnything/third_party/ROMA/setup.py b/imcui/third_party/MatchAnything/third_party/ROMA/setup.py new file mode 100644 index 0000000000000000000000000000000000000000..ae777c0e5a41f0e4b03a838d19bc9a2bb04d4617 --- /dev/null +++ b/imcui/third_party/MatchAnything/third_party/ROMA/setup.py @@ -0,0 +1,9 @@ +from setuptools import setup + +setup( + name="roma", + packages=["roma"], + version="0.0.1", + author="Johan Edstedt", + install_requires=open("requirements.txt", "r").read().split("\n"), +) diff --git a/imcui/third_party/MatchAnything/tools/__init__.py b/imcui/third_party/MatchAnything/tools/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/imcui/third_party/MatchAnything/tools/evaluate_datasets.py b/imcui/third_party/MatchAnything/tools/evaluate_datasets.py new file mode 100644 index 0000000000000000000000000000000000000000..9ebbf2aa096367611491f3846734ff7cd9320928 --- /dev/null +++ b/imcui/third_party/MatchAnything/tools/evaluate_datasets.py @@ -0,0 +1,240 @@ +import argparse +import pytorch_lightning as pl +from tqdm import tqdm +import os.path as osp +import numpy as np +from loguru import logger +from PIL import Image +Image.MAX_IMAGE_PIXELS = None +import torch + +from torch.utils.data import ( + DataLoader, + ConcatDataset) + +from pathlib import Path +import sys +sys.path.append(str(Path(__file__).parent.parent.resolve())) + +from src.lightning.lightning_loftr import PL_LoFTR +from src.config.default import get_cfg_defaults +from src.utils.dataset import dict_to_cuda +from src.utils.metrics import estimate_homo, estimate_pose, relative_pose_error +from src.utils.homography_utils import warp_points + +from src.datasets.common_data_pair import CommonDataset +from src.utils.metrics import error_auc +from tools_utils.plot import plot_matches, warp_img_and_blend, epipolar_error + +from pairs_match_and_propogation.utils.data_io import save_h5 + +def parse_args(): + parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser.add_argument( + 'main_cfg_path', type=str, help='main config path') + parser.add_argument( + '--ckpt_path', type=str, default="", help='path to the checkpoint') + parser.add_argument( + '--thr', type=float, default=0.1, help='modify the coarse-level matching threshold.') + parser.add_argument( + '--method', type=str, default='loftr@-@ransac_affine', help='choose method') + parser.add_argument( + '--imgresize', type=int, default=None) + parser.add_argument( + '--npe', action='store_true', default=False, help='') + parser.add_argument( + '--npe2', action='store_true', default=False, help='') + parser.add_argument( + '--ckpt32', action='store_true', default=False, help='') + parser.add_argument( + '--fp32', action='store_true', default=False, help='') + + # Input: + parser.add_argument( + '--data_root', type=str, default="data/test_data") + + parser.add_argument( + '--npz_root', type=str, default="") + + parser.add_argument( + '--npz_list_path', type=str, 
default="") + + parser.add_argument( + '--plot_matches', action='store_true') + + parser.add_argument( + '--plot_matches_alpha', type=float, default=0.2) + + parser.add_argument( + '--plot_matches_color', type=str, default='error', choices=['green', 'error', 'conf']) + + parser.add_argument( + '--plot_align', action='store_true') + parser.add_argument( + '--plot_refinement', action='store_true') + parser.add_argument( + '--output_path', type=str, default="") + + parser.add_argument( + '--rigid_ransac_thr', type=float, default=3.0) + parser.add_argument( + '--elastix_ransac_thr', type=float, default=40.0) + parser.add_argument( + '--comment', type=str, default="") + + return parser.parse_args() + +def array_rgb2gray(img): + return (img * np.array([0.2989, 0.5870, 0.1140])[None, None]).sum(axis=-1) + +if __name__ == '__main__': + args = parse_args() + + # Load data: + datasets = [] + sub_dataset_name = Path(args.npz_list_path).parent.name + with open(args.npz_list_path, 'r') as f: + npz_names = [name.split()[0] for name in f.readlines()] + npz_names = [f'{n}.npz' for n in npz_names] + data_root = args.data_root + + vis_output_path = args.output_path + Path(vis_output_path).mkdir(parents=True, exist_ok=True) + + ########################## + config = get_cfg_defaults() + method, estimator = (args.method).split('@-@')[0], (args.method).split('@-@')[1] + if method != 'None': + config.merge_from_file(args.main_cfg_path) + + pl.seed_everything(config.TRAINER.SEED) + config.METHOD = method + # Config overwrite: + if config.LOFTR.COARSE.ROPE: + assert config.DATASET.NPE_NAME is not None + if config.DATASET.NPE_NAME is not None: + config.LOFTR.COARSE.NPE = [832, 832, args.imgresize, args.imgresize] + + if "visible_sar" in args.npz_list_path: + config.DATASET.RESIZE_BY_STRETCH = True + + if args.thr is not None: + config.LOFTR.MATCH_COARSE.THR = args.thr + + matcher = PL_LoFTR(config, pretrained_ckpt=args.ckpt_path, test_mode=True).matcher + matcher.eval().cuda() + else: + matcher = None + + for npz_name in tqdm(npz_names): + npz_path = osp.join(args.npz_root, npz_name) + try: + np.load(npz_path, allow_pickle=True) + except: + logger.info(f"{npz_path} cannot be opened!") + continue + + datasets.append( + CommonDataset(data_root, npz_path, mode='test', min_overlap_score=-1, img_resize=args.imgresize, df=None, img_padding=False, depth_padding=True, testNpairs=None, fp16=False, load_origin_rgb=True, read_gray=True, normalize_img=False, resize_by_stretch=config.DATASET.RESIZE_BY_STRETCH, gt_matches_padding_n=100)) + + concat_dataset = ConcatDataset(datasets) + + dataloader = DataLoader(concat_dataset, num_workers=4, pin_memory=True, batch_size=1, drop_last=False) + errors = [] # distance + result_dict = {} + pose_error = [] + + eval_mode = 'gt_homo' + for id, data in enumerate(tqdm(dataloader)): + img0, img1 = (data['image0_rgb_origin'] * 255.)[0].permute(1,2,0).numpy().round().squeeze(), (data['image1_rgb_origin'] * 255.)[0].permute(1,2,0).numpy().round().squeeze() + img_1_h, img_1_w = img1.shape[:2] + pair_name = '@-@'.join([data['pair_names'][0][0].split('/', 1)[1], data['pair_names'][1][0].split('/', 1)[1]]).replace('/', '_') + homography_gt = data['homography'][0].numpy() + if 'gt_2D_matches' in data and data["gt_2D_matches"].shape[-1] == 4: + gt_2D_matches = data["gt_2D_matches"][0].numpy() # N * 4 + eval_coord = gt_2D_matches[:, :2] + gt_points = gt_2D_matches[:, 2:] + eval_mode = 'gt_match' + ransac_mode = 'homo' if 'FIRE' in args.npz_list_path else 'affine' + elif homography_gt.sum() != 0: + h, w 
= img0.shape[0], img0.shape[1] + eval_coord = np.array([[0, 0], [0, h], [w, 0], [w, h]]) + ransac_mode = 'affine' + assert homography_gt.sum() != 0, f"Evaluation should either using gt match, or using gt homography warp." + else: + eval_mode = 'pose_error' + K0 = data['K0'].cpu().numpy()[0] + K1 = data['K1'].cpu().numpy()[0] + T_0to1 = data['T_0to1'].cpu().numpy()[0] + estimator = 'pose' + + # Perform matching + if matcher is not None: + if eval_mode in ['gt_match']: + data.update({'query_points': torch.from_numpy(eval_coord)[None]}) + batch = dict_to_cuda(data) + + with torch.no_grad(): + with torch.autocast(enabled=config.LOFTR.FP16, device_type='cuda'): + matcher(batch) + + mkpts0 = batch['mkpts0_f'].cpu().numpy() + mkpts1 = batch['mkpts1_f'].cpu().numpy() + mconf = batch['mconf'].cpu().numpy() + + # Get warpped points by homography: + if estimator == "ransac_affine": + H_est, _ = estimate_homo(mkpts0, mkpts1, thresh=args.rigid_ransac_thr, mode=ransac_mode) + # Warp points for eval: + eval_points_warpped = warp_points(eval_coord, H_est, inverse=False) + + # Warp images and blend: + if args.plot_align: + warp_img_and_blend(img0, img1, H_est, save_path=Path(vis_output_path)/'aligned'/f"{pair_name}_{args.method}.png", alpha=0.5, inverse=True) + elif estimator == 'pose': + pose = estimate_pose(mkpts0, mkpts1, K0, K1, args.rigid_ransac_thr, conf=0.99999) + else: + raise NotImplementedError + else: + raise NotImplementedError + + if eval_mode == 'pose_error': + if pose is None: + t_err, R_err = np.inf, np.inf + else: + R, t, inliers = pose + t_err, R_err = relative_pose_error(T_0to1, R, t, ignore_gt_t_thr=0.0) + error = max(t_err, R_err) + errors.append(error) + match_error = epipolar_error(mkpts0, mkpts1, T_0to1, K0, K1) + plot_text = f"R_err_{R_err:.2}_t_err_{t_err:.2}" + thr = 3e-3 + print(f"max_error:{error}") + else: + if eval_mode == 'gt_homo': + gt_points = warp_points(eval_coord, homography_gt, inverse=False) + match_error = np.linalg.norm(warp_points(mkpts0, homography_gt, inverse=False) - mkpts1, axis=-1) + else: + match_error = None + + thr = 5 # Pix + error = np.mean(np.linalg.norm(eval_points_warpped - gt_points, axis=1)) + print(f"error: {error}") + errors.append(error) + + result_dict['@-@'.join([data['pair_names'][0][0].split('/', 1)[1], data['pair_names'][1][0].split('/', 1)[1]])] = error + + if args.plot_matches and matcher is not None: + draw_match_type='corres' + color_type=args.plot_matches_color + plot_matches(img0, img1, mkpts0, mkpts1, mconf, vertical=False, draw_match_type=draw_match_type, alpha=args.plot_matches_alpha, save_path=Path(vis_output_path)/'demo_matches'/f"{pair_name}_{draw_match_type}.pdf", inverse=False, match_error=match_error if color_type == 'error' else None, error_thr=thr, color_type=color_type) + + # Success Rate Metric: + metric = error_auc(np.array(errors), thresholds=[5,10,20], method="success_rate") + print(metric) + + # AUC Metric: + metric = error_auc(np.array(errors), thresholds=[5,10,20], method='fire_paper' if 'FIRE' in args.npz_list_path else 'exact_auc') + print(metric) + + save_h5(result_dict, (Path(args.output_path) / f'eval_{sub_dataset_name}_{args.method}_{args.comment}_error.h5')) \ No newline at end of file diff --git a/imcui/third_party/MatchAnything/tools/tools_utils/plot.py b/imcui/third_party/MatchAnything/tools/tools_utils/plot.py new file mode 100644 index 0000000000000000000000000000000000000000..a7eefeb312b3954a8d05a67a264efc0e5e7713ad --- /dev/null +++ b/imcui/third_party/MatchAnything/tools/tools_utils/plot.py @@ -0,0 
diff --git a/imcui/third_party/MatchAnything/tools/tools_utils/plot.py b/imcui/third_party/MatchAnything/tools/tools_utils/plot.py
new file mode 100644
index 0000000000000000000000000000000000000000..a7eefeb312b3954a8d05a67a264efc0e5e7713ad
--- /dev/null
+++ b/imcui/third_party/MatchAnything/tools/tools_utils/plot.py
@@ -0,0 +1,77 @@
+import matplotlib
+matplotlib.use("agg")
+import matplotlib.cm as cm
+import numpy as np
+from PIL import Image
+import cv2
+from kornia.geometry.epipolar import numeric
+import torch
+
+from pathlib import Path
+import sys
+sys.path.append(str(Path(__file__).parent.parent.parent.resolve()))
+
+from src.utils.plotting import error_colormap, dynamic_alpha
+from src.utils.metrics import symmetric_epipolar_distance
+from notebooks.notebooks_utils import make_matching_figure
+
+def plot_matches(img0_origin, img1_origin, mkpts0, mkpts1, mconf, vertical, draw_match_type, alpha, save_path, inverse=False, match_error=None, error_thr=5e-3, color_type='error'):
+    if inverse:
+        img0_origin, img1_origin, mkpts0, mkpts1 = img1_origin, img0_origin, mkpts1, mkpts0
+    img0_origin = np.copy(img0_origin) / 255.0
+    img1_origin = np.copy(img1_origin) / 255.0
+    # Draw (the alpha argument is overridden by a match-count-dependent value):
+    alpha = dynamic_alpha(len(mkpts0), milestones=[0, 200, 500, 1000, 2000, 4000], alphas=[1.0, 0.5, 0.3, 0.2, 0.15, 0.09])
+    if color_type == 'conf':
+        color = error_colormap(mconf, thr=None, alpha=alpha)
+    elif color_type == 'green':
+        mconf = np.ones_like(mconf) * 0.15
+        color = error_colormap(mconf, thr=None, alpha=alpha)
+    else:
+        color = error_colormap(np.zeros((len(mconf),)) if match_error is None else match_error, error_thr, alpha=alpha)
+
+    text = [
+        ''
+    ]
+
+    Path(save_path).parent.mkdir(parents=True, exist_ok=True)
+    fig = make_matching_figure(img0_origin, img1_origin, mkpts0, mkpts1, color, text=text, path=save_path, vertical=vertical, plot_size_factor=1, draw_match_type=draw_match_type, r_normalize_factor=0.4)
+
+def blend_img(img0, img1, alpha=0.4, save_path=None, blend_method='weighted_sum'):
+    img0, img1 = Image.fromarray(np.array(img0)), Image.fromarray(np.array(img1))
+    Path(save_path).parent.mkdir(parents=True, exist_ok=True)
+
+    # Blend:
+    if blend_method == 'weighted_sum':
+        blended_img = Image.blend(img0, img1, alpha=alpha)
+    else:
+        raise NotImplementedError
+
+    blended_img.save(save_path)
+
+def warp_img(img0, img1, H, fill_white=False):
+    img0 = np.copy(img0).astype(np.uint8)
+    img1 = np.copy(img1).astype(np.uint8)
+    if fill_white:
+        img0_warped = cv2.warpAffine(np.array(img0), H[:2, :], (img1.shape[1], img1.shape[0]), flags=cv2.INTER_LINEAR, borderMode=cv2.BORDER_CONSTANT, borderValue=[255, 255, 255])
+    else:
+        img0_warped = cv2.warpAffine(np.array(img0), H[:2, :], (img1.shape[1], img1.shape[0]), flags=cv2.INTER_LINEAR)
+    return img0_warped
+
+def warp_img_and_blend(img0_origin, img1_origin, H, save_path, alpha=0.4, inverse=False):
+    if inverse:
+        img0_origin, img1_origin = img1_origin, img0_origin
+        H = np.linalg.inv(H)
+    img0_origin = np.copy(img0_origin).astype(np.uint8)
+    img1_origin = np.copy(img1_origin).astype(np.uint8)
+
+    # Warp
+    img0_warped = Image.fromarray(warp_img(img0_origin, img1_origin, H, fill_white=False))
+
+    # Blend and save:
+    blend_img(img0_warped, Image.fromarray(img1_origin), alpha=alpha, save_path=save_path)
+
+def epipolar_error(mkpts0, mkpts1, T_0to1, K0, K1):
+    Tx = numeric.cross_product_matrix(torch.from_numpy(T_0to1)[:3, 3])
+    E_mat = Tx @ torch.from_numpy(T_0to1[:3, :3])
+    return symmetric_epipolar_distance(torch.from_numpy(mkpts0), torch.from_numpy(mkpts1), E_mat, torch.from_numpy(K0), torch.from_numpy(K1)).numpy()
\ No newline at end of file
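# For reference: epipolar_error above builds the essential matrix E = [t]_x R
# from the ground-truth relative pose and scores each match with the symmetric
# epipolar distance. A small numpy sketch of that distance, assuming pixel
# keypoints are first normalized by the intrinsics (the repo uses its own
# symmetric_epipolar_distance; this is only an illustration):
import numpy as np

def sym_epipolar_distance(mkpts0, mkpts1, E, K0, K1):
    # Homogenize and normalize pixel coordinates: p = K^-1 [u, v, 1]^T.
    p0 = np.c_[mkpts0, np.ones(len(mkpts0))] @ np.linalg.inv(K0).T
    p1 = np.c_[mkpts1, np.ones(len(mkpts1))] @ np.linalg.inv(K1).T
    Ep0 = p0 @ E.T    # rows are E @ p0_i
    Etp1 = p1 @ E     # rows are E^T @ p1_i
    num = np.sum(p1 * Ep0, axis=1) ** 2          # (p1^T E p0)^2
    return num * (1.0 / (Ep0[:, 0] ** 2 + Ep0[:, 1] ** 2) +
                  1.0 / (Etp1[:, 0] ** 2 + Etp1[:, 1] ** 2))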
     send_to_match,
 )
 import os
-GITHUB_TOKEN = os.environ.get('GITHUB_TOKEN')
-GOOGLE_TOKEN = os.environ.get('GOOGLE_TOKEN')
+# GITHUB_TOKEN = os.environ.get('GITHUB_TOKEN')
+# GOOGLE_TOKEN = os.environ.get('GOOGLE_TOKEN')
+GOOGLE_TOKEN = "12L3g9-w8rR9K2L4rYaGaDJ7NqX1D713d"
 if not (Path(__file__).parent / "../third_party/MatchAnything").exists():
     print("**********************************")
-    os.system(f"cd {str(Path(__file__).parent / '../third_party')} && git clone https://{GITHUB_TOKEN}@github.com/hxy-123/MatchAnything_HF.git && mv MatchAnything_HF MatchAnything && cd MatchAnything && gdown {GOOGLE_TOKEN} && unzip weights.zip")
+    # os.system(f"cd {str(Path(__file__).parent / '../third_party')} && git clone https://{GITHUB_TOKEN}@github.com/hxy-123/MatchAnything_HF.git && mv MatchAnything_HF MatchAnything && cd MatchAnything && gdown {GOOGLE_TOKEN} && unzip weights.zip")
+    os.system(f"cd {str(Path(__file__).parent / '../third_party')} && cd MatchAnything && gdown {GOOGLE_TOKEN} && unzip weights.zip")
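# The shell pipeline above (gdown + unzip) can also be expressed in plain
# Python; this is only a sketch, assuming the gdown package's download(id=...)
# API and treating GOOGLE_TOKEN as a Google Drive file ID:
import zipfile
import gdown

weights_zip = gdown.download(id=GOOGLE_TOKEN, output="weights.zip", quiet=False)
with zipfile.ZipFile(weights_zip) as zf:
    zf.extractall("MatchAnything")  # hypothetical target directory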
 DESCRIPTION = '''
 MatchAnything